ash-0.38.0+1.3.281/.cargo_vcs_info.json0000644000000001410000000000100125010ustar { "git": { "sha1": "b724b78dac8d83879ed7a1aad2b91bb9f2beb5cf" }, "path_in_vcs": "ash" }ash-0.38.0+1.3.281/Cargo.toml0000644000000026030000000000100105040ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" rust-version = "1.69.0" name = "ash" version = "0.38.0+1.3.281" authors = [ "Maik Klein ", "Benjamin Saunders ", "Marijn Suijten ", ] description = "Vulkan bindings for Rust" readme = "README.md" keywords = [ "gamedev", "graphics", "vulkan", "bindings", ] categories = [ "api-bindings", "external-ffi-bindings", "game-development", "graphics", "rendering::graphics-api", ] license = "MIT OR Apache-2.0" repository = "https://github.com/ash-rs/ash" [package.metadata.docs.rs] all-features = true rustdoc-args = [ "--cfg", "docsrs", ] [package.metadata.release] no-dev-version = true [dependencies.libloading] version = "0.8" optional = true [features] debug = [] default = [ "loaded", "debug", "std", ] linked = [] loaded = [ "libloading", "std", ] std = [] ash-0.38.0+1.3.281/Cargo.toml.orig000064400000000000000000000021151046102023000141630ustar 00000000000000[package] name = "ash" version = "0.38.0+1.3.281" authors = [ "Maik Klein ", "Benjamin Saunders ", "Marijn Suijten ", ] description = "Vulkan bindings for Rust" license = "MIT OR Apache-2.0" repository = "https://github.com/ash-rs/ash" readme = "../README.md" keywords = ["gamedev", "graphics", "vulkan", "bindings"] categories = [ "api-bindings", "external-ffi-bindings", "game-development", "graphics", "rendering::graphics-api" ] edition = "2021" rust-version = "1.69.0" [dependencies] libloading = { version = "0.8", optional = true } [features] default = ["loaded", "debug", "std"] # Link the Vulkan loader at compile time. linked = [] # Support searching for the Vulkan loader manually at runtime. loaded = ["libloading", "std"] # Whether Vulkan structs should implement Debug. debug = [] # Whether the standard library should be required std = [] [package.metadata.release] no-dev-version = true [package.metadata.docs.rs] all-features = true rustdoc-args = ["--cfg", "docsrs"] ash-0.38.0+1.3.281/LICENSE-APACHE000064400000000000000000000227331046102023000132300ustar 00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. 
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: You must give any other recipients of the Work or Derivative Works a copy of this License; and You must cause any modified files to carry prominent notices stating that You changed the files; and You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS Copyright 2016 Maik Klein Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ash-0.38.0+1.3.281/LICENSE-MIT000064400000000000000000000020271046102023000127320ustar 00000000000000Copyright (c) 2016 ASH Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
ash-0.38.0+1.3.281/README.md000064400000000000000000000257461046102023000125720ustar 00000000000000# Ash A very lightweight wrapper around Vulkan [![Crates.io Version](https://img.shields.io/crates/v/ash.svg)](https://crates.io/crates/ash) [![Documentation](https://docs.rs/ash/badge.svg)](https://docs.rs/ash) [![Build Status](https://github.com/ash-rs/ash/workflows/CI/badge.svg)](https://github.com/ash-rs/ash/actions?workflow=CI) [![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE-MIT) [![LICENSE](https://img.shields.io/badge/license-Apache--2.0-blue.svg)](LICENSE-APACHE) [![Join the chat at https://gitter.im/MaikKlein/ash](https://badges.gitter.im/MaikKlein/ash.svg)](https://gitter.im/MaikKlein/ash?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![MSRV](https://img.shields.io/badge/rustc-1.69.0+-ab6000.svg)](https://blog.rust-lang.org/2023/04/20/Rust-1.69.0.html) ## Overview - [x] A true Vulkan API without compromises - [x] Convenience features without limiting functionality - [x] Additional type safety - [x] Device local function pointer loading - [x] No validation, everything is **unsafe** - [x] Lifetime-safety on structs created with the builder pattern - [x] Generated from `vk.xml` - [x] Support for Vulkan `1.1`, `1.2`, `1.3` - [x] `no_std` support ## ⚠️ Semver compatibility warning The Vulkan Video bindings are experimental and still seeing breaking changes in their upstream specification, and are only provided by Ash for early adopters. All related functions and types are semver-exempt [^1] (we allow breaking API changes while releasing Ash with non-breaking semver bumps). [^1]: `generator` complexity makes it so that we cannot easily hide these bindings behind a non-`default` feature flag, and they are widespread across the generated codebase. ## Features ### Explicit returns with `Result` ```rust // function signature pub fn create_instance(&self, create_info: &vk::InstanceCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>) -> Result { .. } let instance = entry.create_instance(&create_info, None) .expect("Instance creation error"); ``` ### `Vec` instead of mutable slices ```rust pub fn get_swapchain_images(&self, swapchain: vk::SwapchainKHR) -> VkResult>; let present_images = swapchain_loader.get_swapchain_images_khr(swapchain).unwrap(); ``` _Note_: Functions don't return `Vec` if this would limit the functionality. See `p_next`. ### Slices ```rust pub fn cmd_pipeline_barrier(&self, command_buffer: vk::CommandBuffer, src_stage_mask: vk::PipelineStageFlags, dst_stage_mask: vk::PipelineStageFlags, dependency_flags: vk::DependencyFlags, memory_barriers: &[vk::MemoryBarrier<'_>], buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>], image_memory_barriers: &[vk::ImageMemoryBarrier<'_>]); ``` ### Strongly typed handles Each Vulkan handle type is exposed as a newtyped struct for improved type safety. Null handles can be constructed with `T::null()`, and handles may be freely converted to and from `u64` with `Handle::from_raw` and `Handle::as_raw` for interop with non-Ash Vulkan code. 
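For example, a short round-trip sketch (using `vk::Image` purely as an illustration; any handle type behaves the same way):

```rust
use ash::vk::{self, Handle};

// A null handle, usable as a "no image yet" sentinel.
let image = vk::Image::null();
assert!(image.is_null());

// Convert to the raw u64 for a non-Ash API, then back again.
let raw: u64 = image.as_raw();
let same_image = vk::Image::from_raw(raw);
assert_eq!(image, same_image);
```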
### Builder pattern ```rust let queue_info = [vk::DeviceQueueCreateInfo::default() .queue_family_index(queue_family_index) .queue_priorities(&priorities)]; let device_create_info = vk::DeviceCreateInfo::default() .queue_create_infos(&queue_info) .enabled_extension_names(&device_extension_names_raw) .enabled_features(&features); let device: Device = instance .create_device(pdevice, &device_create_info, None) .unwrap(); ``` ### Pointer chains Use `base.push_next(ext)` to insert `ext` at the front of the pointer chain attached to `base`. ```rust let mut variable_pointers = vk::PhysicalDeviceVariablePointerFeatures::default(); let mut corner = vk::PhysicalDeviceCornerSampledImageFeaturesNV::default(); let mut device_create_info = vk::DeviceCreateInfo::default() .push_next(&mut corner) .push_next(&mut variable_pointers); ``` The generic argument of `.push_next()` only allows valid structs to extend a given struct (known as [`structextends` in the Vulkan registry](https://registry.khronos.org/vulkan/specs/1.3/styleguide.html#extensions-interactions), mapped to `Extends*` traits). Only structs that are listed one or more times in any `structextends` will implement a `.push_next()`. ### Flags and constants as associated constants ```rust // Bitflag vk::AccessFlags::COLOR_ATTACHMENT_READ | vk::AccessFlags::COLOR_ATTACHMENT_WRITE ``` ```rust // Constant vk::PipelineBindPoint::GRAPHICS, ``` ### Debug/Display for Flags ```rust let flag = vk::AccessFlags::COLOR_ATTACHMENT_READ | vk::AccessFlags::COLOR_ATTACHMENT_WRITE; println!("Debug: {:?}", flag); println!("Display: {}", flag); // Prints: // Debug: AccessFlags(110000000) // Display: COLOR_ATTACHMENT_READ | COLOR_ATTACHMENT_WRITE ``` ### Function pointer loading Ash also takes care of loading the function pointers. Function pointers are split into 3 categories. - Entry: Loads the Vulkan library. Needs to outlive `Instance` and `Device`. - Instance: Loads instance level functions. Needs to outlive the `Device`s it has created. - Device: Loads device **local** functions. The loader is just one possible implementation: - Device level functions are retrieved on a per device basis. - Everything is loaded by default, functions that failed to load are initialized to a function that always panics. - Do not call Vulkan 1.1 functions if you have created a 1.0 instance. Doing so will result in a panic. Custom loaders can be implemented. ### Extension loading Additionally, every Vulkan extension has to be loaded explicitly. You can find all extensions directly under `ash::*` in a module with their prefix (e.g. `khr` or `ext`). ```rust use ash::khr; let swapchain_loader = khr::swapchain::Device::new(&instance, &device); let swapchain = swapchain_loader.create_swapchain(&swapchain_create_info).unwrap(); ``` ### Raw function pointers Raw function pointers are available, if something hasn't been exposed yet in the higher level API. Please open an issue if anything is missing. ```rust device.fp_v1_0().destroy_device(...); ``` ### Support for extension names ```rust use ash::{ext, khr}; #[cfg(all(unix, not(target_os = "android")))] fn extension_names() -> Vec<*const i8> { vec![ khr::surface::NAME.as_ptr(), khr::xlib_surface::NAME.as_ptr(), ext::debug_utils::NAME.as_ptr(), ] } ``` ### Implicit handles Handles from Instance or Device are passed implicitly. 
```rust pub fn create_command_pool(&self, create_info: &vk::CommandPoolCreateInfo<'_>) -> VkResult; let pool = device.create_command_pool(&pool_create_info).unwrap(); ``` ### Optional linking The default `loaded` cargo feature will dynamically load the default Vulkan library for the current platform with `Entry::load`, meaning that the build environment does not have to have Vulkan development packages installed. If, on the other hand, your application cannot handle Vulkan being missing at runtime, you can instead enable the `linked` feature, which will link your binary with the Vulkan loader directly and expose the infallible `Entry::linked`. ### Use in `no_std` environments Ash can be used in `no_std` environments (with `alloc`) by disabling the `std` feature. ## Example You can find the examples [here](https://github.com/ash-rs/ash/tree/master/ash-examples). All examples currently require: the LunarG Validation layers and a Vulkan library that is visible in your `PATH`. An easy way to get started is to use the [LunarG Vulkan SDK](https://lunarg.com/vulkan-sdk/) #### Windows Make sure that you have a Vulkan ready driver and install the [LunarG Vulkan SDK](https://lunarg.com/vulkan-sdk/). #### Linux Install a Vulkan driver for your graphics hardware of choice, and (optionally) the [Validation Layers](https://github.com/KhronosGroup/Vulkan-ValidationLayers) via your package manager: - Arch Linux: https://wiki.archlinux.org/title/Vulkan. - Gentoo: https://wiki.gentoo.org/wiki/Vulkan. - Ubuntu/Debian: Besides installing a compatible graphics driver, install [`vulkan-validationlayers`](https://packages.ubuntu.com/vulkan-validationlayers) ([Debian](https://packages.debian.org/search?keywords=vulkan-validationlayers)) for the Validation Layers. - Other distros: consult your distro documentation and/or package repository for the preferred method to install and use Vulkan. #### macOS Install the [LunarG Vulkan SDK](https://lunarg.com/vulkan-sdk/). The installer puts the SDK in `$HOME/VulkanSDK/` by default. You will need to set the following environment variables when running cargo: ```sh VULKAN_SDK=$HOME/VulkanSDK//macOS \ DYLD_FALLBACK_LIBRARY_PATH=$VULKAN_SDK/lib \ VK_ICD_FILENAMES=$VULKAN_SDK/share/vulkan/icd.d/MoltenVK_icd.json \ VK_LAYER_PATH=$VULKAN_SDK/share/vulkan/explicit_layer.d \ cargo run ... ``` ### [Triangle](https://github.com/ash-rs/ash/blob/master/ash-examples/src/bin/triangle.rs) Displays a triangle with vertex colors. ```sh cargo run -p ash-examples --bin triangle ``` ![screenshot](https://i.imgur.com/PQZcL6w.jpg) ### [Texture](https://github.com/ash-rs/ash/blob/master/ash-examples/src/bin/texture.rs) Displays a texture on a quad. ```sh cargo run -p ash-examples --bin texture ``` ![texture](https://i.imgur.com/trow00H.png) ## Useful resources ### Examples - [vulkan-tutorial-rust](https://github.com/Usami-Renko/vulkan-tutorial-rust) - A port of [vulkan-tutorial.com](https://vulkan-tutorial.com). - [ash-sample-progression](https://github.com/bzm3r/ash-sample-progression) - A port of the LunarG examples. - [ash-nv-rt](https://github.com/gwihlidal/ash-nv-rt) A raytracing example for ash. ### Utility libraries - [vk-sync](https://github.com/gwihlidal/vk-sync-rs) - Simplified Vulkan synchronization logic, written in rust. - [vk-mem-rs](https://github.com/gwihlidal/vk-mem-rs) - This crate provides an FFI layer and idiomatic rust wrappers for the excellent AMD Vulkan Memory Allocator (VMA) C/C++ library. 
- [gpu-allocator](https://github.com/Traverse-Research/gpu-allocator) - GPU Memory allocator written in pure Rust for Vulkan and DirectX 12. - [lahar](https://github.com/Ralith/lahar) - Tools for asynchronously uploading data to a Vulkan device. ### Libraries that use ash - [gfx-rs](https://github.com/gfx-rs/gfx) - gfx-rs is a low-level, cross-platform graphics abstraction library in Rust. ## A thanks to - [Api with no secrets](https://software.intel.com/en-us/articles/api-without-secrets-introduction-to-vulkan-part-1) - [Vulkan tutorial](https://jhenriques.net/development.html) - [Vulkan examples](https://github.com/SaschaWillems/Vulkan) - [Vulkan tutorial](https://vulkan-tutorial.com/) - [Vulkano](https://github.com/vulkano-rs/vulkano) - [vk-rs](https://github.com/Osspial/vk-rs) ash-0.38.0+1.3.281/build.rs000064400000000000000000000014371046102023000127470ustar 00000000000000fn main() { #[cfg(feature = "linked")] { use std::env; let target_family = env::var("CARGO_CFG_TARGET_FAMILY").unwrap(); let target_pointer_width = env::var("CARGO_CFG_TARGET_POINTER_WIDTH").unwrap(); println!("cargo:rerun-if-env-changed=VULKAN_SDK"); if let Ok(var) = env::var("VULKAN_SDK") { let suffix = match (&*target_family, &*target_pointer_width) { ("windows", "32") => "Lib32", ("windows", "64") => "Lib", _ => "lib", }; println!("cargo:rustc-link-search={var}/{suffix}"); } let lib = match &*target_family { "windows" => "vulkan-1", _ => "vulkan", }; println!("cargo:rustc-link-lib={lib}"); } } ash-0.38.0+1.3.281/src/device.rs000064400000000000000000002704301046102023000136770ustar 00000000000000#![allow(clippy::trivially_copy_pass_by_ref)] use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::ffi; use core::mem; use core::ptr; /// #[derive(Clone)] pub struct Device { pub(crate) handle: vk::Device, pub(crate) device_fn_1_0: crate::DeviceFnV1_0, pub(crate) device_fn_1_1: crate::DeviceFnV1_1, pub(crate) device_fn_1_2: crate::DeviceFnV1_2, pub(crate) device_fn_1_3: crate::DeviceFnV1_3, } impl Device { pub unsafe fn load(instance_fn: &crate::InstanceFnV1_0, device: vk::Device) -> Self { Self::load_with( |name| mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr())), device, ) } pub unsafe fn load_with( mut load_fn: impl FnMut(&ffi::CStr) -> *const ffi::c_void, device: vk::Device, ) -> Self { Self::from_parts_1_3( device, crate::DeviceFnV1_0::load(&mut load_fn), crate::DeviceFnV1_1::load(&mut load_fn), crate::DeviceFnV1_2::load(&mut load_fn), crate::DeviceFnV1_3::load(&mut load_fn), ) } #[inline] pub fn from_parts_1_3( handle: vk::Device, device_fn_1_0: crate::DeviceFnV1_0, device_fn_1_1: crate::DeviceFnV1_1, device_fn_1_2: crate::DeviceFnV1_2, device_fn_1_3: crate::DeviceFnV1_3, ) -> Self { Self { handle, device_fn_1_0, device_fn_1_1, device_fn_1_2, device_fn_1_3, } } #[inline] pub fn handle(&self) -> vk::Device { self.handle } } /// Vulkan core 1.3 impl Device { #[inline] pub fn fp_v1_3(&self) -> &crate::DeviceFnV1_3 { &self.device_fn_1_3 } /// #[inline] pub unsafe fn create_private_data_slot( &self, create_info: &vk::PrivateDataSlotCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut private_data_slot = mem::MaybeUninit::uninit(); (self.device_fn_1_3.create_private_data_slot)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), private_data_slot.as_mut_ptr(), ) .assume_init_on_success(private_data_slot) } /// #[inline] pub unsafe fn destroy_private_data_slot( &self, private_data_slot: vk::PrivateDataSlot, 
allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_3.destroy_private_data_slot)( self.handle, private_data_slot, allocation_callbacks.as_raw_ptr(), ) } /// #[inline] pub unsafe fn set_private_data( &self, object: T, private_data_slot: vk::PrivateDataSlot, data: u64, ) -> VkResult<()> { (self.device_fn_1_3.set_private_data)( self.handle, T::TYPE, object.as_raw(), private_data_slot, data, ) .result() } /// #[inline] pub unsafe fn get_private_data( &self, object: T, private_data_slot: vk::PrivateDataSlot, ) -> u64 { let mut data = mem::MaybeUninit::uninit(); (self.device_fn_1_3.get_private_data)( self.handle, T::TYPE, object.as_raw(), private_data_slot, data.as_mut_ptr(), ); data.assume_init() } /// #[inline] pub unsafe fn cmd_pipeline_barrier2( &self, command_buffer: vk::CommandBuffer, dependency_info: &vk::DependencyInfo<'_>, ) { (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info) } /// #[inline] pub unsafe fn cmd_reset_event2( &self, command_buffer: vk::CommandBuffer, event: vk::Event, stage_mask: vk::PipelineStageFlags2, ) { (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask) } /// #[inline] pub unsafe fn cmd_set_event2( &self, command_buffer: vk::CommandBuffer, event: vk::Event, dependency_info: &vk::DependencyInfo<'_>, ) { (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info) } /// #[inline] pub unsafe fn cmd_wait_events2( &self, command_buffer: vk::CommandBuffer, events: &[vk::Event], dependency_infos: &[vk::DependencyInfo<'_>], ) { assert_eq!(events.len(), dependency_infos.len()); (self.device_fn_1_3.cmd_wait_events2)( command_buffer, events.len() as u32, events.as_ptr(), dependency_infos.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_write_timestamp2( &self, command_buffer: vk::CommandBuffer, stage: vk::PipelineStageFlags2, query_pool: vk::QueryPool, query: u32, ) { (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query) } /// #[inline] pub unsafe fn queue_submit2( &self, queue: vk::Queue, submits: &[vk::SubmitInfo2<'_>], fence: vk::Fence, ) -> VkResult<()> { (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence) .result() } /// #[inline] pub unsafe fn cmd_copy_buffer2( &self, command_buffer: vk::CommandBuffer, copy_buffer_info: &vk::CopyBufferInfo2<'_>, ) { (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info) } /// #[inline] pub unsafe fn cmd_copy_image2( &self, command_buffer: vk::CommandBuffer, copy_image_info: &vk::CopyImageInfo2<'_>, ) { (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info) } /// #[inline] pub unsafe fn cmd_copy_buffer_to_image2( &self, command_buffer: vk::CommandBuffer, copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2<'_>, ) { (self.device_fn_1_3.cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info) } /// #[inline] pub unsafe fn cmd_copy_image_to_buffer2( &self, command_buffer: vk::CommandBuffer, copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2<'_>, ) { (self.device_fn_1_3.cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info) } /// #[inline] pub unsafe fn cmd_blit_image2( &self, command_buffer: vk::CommandBuffer, blit_image_info: &vk::BlitImageInfo2<'_>, ) { (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info) } /// #[inline] pub unsafe fn cmd_resolve_image2( &self, command_buffer: vk::CommandBuffer, resolve_image_info: &vk::ResolveImageInfo2<'_>, ) { (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, 
resolve_image_info) } /// #[inline] pub unsafe fn cmd_begin_rendering( &self, command_buffer: vk::CommandBuffer, rendering_info: &vk::RenderingInfo<'_>, ) { (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info) } /// #[inline] pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) { (self.device_fn_1_3.cmd_end_rendering)(command_buffer) } /// #[inline] pub unsafe fn cmd_set_cull_mode( &self, command_buffer: vk::CommandBuffer, cull_mode: vk::CullModeFlags, ) { (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode) } /// #[inline] pub unsafe fn cmd_set_front_face( &self, command_buffer: vk::CommandBuffer, front_face: vk::FrontFace, ) { (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face) } /// #[inline] pub unsafe fn cmd_set_primitive_topology( &self, command_buffer: vk::CommandBuffer, primitive_topology: vk::PrimitiveTopology, ) { (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology) } /// #[inline] pub unsafe fn cmd_set_viewport_with_count( &self, command_buffer: vk::CommandBuffer, viewports: &[vk::Viewport], ) { (self.device_fn_1_3.cmd_set_viewport_with_count)( command_buffer, viewports.len() as u32, viewports.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_scissor_with_count( &self, command_buffer: vk::CommandBuffer, scissors: &[vk::Rect2D], ) { (self.device_fn_1_3.cmd_set_scissor_with_count)( command_buffer, scissors.len() as u32, scissors.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_bind_vertex_buffers2( &self, command_buffer: vk::CommandBuffer, first_binding: u32, buffers: &[vk::Buffer], offsets: &[vk::DeviceSize], sizes: Option<&[vk::DeviceSize]>, strides: Option<&[vk::DeviceSize]>, ) { assert_eq!(offsets.len(), buffers.len()); let p_sizes = if let Some(sizes) = sizes { assert_eq!(sizes.len(), buffers.len()); sizes.as_ptr() } else { ptr::null() }; let p_strides = if let Some(strides) = strides { assert_eq!(strides.len(), buffers.len()); strides.as_ptr() } else { ptr::null() }; (self.device_fn_1_3.cmd_bind_vertex_buffers2)( command_buffer, first_binding, buffers.len() as u32, buffers.as_ptr(), offsets.as_ptr(), p_sizes, p_strides, ) } /// #[inline] pub unsafe fn cmd_set_depth_test_enable( &self, command_buffer: vk::CommandBuffer, depth_test_enable: bool, ) { (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_write_enable( &self, command_buffer: vk::CommandBuffer, depth_write_enable: bool, ) { (self.device_fn_1_3.cmd_set_depth_write_enable)(command_buffer, depth_write_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_compare_op( &self, command_buffer: vk::CommandBuffer, depth_compare_op: vk::CompareOp, ) { (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op) } /// #[inline] pub unsafe fn cmd_set_depth_bounds_test_enable( &self, command_buffer: vk::CommandBuffer, depth_bounds_test_enable: bool, ) { (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)( command_buffer, depth_bounds_test_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_stencil_test_enable( &self, command_buffer: vk::CommandBuffer, stencil_test_enable: bool, ) { (self.device_fn_1_3.cmd_set_stencil_test_enable)(command_buffer, stencil_test_enable.into()) } /// #[inline] pub unsafe fn cmd_set_stencil_op( &self, command_buffer: vk::CommandBuffer, face_mask: vk::StencilFaceFlags, fail_op: vk::StencilOp, pass_op: vk::StencilOp, depth_fail_op: vk::StencilOp, compare_op: vk::CompareOp, ) { 
(self.device_fn_1_3.cmd_set_stencil_op)( command_buffer, face_mask, fail_op, pass_op, depth_fail_op, compare_op, ) } /// #[inline] pub unsafe fn cmd_set_rasterizer_discard_enable( &self, command_buffer: vk::CommandBuffer, rasterizer_discard_enable: bool, ) { (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)( command_buffer, rasterizer_discard_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_depth_bias_enable( &self, command_buffer: vk::CommandBuffer, depth_bias_enable: bool, ) { (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into()) } /// #[inline] pub unsafe fn cmd_set_primitive_restart_enable( &self, command_buffer: vk::CommandBuffer, primitive_restart_enable: bool, ) { (self.device_fn_1_3.cmd_set_primitive_restart_enable)( command_buffer, primitive_restart_enable.into(), ) } /// #[inline] pub unsafe fn get_device_buffer_memory_requirements( &self, memory_requirements: &vk::DeviceBufferMemoryRequirements<'_>, out: &mut vk::MemoryRequirements2<'_>, ) { (self.device_fn_1_3.get_device_buffer_memory_requirements)( self.handle, memory_requirements, out, ) } /// #[inline] pub unsafe fn get_device_image_memory_requirements( &self, memory_requirements: &vk::DeviceImageMemoryRequirements<'_>, out: &mut vk::MemoryRequirements2<'_>, ) { (self.device_fn_1_3.get_device_image_memory_requirements)( self.handle, memory_requirements, out, ) } /// Retrieve the number of elements to pass to [`get_device_image_sparse_memory_requirements()`][Self::get_device_image_sparse_memory_requirements()] #[inline] pub unsafe fn get_device_image_sparse_memory_requirements_len( &self, memory_requirements: &vk::DeviceImageMemoryRequirements<'_>, ) -> usize { let mut count = mem::MaybeUninit::uninit(); (self .device_fn_1_3 .get_device_image_sparse_memory_requirements)( self.handle, memory_requirements, count.as_mut_ptr(), ptr::null_mut(), ); count.assume_init() as usize } /// /// /// Call [`get_device_image_sparse_memory_requirements_len()`][Self::get_device_image_sparse_memory_requirements_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. 
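    // Illustrative two-call usage sketch (comment only, not part of the generated
    // bindings); assumes a valid `device: ash::Device` and a filled-in
    // `info: vk::DeviceImageMemoryRequirements`:
    //
    //     let count = unsafe { device.get_device_image_sparse_memory_requirements_len(&info) };
    //     let mut out = vec![vk::SparseImageMemoryRequirements2::default(); count];
    //     unsafe { device.get_device_image_sparse_memory_requirements(&info, &mut out) };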
#[inline] pub unsafe fn get_device_image_sparse_memory_requirements( &self, memory_requirements: &vk::DeviceImageMemoryRequirements<'_>, out: &mut [vk::SparseImageMemoryRequirements2<'_>], ) { let mut count = out.len() as u32; (self .device_fn_1_3 .get_device_image_sparse_memory_requirements)( self.handle, memory_requirements, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } } /// Vulkan core 1.2 impl Device { #[inline] pub fn fp_v1_2(&self) -> &crate::DeviceFnV1_2 { &self.device_fn_1_2 } /// #[inline] pub unsafe fn cmd_draw_indirect_count( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, count_buffer: vk::Buffer, count_buffer_offset: vk::DeviceSize, max_draw_count: u32, stride: u32, ) { (self.device_fn_1_2.cmd_draw_indirect_count)( command_buffer, buffer, offset, count_buffer, count_buffer_offset, max_draw_count, stride, ); } /// #[inline] pub unsafe fn cmd_draw_indexed_indirect_count( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, count_buffer: vk::Buffer, count_buffer_offset: vk::DeviceSize, max_draw_count: u32, stride: u32, ) { (self.device_fn_1_2.cmd_draw_indexed_indirect_count)( command_buffer, buffer, offset, count_buffer, count_buffer_offset, max_draw_count, stride, ); } /// #[inline] pub unsafe fn create_render_pass2( &self, create_info: &vk::RenderPassCreateInfo2<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut renderpass = mem::MaybeUninit::uninit(); (self.device_fn_1_2.create_render_pass2)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), renderpass.as_mut_ptr(), ) .assume_init_on_success(renderpass) } /// #[inline] pub unsafe fn cmd_begin_render_pass2( &self, command_buffer: vk::CommandBuffer, render_pass_begin_info: &vk::RenderPassBeginInfo<'_>, subpass_begin_info: &vk::SubpassBeginInfo<'_>, ) { (self.device_fn_1_2.cmd_begin_render_pass2)( command_buffer, render_pass_begin_info, subpass_begin_info, ); } /// #[inline] pub unsafe fn cmd_next_subpass2( &self, command_buffer: vk::CommandBuffer, subpass_begin_info: &vk::SubpassBeginInfo<'_>, subpass_end_info: &vk::SubpassEndInfo<'_>, ) { (self.device_fn_1_2.cmd_next_subpass2)( command_buffer, subpass_begin_info, subpass_end_info, ); } /// #[inline] pub unsafe fn cmd_end_render_pass2( &self, command_buffer: vk::CommandBuffer, subpass_end_info: &vk::SubpassEndInfo<'_>, ) { (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info); } /// #[inline] pub unsafe fn reset_query_pool( &self, query_pool: vk::QueryPool, first_query: u32, query_count: u32, ) { (self.device_fn_1_2.reset_query_pool)(self.handle(), query_pool, first_query, query_count); } /// #[inline] pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult { let mut value = mem::MaybeUninit::uninit(); (self.device_fn_1_2.get_semaphore_counter_value)( self.handle(), semaphore, value.as_mut_ptr(), ) .assume_init_on_success(value) } /// #[inline] pub unsafe fn wait_semaphores( &self, wait_info: &vk::SemaphoreWaitInfo<'_>, timeout: u64, ) -> VkResult<()> { (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result() } /// #[inline] pub unsafe fn signal_semaphore( &self, signal_info: &vk::SemaphoreSignalInfo<'_>, ) -> VkResult<()> { (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result() } /// #[inline] pub unsafe fn get_buffer_device_address( &self, info: &vk::BufferDeviceAddressInfo<'_>, ) -> vk::DeviceAddress { 
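        // Forwards to vkGetBufferDeviceAddress: the returned vk::DeviceAddress is the
        // buffer's 64-bit device address, usable e.g. with the `bufferDeviceAddress`
        // feature from shaders or in other API structures.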
(self.device_fn_1_2.get_buffer_device_address)(self.handle(), info) } /// #[inline] pub unsafe fn get_buffer_opaque_capture_address( &self, info: &vk::BufferDeviceAddressInfo<'_>, ) -> u64 { (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info) } /// #[inline] pub unsafe fn get_device_memory_opaque_capture_address( &self, info: &vk::DeviceMemoryOpaqueCaptureAddressInfo<'_>, ) -> u64 { (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info) } } /// Vulkan core 1.1 impl Device { #[inline] pub fn fp_v1_1(&self) -> &crate::DeviceFnV1_1 { &self.device_fn_1_1 } /// #[inline] pub unsafe fn bind_buffer_memory2( &self, bind_infos: &[vk::BindBufferMemoryInfo<'_>], ) -> VkResult<()> { (self.device_fn_1_1.bind_buffer_memory2)( self.handle(), bind_infos.len() as _, bind_infos.as_ptr(), ) .result() } /// #[inline] pub unsafe fn bind_image_memory2( &self, bind_infos: &[vk::BindImageMemoryInfo<'_>], ) -> VkResult<()> { (self.device_fn_1_1.bind_image_memory2)( self.handle(), bind_infos.len() as _, bind_infos.as_ptr(), ) .result() } /// #[inline] pub unsafe fn get_device_group_peer_memory_features( &self, heap_index: u32, local_device_index: u32, remote_device_index: u32, ) -> vk::PeerMemoryFeatureFlags { let mut peer_memory_features = mem::MaybeUninit::uninit(); (self.device_fn_1_1.get_device_group_peer_memory_features)( self.handle(), heap_index, local_device_index, remote_device_index, peer_memory_features.as_mut_ptr(), ); peer_memory_features.assume_init() } /// #[inline] pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) { (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask); } /// #[inline] pub unsafe fn cmd_dispatch_base( &self, command_buffer: vk::CommandBuffer, base_group_x: u32, base_group_y: u32, base_group_z: u32, group_count_x: u32, group_count_y: u32, group_count_z: u32, ) { (self.device_fn_1_1.cmd_dispatch_base)( command_buffer, base_group_x, base_group_y, base_group_z, group_count_x, group_count_y, group_count_z, ); } /// #[inline] pub unsafe fn get_image_memory_requirements2( &self, info: &vk::ImageMemoryRequirementsInfo2<'_>, out: &mut vk::MemoryRequirements2<'_>, ) { (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out); } /// #[inline] pub unsafe fn get_buffer_memory_requirements2( &self, info: &vk::BufferMemoryRequirementsInfo2<'_>, out: &mut vk::MemoryRequirements2<'_>, ) { (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out); } /// Retrieve the number of elements to pass to [`get_image_sparse_memory_requirements2()`][Self::get_image_sparse_memory_requirements2()] #[inline] pub unsafe fn get_image_sparse_memory_requirements2_len( &self, info: &vk::ImageSparseMemoryRequirementsInfo2<'_>, ) -> usize { let mut count = mem::MaybeUninit::uninit(); (self.device_fn_1_1.get_image_sparse_memory_requirements2)( self.handle(), info, count.as_mut_ptr(), ptr::null_mut(), ); count.assume_init() as usize } /// /// /// Call [`get_image_sparse_memory_requirements2_len()`][Self::get_image_sparse_memory_requirements2_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. 
#[inline] pub unsafe fn get_image_sparse_memory_requirements2( &self, info: &vk::ImageSparseMemoryRequirementsInfo2<'_>, out: &mut [vk::SparseImageMemoryRequirements2<'_>], ) { let mut count = out.len() as u32; (self.device_fn_1_1.get_image_sparse_memory_requirements2)( self.handle(), info, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } /// #[inline] pub unsafe fn trim_command_pool( &self, command_pool: vk::CommandPool, flags: vk::CommandPoolTrimFlags, ) { (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags); } /// #[inline] pub unsafe fn get_device_queue2(&self, queue_info: &vk::DeviceQueueInfo2<'_>) -> vk::Queue { let mut queue = mem::MaybeUninit::uninit(); (self.device_fn_1_1.get_device_queue2)(self.handle(), queue_info, queue.as_mut_ptr()); queue.assume_init() } /// #[inline] pub unsafe fn create_sampler_ycbcr_conversion( &self, create_info: &vk::SamplerYcbcrConversionCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut ycbcr_conversion = mem::MaybeUninit::uninit(); (self.device_fn_1_1.create_sampler_ycbcr_conversion)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), ycbcr_conversion.as_mut_ptr(), ) .assume_init_on_success(ycbcr_conversion) } /// #[inline] pub unsafe fn destroy_sampler_ycbcr_conversion( &self, ycbcr_conversion: vk::SamplerYcbcrConversion, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)( self.handle(), ycbcr_conversion, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn create_descriptor_update_template( &self, create_info: &vk::DescriptorUpdateTemplateCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut descriptor_update_template = mem::MaybeUninit::uninit(); (self.device_fn_1_1.create_descriptor_update_template)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), descriptor_update_template.as_mut_ptr(), ) .assume_init_on_success(descriptor_update_template) } /// #[inline] pub unsafe fn destroy_descriptor_update_template( &self, descriptor_update_template: vk::DescriptorUpdateTemplate, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_1.destroy_descriptor_update_template)( self.handle(), descriptor_update_template, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn update_descriptor_set_with_template( &self, descriptor_set: vk::DescriptorSet, descriptor_update_template: vk::DescriptorUpdateTemplate, data: *const ffi::c_void, ) { (self.device_fn_1_1.update_descriptor_set_with_template)( self.handle(), descriptor_set, descriptor_update_template, data, ); } /// #[inline] pub unsafe fn get_descriptor_set_layout_support( &self, create_info: &vk::DescriptorSetLayoutCreateInfo<'_>, out: &mut vk::DescriptorSetLayoutSupport<'_>, ) { (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out); } } /// Vulkan core 1.0 impl Device { #[inline] pub fn fp_v1_0(&self) -> &crate::DeviceFnV1_0 { &self.device_fn_1_0 } /// #[inline] pub unsafe fn destroy_device( &self, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn destroy_sampler( &self, sampler: vk::Sampler, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_sampler)( self.handle(), sampler, allocation_callbacks.as_raw_ptr(), ); } /// 
#[inline] pub unsafe fn free_memory( &self, memory: vk::DeviceMemory, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.free_memory)(self.handle(), memory, allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn free_command_buffers( &self, command_pool: vk::CommandPool, command_buffers: &[vk::CommandBuffer], ) { (self.device_fn_1_0.free_command_buffers)( self.handle(), command_pool, command_buffers.len() as u32, command_buffers.as_ptr(), ); } /// #[inline] pub unsafe fn create_event( &self, create_info: &vk::EventCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut event = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_event)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), event.as_mut_ptr(), ) .assume_init_on_success(event) } /// Returns [`true`] if the event was set, and [`false`] if the event was reset, otherwise it will /// return the error code. /// #[inline] pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult { let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event); match err_code { vk::Result::EVENT_SET => Ok(true), vk::Result::EVENT_RESET => Ok(false), _ => Err(err_code), } } /// #[inline] pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> { (self.device_fn_1_0.set_event)(self.handle(), event).result() } /// #[inline] pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> { (self.device_fn_1_0.reset_event)(self.handle(), event).result() } /// #[inline] pub unsafe fn cmd_set_event( &self, command_buffer: vk::CommandBuffer, event: vk::Event, stage_mask: vk::PipelineStageFlags, ) { (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask); } /// #[inline] pub unsafe fn cmd_reset_event( &self, command_buffer: vk::CommandBuffer, event: vk::Event, stage_mask: vk::PipelineStageFlags, ) { (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask); } /// #[inline] pub unsafe fn cmd_wait_events( &self, command_buffer: vk::CommandBuffer, events: &[vk::Event], src_stage_mask: vk::PipelineStageFlags, dst_stage_mask: vk::PipelineStageFlags, memory_barriers: &[vk::MemoryBarrier<'_>], buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>], image_memory_barriers: &[vk::ImageMemoryBarrier<'_>], ) { (self.device_fn_1_0.cmd_wait_events)( command_buffer, events.len() as _, events.as_ptr(), src_stage_mask, dst_stage_mask, memory_barriers.len() as _, memory_barriers.as_ptr(), buffer_memory_barriers.len() as _, buffer_memory_barriers.as_ptr(), image_memory_barriers.len() as _, image_memory_barriers.as_ptr(), ); } /// #[inline] pub unsafe fn destroy_fence( &self, fence: vk::Fence, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_fence)(self.handle(), fence, allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn destroy_event( &self, event: vk::Event, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_event)(self.handle(), event, allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn destroy_image( &self, image: vk::Image, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_image)(self.handle(), image, allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn destroy_command_pool( &self, pool: vk::CommandPool, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_command_pool)( self.handle(), pool, 
allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_image_view( &self, image_view: vk::ImageView, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_image_view)( self.handle(), image_view, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_render_pass( &self, renderpass: vk::RenderPass, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_render_pass)( self.handle(), renderpass, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_framebuffer( &self, framebuffer: vk::Framebuffer, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_framebuffer)( self.handle(), framebuffer, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_pipeline_layout( &self, pipeline_layout: vk::PipelineLayout, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_pipeline_layout)( self.handle(), pipeline_layout, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_pipeline_cache( &self, pipeline_cache: vk::PipelineCache, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_pipeline_cache)( self.handle(), pipeline_cache, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_buffer( &self, buffer: vk::Buffer, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_buffer)( self.handle(), buffer, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_shader_module( &self, shader: vk::ShaderModule, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_shader_module)( self.handle(), shader, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_pipeline( &self, pipeline: vk::Pipeline, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_pipeline)( self.handle(), pipeline, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_semaphore( &self, semaphore: vk::Semaphore, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_semaphore)( self.handle(), semaphore, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_descriptor_pool( &self, pool: vk::DescriptorPool, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_descriptor_pool)( self.handle(), pool, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_query_pool( &self, pool: vk::QueryPool, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_query_pool)( self.handle(), pool, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn destroy_descriptor_set_layout( &self, layout: vk::DescriptorSetLayout, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_descriptor_set_layout)( self.handle(), layout, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn free_descriptor_sets( &self, pool: vk::DescriptorPool, descriptor_sets: &[vk::DescriptorSet], ) -> VkResult<()> { (self.device_fn_1_0.free_descriptor_sets)( self.handle(), pool, descriptor_sets.len() as u32, descriptor_sets.as_ptr(), ) .result() } /// #[inline] pub unsafe fn update_descriptor_sets( &self, descriptor_writes: &[vk::WriteDescriptorSet<'_>], descriptor_copies: 
&[vk::CopyDescriptorSet<'_>], ) { (self.device_fn_1_0.update_descriptor_sets)( self.handle(), descriptor_writes.len() as u32, descriptor_writes.as_ptr(), descriptor_copies.len() as u32, descriptor_copies.as_ptr(), ); } /// #[inline] pub unsafe fn create_sampler( &self, create_info: &vk::SamplerCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut sampler = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_sampler)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), sampler.as_mut_ptr(), ) .assume_init_on_success(sampler) } /// #[inline] pub unsafe fn cmd_blit_image( &self, command_buffer: vk::CommandBuffer, src_image: vk::Image, src_image_layout: vk::ImageLayout, dst_image: vk::Image, dst_image_layout: vk::ImageLayout, regions: &[vk::ImageBlit], filter: vk::Filter, ) { (self.device_fn_1_0.cmd_blit_image)( command_buffer, src_image, src_image_layout, dst_image, dst_image_layout, regions.len() as _, regions.as_ptr(), filter, ); } /// #[inline] pub unsafe fn cmd_resolve_image( &self, command_buffer: vk::CommandBuffer, src_image: vk::Image, src_image_layout: vk::ImageLayout, dst_image: vk::Image, dst_image_layout: vk::ImageLayout, regions: &[vk::ImageResolve], ) { (self.device_fn_1_0.cmd_resolve_image)( command_buffer, src_image, src_image_layout, dst_image, dst_image_layout, regions.len() as u32, regions.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_fill_buffer( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, size: vk::DeviceSize, data: u32, ) { (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data); } /// #[inline] pub unsafe fn cmd_update_buffer( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, data: &[u8], ) { (self.device_fn_1_0.cmd_update_buffer)( command_buffer, buffer, offset, data.len() as u64, data.as_ptr() as _, ); } /// #[inline] pub unsafe fn cmd_copy_buffer( &self, command_buffer: vk::CommandBuffer, src_buffer: vk::Buffer, dst_buffer: vk::Buffer, regions: &[vk::BufferCopy], ) { (self.device_fn_1_0.cmd_copy_buffer)( command_buffer, src_buffer, dst_buffer, regions.len() as u32, regions.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_copy_image_to_buffer( &self, command_buffer: vk::CommandBuffer, src_image: vk::Image, src_image_layout: vk::ImageLayout, dst_buffer: vk::Buffer, regions: &[vk::BufferImageCopy], ) { (self.device_fn_1_0.cmd_copy_image_to_buffer)( command_buffer, src_image, src_image_layout, dst_buffer, regions.len() as u32, regions.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_copy_buffer_to_image( &self, command_buffer: vk::CommandBuffer, src_buffer: vk::Buffer, dst_image: vk::Image, dst_image_layout: vk::ImageLayout, regions: &[vk::BufferImageCopy], ) { (self.device_fn_1_0.cmd_copy_buffer_to_image)( command_buffer, src_buffer, dst_image, dst_image_layout, regions.len() as u32, regions.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_copy_image( &self, command_buffer: vk::CommandBuffer, src_image: vk::Image, src_image_layout: vk::ImageLayout, dst_image: vk::Image, dst_image_layout: vk::ImageLayout, regions: &[vk::ImageCopy], ) { (self.device_fn_1_0.cmd_copy_image)( command_buffer, src_image, src_image_layout, dst_image, dst_image_layout, regions.len() as u32, regions.as_ptr(), ); } /// #[inline] pub unsafe fn allocate_descriptor_sets( &self, allocate_info: &vk::DescriptorSetAllocateInfo<'_>, ) -> VkResult> { let mut desc_set = Vec::with_capacity(allocate_info.descriptor_set_count as usize); 
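        // The Vec starts with length 0; the driver writes `descriptor_set_count` handles
        // into its spare capacity, and `set_vec_len_on_success` only adjusts the length
        // when the call actually succeeded.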
(self.device_fn_1_0.allocate_descriptor_sets)( self.handle(), allocate_info, desc_set.as_mut_ptr(), ) .set_vec_len_on_success(desc_set, allocate_info.descriptor_set_count as usize) } /// #[inline] pub unsafe fn create_descriptor_set_layout( &self, create_info: &vk::DescriptorSetLayoutCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut layout = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_descriptor_set_layout)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), layout.as_mut_ptr(), ) .assume_init_on_success(layout) } /// #[inline] pub unsafe fn device_wait_idle(&self) -> VkResult<()> { (self.device_fn_1_0.device_wait_idle)(self.handle()).result() } /// #[inline] pub unsafe fn create_descriptor_pool( &self, create_info: &vk::DescriptorPoolCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut pool = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_descriptor_pool)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), pool.as_mut_ptr(), ) .assume_init_on_success(pool) } /// #[inline] pub unsafe fn reset_descriptor_pool( &self, pool: vk::DescriptorPool, flags: vk::DescriptorPoolResetFlags, ) -> VkResult<()> { (self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result() } /// #[inline] pub unsafe fn reset_command_pool( &self, command_pool: vk::CommandPool, flags: vk::CommandPoolResetFlags, ) -> VkResult<()> { (self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result() } /// #[inline] pub unsafe fn reset_command_buffer( &self, command_buffer: vk::CommandBuffer, flags: vk::CommandBufferResetFlags, ) -> VkResult<()> { (self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result() } /// #[inline] pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> { (self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr()) .result() } /// #[inline] pub unsafe fn cmd_bind_index_buffer( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, index_type: vk::IndexType, ) { (self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type); } /// #[inline] pub unsafe fn cmd_clear_color_image( &self, command_buffer: vk::CommandBuffer, image: vk::Image, image_layout: vk::ImageLayout, clear_color_value: &vk::ClearColorValue, ranges: &[vk::ImageSubresourceRange], ) { (self.device_fn_1_0.cmd_clear_color_image)( command_buffer, image, image_layout, clear_color_value, ranges.len() as u32, ranges.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_clear_depth_stencil_image( &self, command_buffer: vk::CommandBuffer, image: vk::Image, image_layout: vk::ImageLayout, clear_depth_stencil_value: &vk::ClearDepthStencilValue, ranges: &[vk::ImageSubresourceRange], ) { (self.device_fn_1_0.cmd_clear_depth_stencil_image)( command_buffer, image, image_layout, clear_depth_stencil_value, ranges.len() as u32, ranges.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_clear_attachments( &self, command_buffer: vk::CommandBuffer, attachments: &[vk::ClearAttachment], rects: &[vk::ClearRect], ) { (self.device_fn_1_0.cmd_clear_attachments)( command_buffer, attachments.len() as u32, attachments.as_ptr(), rects.len() as u32, rects.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_draw_indexed( &self, command_buffer: vk::CommandBuffer, index_count: u32, instance_count: u32, first_index: u32, vertex_offset: i32, first_instance: u32, ) { (self.device_fn_1_0.cmd_draw_indexed)( 
command_buffer, index_count, instance_count, first_index, vertex_offset, first_instance, ); } /// #[inline] pub unsafe fn cmd_draw_indexed_indirect( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, draw_count: u32, stride: u32, ) { (self.device_fn_1_0.cmd_draw_indexed_indirect)( command_buffer, buffer, offset, draw_count, stride, ); } /// #[inline] pub unsafe fn cmd_execute_commands( &self, primary_command_buffer: vk::CommandBuffer, secondary_command_buffers: &[vk::CommandBuffer], ) { (self.device_fn_1_0.cmd_execute_commands)( primary_command_buffer, secondary_command_buffers.len() as u32, secondary_command_buffers.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_bind_descriptor_sets( &self, command_buffer: vk::CommandBuffer, pipeline_bind_point: vk::PipelineBindPoint, layout: vk::PipelineLayout, first_set: u32, descriptor_sets: &[vk::DescriptorSet], dynamic_offsets: &[u32], ) { (self.device_fn_1_0.cmd_bind_descriptor_sets)( command_buffer, pipeline_bind_point, layout, first_set, descriptor_sets.len() as u32, descriptor_sets.as_ptr(), dynamic_offsets.len() as u32, dynamic_offsets.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_copy_query_pool_results( &self, command_buffer: vk::CommandBuffer, query_pool: vk::QueryPool, first_query: u32, query_count: u32, dst_buffer: vk::Buffer, dst_offset: vk::DeviceSize, stride: vk::DeviceSize, flags: vk::QueryResultFlags, ) { (self.device_fn_1_0.cmd_copy_query_pool_results)( command_buffer, query_pool, first_query, query_count, dst_buffer, dst_offset, stride, flags, ); } /// #[inline] pub unsafe fn cmd_push_constants( &self, command_buffer: vk::CommandBuffer, layout: vk::PipelineLayout, stage_flags: vk::ShaderStageFlags, offset: u32, constants: &[u8], ) { (self.device_fn_1_0.cmd_push_constants)( command_buffer, layout, stage_flags, offset, constants.len() as _, constants.as_ptr() as _, ); } /// #[inline] pub unsafe fn cmd_begin_render_pass( &self, command_buffer: vk::CommandBuffer, render_pass_begin: &vk::RenderPassBeginInfo<'_>, contents: vk::SubpassContents, ) { (self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, render_pass_begin, contents); } /// #[inline] pub unsafe fn cmd_next_subpass( &self, command_buffer: vk::CommandBuffer, contents: vk::SubpassContents, ) { (self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents); } /// #[inline] pub unsafe fn cmd_bind_pipeline( &self, command_buffer: vk::CommandBuffer, pipeline_bind_point: vk::PipelineBindPoint, pipeline: vk::Pipeline, ) { (self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline); } /// #[inline] pub unsafe fn cmd_set_scissor( &self, command_buffer: vk::CommandBuffer, first_scissor: u32, scissors: &[vk::Rect2D], ) { (self.device_fn_1_0.cmd_set_scissor)( command_buffer, first_scissor, scissors.len() as u32, scissors.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) { (self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width); } /// #[inline] pub unsafe fn cmd_bind_vertex_buffers( &self, command_buffer: vk::CommandBuffer, first_binding: u32, buffers: &[vk::Buffer], offsets: &[vk::DeviceSize], ) { debug_assert_eq!(buffers.len(), offsets.len()); (self.device_fn_1_0.cmd_bind_vertex_buffers)( command_buffer, first_binding, buffers.len() as u32, buffers.as_ptr(), offsets.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) { (self.device_fn_1_0.cmd_end_render_pass)(command_buffer); } /// 
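    ///
    /// # Example
    ///
    /// A minimal recording sketch, assuming `device` is an initialized `Device`, the
    /// command buffer is in the recording state inside a render pass instance, and the
    /// bound pipeline declares viewport and scissor as dynamic state:
    ///
    /// ```no_run
    /// # use ash::{vk, Device};
    /// # unsafe fn record(
    /// #     device: &Device,
    /// #     cmd: vk::CommandBuffer,
    /// #     pipeline: vk::Pipeline,
    /// #     viewport: vk::Viewport,
    /// #     scissor: vk::Rect2D,
    /// # ) {
    /// device.cmd_bind_pipeline(cmd, vk::PipelineBindPoint::GRAPHICS, pipeline);
    /// device.cmd_set_viewport(cmd, 0, &[viewport]);
    /// device.cmd_set_scissor(cmd, 0, &[scissor]);
    /// // One instance of three vertices, e.g. a single triangle.
    /// device.cmd_draw(cmd, 3, 1, 0, 0);
    /// # }
    /// ```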
#[inline] pub unsafe fn cmd_draw( &self, command_buffer: vk::CommandBuffer, vertex_count: u32, instance_count: u32, first_vertex: u32, first_instance: u32, ) { (self.device_fn_1_0.cmd_draw)( command_buffer, vertex_count, instance_count, first_vertex, first_instance, ); } /// #[inline] pub unsafe fn cmd_draw_indirect( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, draw_count: u32, stride: u32, ) { (self.device_fn_1_0.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride); } /// #[inline] pub unsafe fn cmd_dispatch( &self, command_buffer: vk::CommandBuffer, group_count_x: u32, group_count_y: u32, group_count_z: u32, ) { (self.device_fn_1_0.cmd_dispatch)( command_buffer, group_count_x, group_count_y, group_count_z, ); } /// #[inline] pub unsafe fn cmd_dispatch_indirect( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, ) { (self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset); } /// #[inline] pub unsafe fn cmd_set_viewport( &self, command_buffer: vk::CommandBuffer, first_viewport: u32, viewports: &[vk::Viewport], ) { (self.device_fn_1_0.cmd_set_viewport)( command_buffer, first_viewport, viewports.len() as u32, viewports.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_set_depth_bias( &self, command_buffer: vk::CommandBuffer, constant_factor: f32, clamp: f32, slope_factor: f32, ) { (self.device_fn_1_0.cmd_set_depth_bias)( command_buffer, constant_factor, clamp, slope_factor, ); } /// #[inline] pub unsafe fn cmd_set_blend_constants( &self, command_buffer: vk::CommandBuffer, blend_constants: &[f32; 4], ) { (self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants); } /// #[inline] pub unsafe fn cmd_set_depth_bounds( &self, command_buffer: vk::CommandBuffer, min_depth_bounds: f32, max_depth_bounds: f32, ) { (self.device_fn_1_0.cmd_set_depth_bounds)( command_buffer, min_depth_bounds, max_depth_bounds, ); } /// #[inline] pub unsafe fn cmd_set_stencil_compare_mask( &self, command_buffer: vk::CommandBuffer, face_mask: vk::StencilFaceFlags, compare_mask: u32, ) { (self.device_fn_1_0.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask); } /// #[inline] pub unsafe fn cmd_set_stencil_write_mask( &self, command_buffer: vk::CommandBuffer, face_mask: vk::StencilFaceFlags, write_mask: u32, ) { (self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask); } /// #[inline] pub unsafe fn cmd_set_stencil_reference( &self, command_buffer: vk::CommandBuffer, face_mask: vk::StencilFaceFlags, reference: u32, ) { (self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference); } /// #[inline] pub unsafe fn get_query_pool_results( &self, query_pool: vk::QueryPool, first_query: u32, data: &mut [T], flags: vk::QueryResultFlags, ) -> VkResult<()> { let data_size = mem::size_of_val(data); (self.device_fn_1_0.get_query_pool_results)( self.handle(), query_pool, first_query, data.len() as u32, data_size, data.as_mut_ptr().cast(), mem::size_of::() as _, flags, ) .result() } /// #[inline] pub unsafe fn cmd_begin_query( &self, command_buffer: vk::CommandBuffer, query_pool: vk::QueryPool, query: u32, flags: vk::QueryControlFlags, ) { (self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags); } /// #[inline] pub unsafe fn cmd_end_query( &self, command_buffer: vk::CommandBuffer, query_pool: vk::QueryPool, query: u32, ) { (self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query); } /// #[inline] pub unsafe fn 
cmd_reset_query_pool( &self, command_buffer: vk::CommandBuffer, pool: vk::QueryPool, first_query: u32, query_count: u32, ) { (self.device_fn_1_0.cmd_reset_query_pool)(command_buffer, pool, first_query, query_count); } /// #[inline] pub unsafe fn cmd_write_timestamp( &self, command_buffer: vk::CommandBuffer, pipeline_stage: vk::PipelineStageFlags, query_pool: vk::QueryPool, query: u32, ) { (self.device_fn_1_0.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query); } /// #[inline] pub unsafe fn create_semaphore( &self, create_info: &vk::SemaphoreCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut semaphore = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_semaphore)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), semaphore.as_mut_ptr(), ) .assume_init_on_success(semaphore) } /// /// /// Pipelines are created and returned as described for [Multiple Pipeline Creation]. /// /// [Multiple Pipeline Creation]: https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-multiple #[inline] pub unsafe fn create_graphics_pipelines( &self, pipeline_cache: vk::PipelineCache, create_infos: &[vk::GraphicsPipelineCreateInfo<'_>], allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> Result, (Vec, vk::Result)> { let mut pipelines = Vec::with_capacity(create_infos.len()); let err_code = (self.device_fn_1_0.create_graphics_pipelines)( self.handle(), pipeline_cache, create_infos.len() as u32, create_infos.as_ptr(), allocation_callbacks.as_raw_ptr(), pipelines.as_mut_ptr(), ); pipelines.set_len(create_infos.len()); match err_code { vk::Result::SUCCESS => Ok(pipelines), _ => Err((pipelines, err_code)), } } /// /// /// Pipelines are created and returned as described for [Multiple Pipeline Creation]. 
/// /// [Multiple Pipeline Creation]: https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-multiple #[inline] pub unsafe fn create_compute_pipelines( &self, pipeline_cache: vk::PipelineCache, create_infos: &[vk::ComputePipelineCreateInfo<'_>], allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> Result, (Vec, vk::Result)> { let mut pipelines = Vec::with_capacity(create_infos.len()); let err_code = (self.device_fn_1_0.create_compute_pipelines)( self.handle(), pipeline_cache, create_infos.len() as u32, create_infos.as_ptr(), allocation_callbacks.as_raw_ptr(), pipelines.as_mut_ptr(), ); pipelines.set_len(create_infos.len()); match err_code { vk::Result::SUCCESS => Ok(pipelines), _ => Err((pipelines, err_code)), } } /// #[inline] pub unsafe fn create_buffer( &self, create_info: &vk::BufferCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut buffer = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_buffer)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), buffer.as_mut_ptr(), ) .assume_init_on_success(buffer) } /// #[inline] pub unsafe fn create_pipeline_layout( &self, create_info: &vk::PipelineLayoutCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut pipeline_layout = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_pipeline_layout)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), pipeline_layout.as_mut_ptr(), ) .assume_init_on_success(pipeline_layout) } /// #[inline] pub unsafe fn create_pipeline_cache( &self, create_info: &vk::PipelineCacheCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut pipeline_cache = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_pipeline_cache)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), pipeline_cache.as_mut_ptr(), ) .assume_init_on_success(pipeline_cache) } /// #[inline] pub unsafe fn get_pipeline_cache_data( &self, pipeline_cache: vk::PipelineCache, ) -> VkResult> { read_into_uninitialized_vector(|count, data: *mut u8| { (self.device_fn_1_0.get_pipeline_cache_data)( self.handle(), pipeline_cache, count, data.cast(), ) }) } /// #[inline] pub unsafe fn merge_pipeline_caches( &self, dst_cache: vk::PipelineCache, src_caches: &[vk::PipelineCache], ) -> VkResult<()> { (self.device_fn_1_0.merge_pipeline_caches)( self.handle(), dst_cache, src_caches.len() as u32, src_caches.as_ptr(), ) .result() } /// #[inline] pub unsafe fn map_memory( &self, memory: vk::DeviceMemory, offset: vk::DeviceSize, size: vk::DeviceSize, flags: vk::MemoryMapFlags, ) -> VkResult<*mut ffi::c_void> { let mut data = mem::MaybeUninit::uninit(); (self.device_fn_1_0.map_memory)( self.handle(), memory, offset, size, flags, data.as_mut_ptr(), ) .assume_init_on_success(data) } /// #[inline] pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) { (self.device_fn_1_0.unmap_memory)(self.handle(), memory); } /// #[inline] pub unsafe fn invalidate_mapped_memory_ranges( &self, ranges: &[vk::MappedMemoryRange<'_>], ) -> VkResult<()> { (self.device_fn_1_0.invalidate_mapped_memory_ranges)( self.handle(), ranges.len() as u32, ranges.as_ptr(), ) .result() } /// #[inline] pub unsafe fn flush_mapped_memory_ranges( &self, ranges: &[vk::MappedMemoryRange<'_>], ) -> VkResult<()> { (self.device_fn_1_0.flush_mapped_memory_ranges)( self.handle(), ranges.len() as u32, ranges.as_ptr(), ) .result() } /// #[inline] pub unsafe fn create_framebuffer( &self, create_info: 
&vk::FramebufferCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut framebuffer = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_framebuffer)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), framebuffer.as_mut_ptr(), ) .assume_init_on_success(framebuffer) } /// #[inline] pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue { let mut queue = mem::MaybeUninit::uninit(); (self.device_fn_1_0.get_device_queue)( self.handle(), queue_family_index, queue_index, queue.as_mut_ptr(), ); queue.assume_init() } /// #[inline] pub unsafe fn cmd_pipeline_barrier( &self, command_buffer: vk::CommandBuffer, src_stage_mask: vk::PipelineStageFlags, dst_stage_mask: vk::PipelineStageFlags, dependency_flags: vk::DependencyFlags, memory_barriers: &[vk::MemoryBarrier<'_>], buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>], image_memory_barriers: &[vk::ImageMemoryBarrier<'_>], ) { (self.device_fn_1_0.cmd_pipeline_barrier)( command_buffer, src_stage_mask, dst_stage_mask, dependency_flags, memory_barriers.len() as u32, memory_barriers.as_ptr(), buffer_memory_barriers.len() as u32, buffer_memory_barriers.as_ptr(), image_memory_barriers.len() as u32, image_memory_barriers.as_ptr(), ); } /// #[inline] pub unsafe fn create_render_pass( &self, create_info: &vk::RenderPassCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut renderpass = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_render_pass)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), renderpass.as_mut_ptr(), ) .assume_init_on_success(renderpass) } /// #[inline] pub unsafe fn begin_command_buffer( &self, command_buffer: vk::CommandBuffer, begin_info: &vk::CommandBufferBeginInfo<'_>, ) -> VkResult<()> { (self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result() } /// #[inline] pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> { (self.device_fn_1_0.end_command_buffer)(command_buffer).result() } /// #[inline] pub unsafe fn wait_for_fences( &self, fences: &[vk::Fence], wait_all: bool, timeout: u64, ) -> VkResult<()> { (self.device_fn_1_0.wait_for_fences)( self.handle(), fences.len() as u32, fences.as_ptr(), wait_all as u32, timeout, ) .result() } /// #[inline] pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult { let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence); match err_code { vk::Result::SUCCESS => Ok(true), vk::Result::NOT_READY => Ok(false), _ => Err(err_code), } } /// #[inline] pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> { (self.device_fn_1_0.queue_wait_idle)(queue).result() } /// #[inline] pub unsafe fn queue_submit( &self, queue: vk::Queue, submits: &[vk::SubmitInfo<'_>], fence: vk::Fence, ) -> VkResult<()> { (self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence) .result() } /// #[inline] pub unsafe fn queue_bind_sparse( &self, queue: vk::Queue, bind_info: &[vk::BindSparseInfo<'_>], fence: vk::Fence, ) -> VkResult<()> { (self.device_fn_1_0.queue_bind_sparse)( queue, bind_info.len() as u32, bind_info.as_ptr(), fence, ) .result() } /// #[inline] pub unsafe fn create_buffer_view( &self, create_info: &vk::BufferViewCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut buffer_view = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_buffer_view)( self.handle(), create_info, 
allocation_callbacks.as_raw_ptr(), buffer_view.as_mut_ptr(), ) .assume_init_on_success(buffer_view) } /// #[inline] pub unsafe fn destroy_buffer_view( &self, buffer_view: vk::BufferView, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.device_fn_1_0.destroy_buffer_view)( self.handle(), buffer_view, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn create_image_view( &self, create_info: &vk::ImageViewCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut image_view = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_image_view)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), image_view.as_mut_ptr(), ) .assume_init_on_success(image_view) } /// #[inline] pub unsafe fn allocate_command_buffers( &self, allocate_info: &vk::CommandBufferAllocateInfo<'_>, ) -> VkResult> { let mut buffers = Vec::with_capacity(allocate_info.command_buffer_count as usize); (self.device_fn_1_0.allocate_command_buffers)( self.handle(), allocate_info, buffers.as_mut_ptr(), ) .set_vec_len_on_success(buffers, allocate_info.command_buffer_count as usize) } /// #[inline] pub unsafe fn create_command_pool( &self, create_info: &vk::CommandPoolCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut pool = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_command_pool)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), pool.as_mut_ptr(), ) .assume_init_on_success(pool) } /// #[inline] pub unsafe fn create_query_pool( &self, create_info: &vk::QueryPoolCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut pool = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_query_pool)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), pool.as_mut_ptr(), ) .assume_init_on_success(pool) } /// #[inline] pub unsafe fn create_image( &self, create_info: &vk::ImageCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut image = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_image)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), image.as_mut_ptr(), ) .assume_init_on_success(image) } /// #[inline] pub unsafe fn get_image_subresource_layout( &self, image: vk::Image, subresource: vk::ImageSubresource, ) -> vk::SubresourceLayout { let mut layout = mem::MaybeUninit::uninit(); (self.device_fn_1_0.get_image_subresource_layout)( self.handle(), image, &subresource, layout.as_mut_ptr(), ); layout.assume_init() } /// #[inline] pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements { let mut mem_req = mem::MaybeUninit::uninit(); (self.device_fn_1_0.get_image_memory_requirements)( self.handle(), image, mem_req.as_mut_ptr(), ); mem_req.assume_init() } /// #[inline] pub unsafe fn get_buffer_memory_requirements( &self, buffer: vk::Buffer, ) -> vk::MemoryRequirements { let mut mem_req = mem::MaybeUninit::uninit(); (self.device_fn_1_0.get_buffer_memory_requirements)( self.handle(), buffer, mem_req.as_mut_ptr(), ); mem_req.assume_init() } /// #[inline] pub unsafe fn allocate_memory( &self, allocate_info: &vk::MemoryAllocateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut memory = mem::MaybeUninit::uninit(); (self.device_fn_1_0.allocate_memory)( self.handle(), allocate_info, allocation_callbacks.as_raw_ptr(), memory.as_mut_ptr(), ) .assume_init_on_success(memory) } /// #[inline] pub unsafe fn 
create_shader_module( &self, create_info: &vk::ShaderModuleCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut shader = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_shader_module)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), shader.as_mut_ptr(), ) .assume_init_on_success(shader) } /// #[inline] pub unsafe fn create_fence( &self, create_info: &vk::FenceCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut fence = mem::MaybeUninit::uninit(); (self.device_fn_1_0.create_fence)( self.handle(), create_info, allocation_callbacks.as_raw_ptr(), fence.as_mut_ptr(), ) .assume_init_on_success(fence) } /// #[inline] pub unsafe fn bind_buffer_memory( &self, buffer: vk::Buffer, device_memory: vk::DeviceMemory, offset: vk::DeviceSize, ) -> VkResult<()> { (self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset) .result() } /// #[inline] pub unsafe fn bind_image_memory( &self, image: vk::Image, device_memory: vk::DeviceMemory, offset: vk::DeviceSize, ) -> VkResult<()> { (self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset).result() } /// #[inline] pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D { let mut granularity = mem::MaybeUninit::uninit(); (self.device_fn_1_0.get_render_area_granularity)( self.handle(), render_pass, granularity.as_mut_ptr(), ); granularity.assume_init() } /// #[inline] pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize { let mut committed_memory_in_bytes = mem::MaybeUninit::uninit(); (self.device_fn_1_0.get_device_memory_commitment)( self.handle(), memory, committed_memory_in_bytes.as_mut_ptr(), ); committed_memory_in_bytes.assume_init() } /// #[inline] pub unsafe fn get_image_sparse_memory_requirements( &self, image: vk::Image, ) -> Vec { read_into_uninitialized_vector(|count, data| { (self.device_fn_1_0.get_image_sparse_memory_requirements)( self.handle(), image, count, data, ); vk::Result::SUCCESS }) // The closure always returns SUCCESS .unwrap() } } ash-0.38.0+1.3.281/src/entry.rs000064400000000000000000000326201046102023000135760ustar 00000000000000use crate::instance::Instance; #[cfg(doc)] use crate::khr; use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::ffi; use core::fmt; use core::mem; use core::ptr; #[cfg(feature = "loaded")] use libloading::Library; /// Holds the Vulkan functions independent of a particular instance #[derive(Clone)] pub struct Entry { static_fn: crate::StaticFn, entry_fn_1_0: crate::EntryFnV1_0, entry_fn_1_1: crate::EntryFnV1_1, #[cfg(feature = "loaded")] _lib_guard: Option>, } /// Vulkan core 1.0 impl Entry { /// Load default Vulkan library for the current platform /// /// Prefer this over [`linked()`][Self::linked()] when your application can gracefully handle /// environments that lack Vulkan support, and when the build environment might not have Vulkan /// development packages installed (e.g. the Vulkan SDK, or Ubuntu's `libvulkan-dev`). /// /// # Safety /// /// `dlopen`ing native libraries is inherently unsafe. The safety guidelines /// for [`Library::new()`] and [`Library::get()`] apply here. /// /// No Vulkan functions loaded directly or indirectly from this [`Entry`] /// may be called after it is [dropped][drop()]. 
/// /// # Example /// /// ```no_run /// use ash::{vk, Entry}; /// # fn main() -> Result<(), Box> { /// let entry = unsafe { Entry::load()? }; /// let app_info = vk::ApplicationInfo { /// api_version: vk::make_api_version(0, 1, 0, 0), /// ..Default::default() /// }; /// let create_info = vk::InstanceCreateInfo { /// p_application_info: &app_info, /// ..Default::default() /// }; /// let instance = unsafe { entry.create_instance(&create_info, None)? }; /// # Ok(()) } /// ``` #[cfg(feature = "loaded")] #[cfg_attr(docsrs, doc(cfg(feature = "loaded")))] pub unsafe fn load() -> Result { #[cfg(windows)] const LIB_PATH: &str = "vulkan-1.dll"; #[cfg(all( unix, not(any( target_os = "macos", target_os = "ios", target_os = "android", target_os = "fuchsia" )) ))] const LIB_PATH: &str = "libvulkan.so.1"; #[cfg(any(target_os = "android", target_os = "fuchsia"))] const LIB_PATH: &str = "libvulkan.so"; #[cfg(any(target_os = "macos", target_os = "ios"))] const LIB_PATH: &str = "libvulkan.dylib"; Self::load_from(LIB_PATH) } /// Load entry points from a Vulkan loader linked at compile time /// /// Compared to [`load()`][Self::load()], this is infallible, but requires that the build /// environment have Vulkan development packages installed (e.g. the Vulkan SDK, or Ubuntu's /// `libvulkan-dev`), and prevents the resulting binary from starting in environments that do not /// support Vulkan. /// /// Note that instance/device functions are still fetched via `vkGetInstanceProcAddr` and /// `vkGetDeviceProcAddr` for maximum performance. /// /// Any Vulkan function acquired directly or indirectly from this [`Entry`] may be called after it /// is [dropped][drop()]. /// /// # Example /// /// ```no_run /// use ash::{vk, Entry}; /// # fn main() -> Result<(), Box> { /// let entry = Entry::linked(); /// let app_info = vk::ApplicationInfo { /// api_version: vk::make_api_version(0, 1, 0, 0), /// ..Default::default() /// }; /// let create_info = vk::InstanceCreateInfo { /// p_application_info: &app_info, /// ..Default::default() /// }; /// let instance = unsafe { entry.create_instance(&create_info, None)? }; /// # Ok(()) } /// ``` #[cfg(feature = "linked")] #[cfg_attr(docsrs, doc(cfg(feature = "linked")))] pub fn linked() -> Self { // Sound because we're linking to Vulkan, which provides a vkGetInstanceProcAddr that has // defined behavior in this use. unsafe { Self::from_static_fn(crate::StaticFn { get_instance_proc_addr: vkGetInstanceProcAddr, }) } } /// Load Vulkan library at `path` /// /// # Safety /// /// `dlopen`ing native libraries is inherently unsafe. The safety guidelines /// for [`Library::new()`] and [`Library::get()`] apply here. /// /// No Vulkan functions loaded directly or indirectly from this [`Entry`] /// may be called after it is [dropped][drop()]. #[cfg(feature = "loaded")] #[cfg_attr(docsrs, doc(cfg(feature = "loaded")))] pub unsafe fn load_from(path: impl AsRef) -> Result { let lib = Library::new(path) .map_err(LoadingError::LibraryLoadFailure) .map(alloc::sync::Arc::new)?; let static_fn = crate::StaticFn::load_checked(|name| { lib.get(name.to_bytes_with_nul()) .map(|symbol| *symbol) .unwrap_or(ptr::null_mut()) })?; Ok(Self { _lib_guard: Some(lib), ..Self::from_static_fn(static_fn) }) } /// Load entry points based on an already-loaded [`crate::StaticFn`] /// /// # Safety /// /// `static_fn` must contain valid function pointers that comply with the semantics specified /// by Vulkan 1.0, which must remain valid for at least the lifetime of the returned [`Entry`]. 
pub unsafe fn from_static_fn(static_fn: crate::StaticFn) -> Self { let load_fn = move |name: &ffi::CStr| { mem::transmute((static_fn.get_instance_proc_addr)( vk::Instance::null(), name.as_ptr(), )) }; Self::from_parts_1_1( static_fn, crate::EntryFnV1_0::load(load_fn), crate::EntryFnV1_1::load(load_fn), ) } #[inline] pub fn from_parts_1_1( static_fn: crate::StaticFn, entry_fn_1_0: crate::EntryFnV1_0, entry_fn_1_1: crate::EntryFnV1_1, ) -> Self { Self { static_fn, entry_fn_1_0, entry_fn_1_1, #[cfg(feature = "loaded")] _lib_guard: None, } } #[inline] pub fn fp_v1_0(&self) -> &crate::EntryFnV1_0 { &self.entry_fn_1_0 } #[inline] pub fn static_fn(&self) -> &crate::StaticFn { &self.static_fn } /// /// /// # Example /// /// ```no_run /// # use ash::{Entry, vk}; /// # fn main() -> Result<(), Box> { /// let entry = Entry::linked(); /// match unsafe { entry.try_enumerate_instance_version() }? { /// // Vulkan 1.1+ /// Some(version) => { /// let major = vk::version_major(version); /// let minor = vk::version_minor(version); /// let patch = vk::version_patch(version); /// }, /// // Vulkan 1.0 /// None => {}, /// } /// # Ok(()) } /// ``` #[inline] pub unsafe fn try_enumerate_instance_version(&self) -> VkResult> { let enumerate_instance_version: Option = { let name = ffi::CStr::from_bytes_with_nul_unchecked(b"vkEnumerateInstanceVersion\0"); mem::transmute((self.static_fn.get_instance_proc_addr)( vk::Instance::null(), name.as_ptr(), )) }; if let Some(enumerate_instance_version) = enumerate_instance_version { let mut api_version = mem::MaybeUninit::uninit(); (enumerate_instance_version)(api_version.as_mut_ptr()) .assume_init_on_success(api_version) .map(Some) } else { Ok(None) } } /// /// /// # Safety /// /// The resulting [`Instance`] and any function-pointer objects (e.g. [`Device`][crate::Device] /// and extensions like [`khr::swapchain::Device`]) loaded from it may not be used after /// this [`Entry`] object is dropped, unless it was crated using [`Entry::linked()`] or /// [`Entry::from_parts_1_1()`]. /// /// [`Instance`] does _not_ implement [drop][drop()] semantics and can only be destroyed via /// [`destroy_instance()`][Instance::destroy_instance()]. 
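    ///
    /// # Example
    ///
    /// A minimal create/teardown sketch, assuming a Vulkan-capable environment and
    /// default creation parameters (layer/extension setup and error handling elided):
    ///
    /// ```no_run
    /// # use ash::{vk, Entry};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let entry = Entry::linked();
    /// let create_info = vk::InstanceCreateInfo::default();
    /// let instance = unsafe { entry.create_instance(&create_info, None)? };
    /// // ... create devices and do work with `instance` ...
    /// unsafe { instance.destroy_instance(None) };
    /// # Ok(()) }
    /// ```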
#[inline] pub unsafe fn create_instance( &self, create_info: &vk::InstanceCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut instance = mem::MaybeUninit::uninit(); let instance = (self.entry_fn_1_0.create_instance)( create_info, allocation_callbacks.as_raw_ptr(), instance.as_mut_ptr(), ) .assume_init_on_success(instance)?; Ok(Instance::load(&self.static_fn, instance)) } /// #[inline] pub unsafe fn enumerate_instance_layer_properties(&self) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.entry_fn_1_0.enumerate_instance_layer_properties)(count, data) }) } /// #[inline] pub unsafe fn enumerate_instance_extension_properties( &self, layer_name: Option<&ffi::CStr>, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.entry_fn_1_0.enumerate_instance_extension_properties)( layer_name.map_or(ptr::null(), |str| str.as_ptr()), count, data, ) }) } /// #[inline] pub unsafe fn get_instance_proc_addr( &self, instance: vk::Instance, p_name: *const ffi::c_char, ) -> vk::PFN_vkVoidFunction { (self.static_fn.get_instance_proc_addr)(instance, p_name) } } /// Vulkan core 1.1 impl Entry { #[inline] pub fn fp_v1_1(&self) -> &crate::EntryFnV1_1 { &self.entry_fn_1_1 } #[deprecated = "This function is unavailable and therefore panics on Vulkan 1.0, please use `try_enumerate_instance_version()` instead"] /// /// /// Please use [`try_enumerate_instance_version()`][Self::try_enumerate_instance_version()] instead. #[inline] pub unsafe fn enumerate_instance_version(&self) -> VkResult { let mut api_version = mem::MaybeUninit::uninit(); (self.entry_fn_1_1.enumerate_instance_version)(api_version.as_mut_ptr()) .assume_init_on_success(api_version) } } #[cfg(feature = "linked")] #[cfg_attr(docsrs, doc(cfg(feature = "linked")))] impl Default for Entry { #[inline] fn default() -> Self { Self::linked() } } impl crate::StaticFn { pub fn load_checked(mut _f: F) -> Result where F: FnMut(&ffi::CStr) -> *const ffi::c_void, { Ok(Self { get_instance_proc_addr: unsafe { let cname = ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetInstanceProcAddr\0"); let val = _f(cname); if val.is_null() { return Err(MissingEntryPoint); } else { mem::transmute(val) } }, }) } } #[derive(Clone, Debug)] pub struct MissingEntryPoint; impl fmt::Display for MissingEntryPoint { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "Cannot load `vkGetInstanceProcAddr` symbol from library") } } #[cfg(feature = "std")] // TODO: implement when error_in_core is stabilized impl std::error::Error for MissingEntryPoint {} #[cfg(feature = "linked")] extern "system" { fn vkGetInstanceProcAddr( instance: vk::Instance, name: *const ffi::c_char, ) -> vk::PFN_vkVoidFunction; } #[cfg(feature = "loaded")] mod loaded { use super::*; #[derive(Debug)] #[cfg_attr(docsrs, doc(cfg(feature = "loaded")))] pub enum LoadingError { LibraryLoadFailure(libloading::Error), MissingEntryPoint(MissingEntryPoint), } impl fmt::Display for LoadingError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::LibraryLoadFailure(err) => fmt::Display::fmt(err, f), Self::MissingEntryPoint(err) => fmt::Display::fmt(err, f), } } } #[cfg(feature = "std")] impl std::error::Error for LoadingError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { Some(match self { Self::LibraryLoadFailure(err) => err, Self::MissingEntryPoint(err) => err, }) } } impl From for LoadingError { fn from(err: MissingEntryPoint) -> Self { Self::MissingEntryPoint(err) } } } #[cfg(feature = 
"loaded")] pub use self::loaded::*; ash-0.38.0+1.3.281/src/extensions/amd/buffer_marker.rs000064400000000000000000000013101046102023000201770ustar 00000000000000//! use crate::vk; impl crate::amd::buffer_marker::Device { /// #[inline] pub unsafe fn cmd_write_buffer_marker( &self, command_buffer: vk::CommandBuffer, pipeline_stage: vk::PipelineStageFlags, dst_buffer: vk::Buffer, dst_offset: vk::DeviceSize, marker: u32, ) { (self.fp.cmd_write_buffer_marker_amd)( command_buffer, pipeline_stage, dst_buffer, dst_offset, marker, ) } } ash-0.38.0+1.3.281/src/extensions/amd/mod.rs000064400000000000000000000000541046102023000161500ustar 00000000000000pub mod buffer_marker; pub mod shader_info; ash-0.38.0+1.3.281/src/extensions/amd/shader_info.rs000064400000000000000000000044131046102023000176550ustar 00000000000000//! use crate::prelude::*; use crate::vk; use alloc::vec::Vec; use core::mem; impl crate::amd::shader_info::Device { /// with [`vk::ShaderInfoTypeAMD::STATISTICS`] #[inline] pub unsafe fn get_shader_info_statistics( &self, pipeline: vk::Pipeline, shader_stage: vk::ShaderStageFlags, ) -> VkResult { let mut info = mem::MaybeUninit::::uninit(); let mut size = mem::size_of_val(&info); (self.fp.get_shader_info_amd)( self.handle, pipeline, shader_stage, vk::ShaderInfoTypeAMD::STATISTICS, &mut size, info.as_mut_ptr().cast(), ) .result()?; assert_eq!(size, mem::size_of_val(&info)); Ok(info.assume_init()) } /// with [`vk::ShaderInfoTypeAMD::BINARY`] #[inline] pub unsafe fn get_shader_info_binary( &self, pipeline: vk::Pipeline, shader_stage: vk::ShaderStageFlags, ) -> VkResult> { read_into_uninitialized_vector(|count, data: *mut u8| { (self.fp.get_shader_info_amd)( self.handle, pipeline, shader_stage, vk::ShaderInfoTypeAMD::BINARY, count, data.cast(), ) }) } /// with [`vk::ShaderInfoTypeAMD::DISASSEMBLY`] #[inline] pub unsafe fn get_shader_info_disassembly( &self, pipeline: vk::Pipeline, shader_stage: vk::ShaderStageFlags, ) -> VkResult> { read_into_uninitialized_vector(|count, data: *mut u8| { (self.fp.get_shader_info_amd)( self.handle, pipeline, shader_stage, vk::ShaderInfoTypeAMD::DISASSEMBLY, count, data.cast(), ) }) } } ash-0.38.0+1.3.281/src/extensions/amdx/mod.rs000064400000000000000000000000301046102023000163320ustar 00000000000000pub mod shader_enqueue; ash-0.38.0+1.3.281/src/extensions/amdx/shader_enqueue.rs000064400000000000000000000102451046102023000205610ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::amdx::shader_enqueue::Device { /// /// /// Pipelines are created and returned as described for [Multiple Pipeline Creation]. 
/// /// [Multiple Pipeline Creation]: https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-multiple #[inline] pub unsafe fn create_execution_graph_pipelines( &self, pipeline_cache: vk::PipelineCache, create_infos: &[vk::ExecutionGraphPipelineCreateInfoAMDX<'_>], allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> Result, (Vec, vk::Result)> { let mut pipelines = Vec::with_capacity(create_infos.len()); let err_code = (self.fp.create_execution_graph_pipelines_amdx)( self.handle, pipeline_cache, create_infos.len() as u32, create_infos.as_ptr(), allocation_callbacks.as_raw_ptr(), pipelines.as_mut_ptr(), ); pipelines.set_len(create_infos.len()); match err_code { vk::Result::SUCCESS => Ok(pipelines), _ => Err((pipelines, err_code)), } } /// #[inline] pub unsafe fn get_execution_graph_pipeline_scratch_size( &self, execution_graph: vk::Pipeline, size_info: &mut vk::ExecutionGraphPipelineScratchSizeAMDX<'_>, ) -> VkResult<()> { (self.fp.get_execution_graph_pipeline_scratch_size_amdx)( self.handle, execution_graph, size_info, ) .result() } /// #[inline] pub unsafe fn get_execution_graph_pipeline_node_index( &self, execution_graph: vk::Pipeline, node_info: &vk::PipelineShaderStageNodeCreateInfoAMDX<'_>, ) -> VkResult { let mut node_index = mem::MaybeUninit::uninit(); (self.fp.get_execution_graph_pipeline_node_index_amdx)( self.handle, execution_graph, node_info, node_index.as_mut_ptr(), ) .assume_init_on_success(node_index) } /// #[inline] pub unsafe fn cmd_initialize_graph_scratch_memory( &self, command_buffer: vk::CommandBuffer, scratch: vk::DeviceAddress, ) { (self.fp.cmd_initialize_graph_scratch_memory_amdx)(command_buffer, scratch) } /// #[inline] pub unsafe fn cmd_dispatch_graph( &self, command_buffer: vk::CommandBuffer, scratch: vk::DeviceAddress, count_info: &vk::DispatchGraphCountInfoAMDX, ) { (self.fp.cmd_dispatch_graph_amdx)(command_buffer, scratch, count_info) } /// #[inline] pub unsafe fn cmd_dispatch_graph_indirect( &self, command_buffer: vk::CommandBuffer, scratch: vk::DeviceAddress, count_info: &vk::DispatchGraphCountInfoAMDX, ) { (self.fp.cmd_dispatch_graph_indirect_amdx)(command_buffer, scratch, count_info) } /// #[inline] pub unsafe fn cmd_dispatch_graph_indirect_count( &self, command_buffer: vk::CommandBuffer, scratch: vk::DeviceAddress, count_info: vk::DeviceAddress, ) { (self.fp.cmd_dispatch_graph_indirect_count_amdx)(command_buffer, scratch, count_info) } } ash-0.38.0+1.3.281/src/extensions/android/external_memory_android_hardware_buffer.rs000064400000000000000000000023501046102023000263710ustar 00000000000000//! 
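//!
//! A minimal usage sketch, assuming an initialized `instance`/`device` pair with this
//! extension enabled and a valid `*const vk::AHardwareBuffer` obtained from the Android
//! NDK elsewhere:
//!
//! ```no_run
//! # use ash::vk;
//! # unsafe fn query(
//! #     instance: &ash::Instance,
//! #     device: &ash::Device,
//! #     ahb: *const vk::AHardwareBuffer,
//! # ) -> ash::prelude::VkResult<()> {
//! let ext = ash::android::external_memory_android_hardware_buffer::Device::new(instance, device);
//! let mut props = vk::AndroidHardwareBufferPropertiesANDROID::default();
//! ext.get_android_hardware_buffer_properties(ahb, &mut props)?;
//! // `allocation_size` and `memory_type_bits` feed the subsequent memory allocation.
//! let _required_size = props.allocation_size;
//! # Ok(()) }
//! ```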
use crate::prelude::*; use crate::vk; use core::mem; impl crate::android::external_memory_android_hardware_buffer::Device { /// #[inline] pub unsafe fn get_android_hardware_buffer_properties( &self, buffer: *const vk::AHardwareBuffer, properties: &mut vk::AndroidHardwareBufferPropertiesANDROID<'_>, ) -> VkResult<()> { (self.fp.get_android_hardware_buffer_properties_android)(self.handle, buffer, properties) .result() } /// #[inline] pub unsafe fn get_memory_android_hardware_buffer( &self, info: &vk::MemoryGetAndroidHardwareBufferInfoANDROID<'_>, ) -> VkResult<*mut vk::AHardwareBuffer> { let mut buffer = mem::MaybeUninit::uninit(); (self.fp.get_memory_android_hardware_buffer_android)(self.handle, info, buffer.as_mut_ptr()) .assume_init_on_success(buffer) } } ash-0.38.0+1.3.281/src/extensions/android/mod.rs000064400000000000000000000000611046102023000170250ustar 00000000000000pub mod external_memory_android_hardware_buffer; ash-0.38.0+1.3.281/src/extensions/ext/acquire_drm_display.rs000064400000000000000000000021061046102023000214500ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; impl crate::ext::acquire_drm_display::Instance { /// #[inline] pub unsafe fn acquire_drm_display( &self, physical_device: vk::PhysicalDevice, drm_fd: i32, display: vk::DisplayKHR, ) -> VkResult<()> { (self.fp.acquire_drm_display_ext)(physical_device, drm_fd, display).result() } /// #[inline] pub unsafe fn get_drm_display( &self, physical_device: vk::PhysicalDevice, drm_fd: i32, connector_id: u32, ) -> VkResult { let mut display = mem::MaybeUninit::uninit(); (self.fp.get_drm_display_ext)(physical_device, drm_fd, connector_id, display.as_mut_ptr()) .assume_init_on_success(display) } } ash-0.38.0+1.3.281/src/extensions/ext/buffer_device_address.rs000064400000000000000000000007761046102023000217400ustar 00000000000000//! use crate::vk; impl crate::ext::buffer_device_address::Device { /// #[inline] pub unsafe fn get_buffer_device_address( &self, info: &vk::BufferDeviceAddressInfoEXT<'_>, ) -> vk::DeviceAddress { (self.fp.get_buffer_device_address_ext)(self.handle, info) } } ash-0.38.0+1.3.281/src/extensions/ext/calibrated_timestamps.rs000064400000000000000000000032171046102023000217740ustar 00000000000000//! use crate::prelude::*; use crate::vk; use alloc::vec::Vec; use core::mem; impl crate::ext::calibrated_timestamps::Device { /// /// /// Returns a tuple containing `(timestamps, max_deviation)` #[inline] pub unsafe fn get_calibrated_timestamps( &self, info: &[vk::CalibratedTimestampInfoEXT<'_>], ) -> VkResult<(Vec, u64)> { let mut timestamps = Vec::with_capacity(info.len()); let mut max_deviation = mem::MaybeUninit::uninit(); let max_deviation = (self.fp.get_calibrated_timestamps_ext)( self.handle, info.len() as u32, info.as_ptr(), timestamps.as_mut_ptr(), max_deviation.as_mut_ptr(), ) .assume_init_on_success(max_deviation)?; timestamps.set_len(info.len()); Ok((timestamps, max_deviation)) } } impl crate::ext::calibrated_timestamps::Instance { /// #[inline] pub unsafe fn get_physical_device_calibrateable_time_domains( &self, physical_device: vk::PhysicalDevice, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_calibrateable_time_domains_ext)( physical_device, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/ext/debug_marker.rs000075500000000000000000000030061046102023000200620ustar 00000000000000//! 
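//!
//! A minimal usage sketch, assuming an initialized `instance`/`device` pair and a command
//! buffer in the recording state (this extension is superseded by `VK_EXT_debug_utils`;
//! the `marker_name` builder is assumed from ash's usual setter naming):
//!
//! ```no_run
//! # use ash::vk;
//! # unsafe fn annotate(instance: &ash::Instance, device: &ash::Device, cmd: vk::CommandBuffer) {
//! let ext = ash::ext::debug_marker::Device::new(instance, device);
//! let name = std::ffi::CStr::from_bytes_with_nul(b"shadow pass\0").unwrap();
//! let marker = vk::DebugMarkerMarkerInfoEXT::default().marker_name(name);
//! ext.cmd_debug_marker_begin(cmd, &marker);
//! // ... record the annotated work ...
//! ext.cmd_debug_marker_end(cmd);
//! # }
//! ```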
use crate::prelude::*; use crate::vk; impl crate::ext::debug_marker::Device { /// #[inline] pub unsafe fn debug_marker_set_object_name( &self, name_info: &vk::DebugMarkerObjectNameInfoEXT<'_>, ) -> VkResult<()> { (self.fp.debug_marker_set_object_name_ext)(self.handle, name_info).result() } /// #[inline] pub unsafe fn cmd_debug_marker_begin( &self, command_buffer: vk::CommandBuffer, marker_info: &vk::DebugMarkerMarkerInfoEXT<'_>, ) { (self.fp.cmd_debug_marker_begin_ext)(command_buffer, marker_info); } /// #[inline] pub unsafe fn cmd_debug_marker_end(&self, command_buffer: vk::CommandBuffer) { (self.fp.cmd_debug_marker_end_ext)(command_buffer); } /// #[inline] pub unsafe fn cmd_debug_marker_insert( &self, command_buffer: vk::CommandBuffer, marker_info: &vk::DebugMarkerMarkerInfoEXT<'_>, ) { (self.fp.cmd_debug_marker_insert_ext)(command_buffer, marker_info); } } ash-0.38.0+1.3.281/src/extensions/ext/debug_report.rs000075500000000000000000000025111046102023000201140ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::ext::debug_report::Instance { /// #[inline] pub unsafe fn destroy_debug_report_callback( &self, debug: vk::DebugReportCallbackEXT, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_debug_report_callback_ext)( self.handle, debug, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn create_debug_report_callback( &self, create_info: &vk::DebugReportCallbackCreateInfoEXT<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut debug_cb = mem::MaybeUninit::uninit(); (self.fp.create_debug_report_callback_ext)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), debug_cb.as_mut_ptr(), ) .assume_init_on_success(debug_cb) } } ash-0.38.0+1.3.281/src/extensions/ext/debug_utils.rs000075500000000000000000000106731046102023000177510ustar 00000000000000//! 
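//!
//! A minimal usage sketch, assuming an initialized `instance`/`device` pair with
//! `VK_EXT_debug_utils` enabled, a command buffer in the recording state, and ash's
//! usual `label_name` setter on [`vk::DebugUtilsLabelEXT`]:
//!
//! ```no_run
//! # use ash::vk;
//! # unsafe fn annotate(instance: &ash::Instance, device: &ash::Device, cmd: vk::CommandBuffer) {
//! let ext = ash::ext::debug_utils::Device::new(instance, device);
//! let name = std::ffi::CStr::from_bytes_with_nul(b"main pass\0").unwrap();
//! let label = vk::DebugUtilsLabelEXT::default().label_name(name);
//! ext.cmd_begin_debug_utils_label(cmd, &label);
//! // ... record the labelled work ...
//! ext.cmd_end_debug_utils_label(cmd);
//! # }
//! ```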
use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::ext::debug_utils::Device { /// #[inline] pub unsafe fn set_debug_utils_object_name( &self, name_info: &vk::DebugUtilsObjectNameInfoEXT<'_>, ) -> VkResult<()> { (self.fp.set_debug_utils_object_name_ext)(self.handle, name_info).result() } /// #[inline] pub unsafe fn set_debug_utils_object_tag( &self, tag_info: &vk::DebugUtilsObjectTagInfoEXT<'_>, ) -> VkResult<()> { (self.fp.set_debug_utils_object_tag_ext)(self.handle, tag_info).result() } /// #[inline] pub unsafe fn cmd_begin_debug_utils_label( &self, command_buffer: vk::CommandBuffer, label: &vk::DebugUtilsLabelEXT<'_>, ) { (self.fp.cmd_begin_debug_utils_label_ext)(command_buffer, label); } /// #[inline] pub unsafe fn cmd_end_debug_utils_label(&self, command_buffer: vk::CommandBuffer) { (self.fp.cmd_end_debug_utils_label_ext)(command_buffer); } /// #[inline] pub unsafe fn cmd_insert_debug_utils_label( &self, command_buffer: vk::CommandBuffer, label: &vk::DebugUtilsLabelEXT<'_>, ) { (self.fp.cmd_insert_debug_utils_label_ext)(command_buffer, label); } /// #[inline] pub unsafe fn queue_begin_debug_utils_label( &self, queue: vk::Queue, label: &vk::DebugUtilsLabelEXT<'_>, ) { (self.fp.queue_begin_debug_utils_label_ext)(queue, label); } /// #[inline] pub unsafe fn queue_end_debug_utils_label(&self, queue: vk::Queue) { (self.fp.queue_end_debug_utils_label_ext)(queue); } /// #[inline] pub unsafe fn queue_insert_debug_utils_label( &self, queue: vk::Queue, label: &vk::DebugUtilsLabelEXT<'_>, ) { (self.fp.queue_insert_debug_utils_label_ext)(queue, label); } } impl crate::ext::debug_utils::Instance { /// #[inline] pub unsafe fn create_debug_utils_messenger( &self, create_info: &vk::DebugUtilsMessengerCreateInfoEXT<'_>, allocator: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut messenger = mem::MaybeUninit::uninit(); (self.fp.create_debug_utils_messenger_ext)( self.handle, create_info, allocator.as_raw_ptr(), messenger.as_mut_ptr(), ) .assume_init_on_success(messenger) } /// #[inline] pub unsafe fn destroy_debug_utils_messenger( &self, messenger: vk::DebugUtilsMessengerEXT, allocator: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_debug_utils_messenger_ext)(self.handle, messenger, allocator.as_raw_ptr()); } /// #[inline] pub unsafe fn submit_debug_utils_message( &self, message_severity: vk::DebugUtilsMessageSeverityFlagsEXT, message_types: vk::DebugUtilsMessageTypeFlagsEXT, callback_data: &vk::DebugUtilsMessengerCallbackDataEXT<'_>, ) { (self.fp.submit_debug_utils_message_ext)( self.handle, message_severity, message_types, callback_data, ); } } ash-0.38.0+1.3.281/src/extensions/ext/descriptor_buffer.rs000064400000000000000000000136601046102023000211460ustar 00000000000000//! 
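//!
//! A minimal sizing sketch, assuming an initialized `instance`/`device` pair with
//! `VK_EXT_descriptor_buffer` enabled and a descriptor set layout created with the
//! descriptor-buffer layout flag:
//!
//! ```no_run
//! # use ash::vk;
//! # unsafe fn size_layout(
//! #     instance: &ash::Instance,
//! #     device: &ash::Device,
//! #     layout: vk::DescriptorSetLayout,
//! # ) {
//! let ext = ash::ext::descriptor_buffer::Device::new(instance, device);
//! // Total bytes required to back one set with this layout...
//! let size = ext.get_descriptor_set_layout_size(layout);
//! // ...and the byte offset of binding 0 inside that block.
//! let offset = ext.get_descriptor_set_layout_binding_offset(layout, 0);
//! let _ = (size, offset);
//! # }
//! ```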
use crate::prelude::*; use crate::vk; use core::mem; impl crate::ext::descriptor_buffer::Device { /// #[inline] pub unsafe fn get_descriptor_set_layout_size( &self, layout: vk::DescriptorSetLayout, ) -> vk::DeviceSize { let mut count = mem::MaybeUninit::uninit(); (self.fp.get_descriptor_set_layout_size_ext)(self.handle, layout, count.as_mut_ptr()); count.assume_init() } /// #[inline] pub unsafe fn get_descriptor_set_layout_binding_offset( &self, layout: vk::DescriptorSetLayout, binding: u32, ) -> vk::DeviceSize { let mut offset = mem::MaybeUninit::uninit(); (self.fp.get_descriptor_set_layout_binding_offset_ext)( self.handle, layout, binding, offset.as_mut_ptr(), ); offset.assume_init() } /// #[inline] pub unsafe fn get_descriptor( &self, descriptor_info: &vk::DescriptorGetInfoEXT<'_>, descriptor: &mut [u8], ) { (self.fp.get_descriptor_ext)( self.handle, descriptor_info, descriptor.len(), descriptor.as_mut_ptr().cast(), ) } /// #[inline] pub unsafe fn cmd_bind_descriptor_buffers( &self, command_buffer: vk::CommandBuffer, binding_info: &[vk::DescriptorBufferBindingInfoEXT<'_>], ) { (self.fp.cmd_bind_descriptor_buffers_ext)( command_buffer, binding_info.len() as u32, binding_info.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_descriptor_buffer_offsets( &self, command_buffer: vk::CommandBuffer, pipeline_bind_point: vk::PipelineBindPoint, layout: vk::PipelineLayout, first_set: u32, buffer_indices: &[u32], offsets: &[vk::DeviceSize], ) { assert_eq!(buffer_indices.len(), offsets.len()); (self.fp.cmd_set_descriptor_buffer_offsets_ext)( command_buffer, pipeline_bind_point, layout, first_set, buffer_indices.len() as u32, buffer_indices.as_ptr(), offsets.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_bind_descriptor_buffer_embedded_samplers( &self, command_buffer: vk::CommandBuffer, pipeline_bind_point: vk::PipelineBindPoint, layout: vk::PipelineLayout, set: u32, ) { (self.fp.cmd_bind_descriptor_buffer_embedded_samplers_ext)( command_buffer, pipeline_bind_point, layout, set, ) } /// #[inline] pub unsafe fn get_buffer_opaque_capture_descriptor_data( &self, info: &vk::BufferCaptureDescriptorDataInfoEXT<'_>, data: &mut [u8], ) -> VkResult<()> { (self.fp.get_buffer_opaque_capture_descriptor_data_ext)( self.handle, info, data.as_mut_ptr().cast(), ) .result() } /// #[inline] pub unsafe fn get_image_opaque_capture_descriptor_data( &self, info: &vk::ImageCaptureDescriptorDataInfoEXT<'_>, data: &mut [u8], ) -> VkResult<()> { (self.fp.get_image_opaque_capture_descriptor_data_ext)( self.handle, info, data.as_mut_ptr().cast(), ) .result() } /// #[inline] pub unsafe fn get_image_view_opaque_capture_descriptor_data( &self, info: &vk::ImageViewCaptureDescriptorDataInfoEXT<'_>, data: &mut [u8], ) -> VkResult<()> { (self.fp.get_image_view_opaque_capture_descriptor_data_ext)( self.handle, info, data.as_mut_ptr().cast(), ) .result() } /// #[inline] pub unsafe fn get_sampler_opaque_capture_descriptor_data( &self, info: &vk::SamplerCaptureDescriptorDataInfoEXT<'_>, data: &mut [u8], ) -> VkResult<()> { (self.fp.get_sampler_opaque_capture_descriptor_data_ext)( self.handle, info, data.as_mut_ptr().cast(), ) .result() } /// #[inline] pub unsafe fn get_acceleration_structure_opaque_capture_descriptor_data( &self, info: &vk::AccelerationStructureCaptureDescriptorDataInfoEXT<'_>, data: &mut [u8], ) -> VkResult<()> { (self .fp .get_acceleration_structure_opaque_capture_descriptor_data_ext)( self.handle, info, data.as_mut_ptr().cast(), ) .result() } } 
ash-0.38.0+1.3.281/src/extensions/ext/extended_dynamic_state.rs000064400000000000000000000130711046102023000221370ustar 00000000000000//! use crate::vk; use core::ptr; impl crate::ext::extended_dynamic_state::Device { /// #[inline] pub unsafe fn cmd_set_cull_mode( &self, command_buffer: vk::CommandBuffer, cull_mode: vk::CullModeFlags, ) { (self.fp.cmd_set_cull_mode_ext)(command_buffer, cull_mode) } /// #[inline] pub unsafe fn cmd_set_front_face( &self, command_buffer: vk::CommandBuffer, front_face: vk::FrontFace, ) { (self.fp.cmd_set_front_face_ext)(command_buffer, front_face) } /// #[inline] pub unsafe fn cmd_set_primitive_topology( &self, command_buffer: vk::CommandBuffer, primitive_topology: vk::PrimitiveTopology, ) { (self.fp.cmd_set_primitive_topology_ext)(command_buffer, primitive_topology) } /// #[inline] pub unsafe fn cmd_set_viewport_with_count( &self, command_buffer: vk::CommandBuffer, viewports: &[vk::Viewport], ) { (self.fp.cmd_set_viewport_with_count_ext)( command_buffer, viewports.len() as u32, viewports.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_scissor_with_count( &self, command_buffer: vk::CommandBuffer, scissors: &[vk::Rect2D], ) { (self.fp.cmd_set_scissor_with_count_ext)( command_buffer, scissors.len() as u32, scissors.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_bind_vertex_buffers2( &self, command_buffer: vk::CommandBuffer, first_binding: u32, buffers: &[vk::Buffer], offsets: &[vk::DeviceSize], sizes: Option<&[vk::DeviceSize]>, strides: Option<&[vk::DeviceSize]>, ) { assert_eq!(offsets.len(), buffers.len()); let p_sizes = if let Some(sizes) = sizes { assert_eq!(sizes.len(), buffers.len()); sizes.as_ptr() } else { ptr::null() }; let p_strides = if let Some(strides) = strides { assert_eq!(strides.len(), buffers.len()); strides.as_ptr() } else { ptr::null() }; (self.fp.cmd_bind_vertex_buffers2_ext)( command_buffer, first_binding, buffers.len() as u32, buffers.as_ptr(), offsets.as_ptr(), p_sizes, p_strides, ) } /// #[inline] pub unsafe fn cmd_set_depth_test_enable( &self, command_buffer: vk::CommandBuffer, depth_test_enable: bool, ) { (self.fp.cmd_set_depth_test_enable_ext)(command_buffer, depth_test_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_write_enable( &self, command_buffer: vk::CommandBuffer, depth_write_enable: bool, ) { (self.fp.cmd_set_depth_write_enable_ext)(command_buffer, depth_write_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_compare_op( &self, command_buffer: vk::CommandBuffer, depth_compare_op: vk::CompareOp, ) { (self.fp.cmd_set_depth_compare_op_ext)(command_buffer, depth_compare_op) } /// #[inline] pub unsafe fn cmd_set_depth_bounds_test_enable( &self, command_buffer: vk::CommandBuffer, depth_bounds_test_enable: bool, ) { (self.fp.cmd_set_depth_bounds_test_enable_ext)( command_buffer, depth_bounds_test_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_stencil_test_enable( &self, command_buffer: vk::CommandBuffer, stencil_test_enable: bool, ) { (self.fp.cmd_set_stencil_test_enable_ext)(command_buffer, stencil_test_enable.into()) } /// #[inline] pub unsafe fn cmd_set_stencil_op( &self, command_buffer: vk::CommandBuffer, face_mask: vk::StencilFaceFlags, fail_op: vk::StencilOp, pass_op: vk::StencilOp, depth_fail_op: vk::StencilOp, compare_op: vk::CompareOp, ) { (self.fp.cmd_set_stencil_op_ext)( command_buffer, face_mask, fail_op, pass_op, depth_fail_op, compare_op, ) } } ash-0.38.0+1.3.281/src/extensions/ext/extended_dynamic_state2.rs000064400000000000000000000040641046102023000222230ustar 00000000000000//! 
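//!
//! A minimal usage sketch, assuming an initialized `instance`/`device` pair, a command
//! buffer in the recording state, and a bound pipeline that declares the corresponding
//! dynamic states:
//!
//! ```no_run
//! # use ash::vk;
//! # unsafe fn set_state(instance: &ash::Instance, device: &ash::Device, cmd: vk::CommandBuffer) {
//! let ext = ash::ext::extended_dynamic_state2::Device::new(instance, device);
//! ext.cmd_set_depth_bias_enable(cmd, true);
//! ext.cmd_set_primitive_restart_enable(cmd, false);
//! ext.cmd_set_logic_op(cmd, vk::LogicOp::COPY);
//! # }
//! ```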
use crate::vk; impl crate::ext::extended_dynamic_state2::Device { /// #[inline] pub unsafe fn cmd_set_patch_control_points( &self, command_buffer: vk::CommandBuffer, patch_control_points: u32, ) { (self.fp.cmd_set_patch_control_points_ext)(command_buffer, patch_control_points) } /// #[inline] pub unsafe fn cmd_set_rasterizer_discard_enable( &self, command_buffer: vk::CommandBuffer, rasterizer_discard_enable: bool, ) { (self.fp.cmd_set_rasterizer_discard_enable_ext)( command_buffer, rasterizer_discard_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_depth_bias_enable( &self, command_buffer: vk::CommandBuffer, depth_bias_enable: bool, ) { (self.fp.cmd_set_depth_bias_enable_ext)(command_buffer, depth_bias_enable.into()) } /// #[inline] pub unsafe fn cmd_set_logic_op( &self, command_buffer: vk::CommandBuffer, logic_op: vk::LogicOp, ) { (self.fp.cmd_set_logic_op_ext)(command_buffer, logic_op) } /// #[inline] pub unsafe fn cmd_set_primitive_restart_enable( &self, command_buffer: vk::CommandBuffer, primitive_restart_enable: bool, ) { (self.fp.cmd_set_primitive_restart_enable_ext)( command_buffer, primitive_restart_enable.into(), ) } } ash-0.38.0+1.3.281/src/extensions/ext/extended_dynamic_state3.rs000064400000000000000000000330351046102023000222240ustar 00000000000000//! use crate::vk; impl crate::ext::extended_dynamic_state3::Device { /// #[inline] pub unsafe fn cmd_set_tessellation_domain_origin( &self, command_buffer: vk::CommandBuffer, domain_origin: vk::TessellationDomainOrigin, ) { (self.fp.cmd_set_tessellation_domain_origin_ext)(command_buffer, domain_origin) } /// #[inline] pub unsafe fn cmd_set_depth_clamp_enable( &self, command_buffer: vk::CommandBuffer, depth_clamp_enable: bool, ) { (self.fp.cmd_set_depth_clamp_enable_ext)(command_buffer, depth_clamp_enable.into()) } /// #[inline] pub unsafe fn cmd_set_polygon_mode( &self, command_buffer: vk::CommandBuffer, polygon_mode: vk::PolygonMode, ) { (self.fp.cmd_set_polygon_mode_ext)(command_buffer, polygon_mode) } /// #[inline] pub unsafe fn cmd_set_rasterization_samples( &self, command_buffer: vk::CommandBuffer, rasterization_samples: vk::SampleCountFlags, ) { (self.fp.cmd_set_rasterization_samples_ext)(command_buffer, rasterization_samples) } /// #[inline] pub unsafe fn cmd_set_sample_mask( &self, command_buffer: vk::CommandBuffer, samples: vk::SampleCountFlags, sample_mask: &[vk::SampleMask], ) { assert!( samples.as_raw().is_power_of_two(), "Only one SampleCount bit must be set" ); assert_eq!((samples.as_raw() as usize + 31) / 32, sample_mask.len()); (self.fp.cmd_set_sample_mask_ext)(command_buffer, samples, sample_mask.as_ptr()) } /// #[inline] pub unsafe fn cmd_set_alpha_to_coverage_enable( &self, command_buffer: vk::CommandBuffer, alpha_to_coverage_enable: bool, ) { (self.fp.cmd_set_alpha_to_coverage_enable_ext)( command_buffer, alpha_to_coverage_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_alpha_to_one_enable( &self, command_buffer: vk::CommandBuffer, alpha_to_one_enable: bool, ) { (self.fp.cmd_set_alpha_to_one_enable_ext)(command_buffer, alpha_to_one_enable.into()) } /// #[inline] pub unsafe fn cmd_set_logic_op_enable( &self, command_buffer: vk::CommandBuffer, logic_op_enable: bool, ) { (self.fp.cmd_set_logic_op_enable_ext)(command_buffer, logic_op_enable.into()) } /// #[inline] pub unsafe fn cmd_set_color_blend_enable( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_blend_enables: &[vk::Bool32], ) { (self.fp.cmd_set_color_blend_enable_ext)( command_buffer, first_attachment, 
color_blend_enables.len() as u32, color_blend_enables.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_color_blend_equation( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_blend_equations: &[vk::ColorBlendEquationEXT], ) { (self.fp.cmd_set_color_blend_equation_ext)( command_buffer, first_attachment, color_blend_equations.len() as u32, color_blend_equations.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_color_write_mask( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_write_masks: &[vk::ColorComponentFlags], ) { (self.fp.cmd_set_color_write_mask_ext)( command_buffer, first_attachment, color_write_masks.len() as u32, color_write_masks.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_rasterization_stream( &self, command_buffer: vk::CommandBuffer, rasterization_stream: u32, ) { (self.fp.cmd_set_rasterization_stream_ext)(command_buffer, rasterization_stream) } /// #[inline] pub unsafe fn cmd_set_conservative_rasterization_mode( &self, command_buffer: vk::CommandBuffer, conservative_rasterization_mode: vk::ConservativeRasterizationModeEXT, ) { (self.fp.cmd_set_conservative_rasterization_mode_ext)( command_buffer, conservative_rasterization_mode, ) } /// #[inline] pub unsafe fn cmd_set_extra_primitive_overestimation_size( &self, command_buffer: vk::CommandBuffer, extra_primitive_overestimation_size: f32, ) { (self.fp.cmd_set_extra_primitive_overestimation_size_ext)( command_buffer, extra_primitive_overestimation_size, ) } /// #[inline] pub unsafe fn cmd_set_depth_clip_enable( &self, command_buffer: vk::CommandBuffer, depth_clip_enable: bool, ) { (self.fp.cmd_set_depth_clip_enable_ext)(command_buffer, depth_clip_enable.into()) } /// #[inline] pub unsafe fn cmd_set_sample_locations_enable( &self, command_buffer: vk::CommandBuffer, sample_locations_enable: bool, ) { (self.fp.cmd_set_sample_locations_enable_ext)( command_buffer, sample_locations_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_color_blend_advanced( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_blend_advanced: &[vk::ColorBlendAdvancedEXT], ) { (self.fp.cmd_set_color_blend_advanced_ext)( command_buffer, first_attachment, color_blend_advanced.len() as u32, color_blend_advanced.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_provoking_vertex_mode( &self, command_buffer: vk::CommandBuffer, provoking_vertex_mode: vk::ProvokingVertexModeEXT, ) { (self.fp.cmd_set_provoking_vertex_mode_ext)(command_buffer, provoking_vertex_mode) } /// #[inline] pub unsafe fn cmd_set_line_rasterization_mode( &self, command_buffer: vk::CommandBuffer, line_rasterization_mode: vk::LineRasterizationModeEXT, ) { (self.fp.cmd_set_line_rasterization_mode_ext)(command_buffer, line_rasterization_mode) } /// #[inline] pub unsafe fn cmd_set_line_stipple_enable( &self, command_buffer: vk::CommandBuffer, stippled_line_enable: bool, ) { (self.fp.cmd_set_line_stipple_enable_ext)(command_buffer, stippled_line_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_clip_negative_one_to_one( &self, command_buffer: vk::CommandBuffer, negative_one_to_one: bool, ) { (self.fp.cmd_set_depth_clip_negative_one_to_one_ext)( command_buffer, negative_one_to_one.into(), ) } /// #[inline] pub unsafe fn cmd_set_viewport_w_scaling_enable_nv( &self, command_buffer: vk::CommandBuffer, viewport_w_scaling_enable: bool, ) { (self.fp.cmd_set_viewport_w_scaling_enable_nv)( command_buffer, viewport_w_scaling_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_viewport_swizzle_nv( &self, command_buffer: 
vk::CommandBuffer, first_attachment: u32, viewport_swizzles: &[vk::ViewportSwizzleNV], ) { (self.fp.cmd_set_viewport_swizzle_nv)( command_buffer, first_attachment, viewport_swizzles.len() as u32, viewport_swizzles.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_to_color_enable_nv( &self, command_buffer: vk::CommandBuffer, coverage_to_color_enable: bool, ) { (self.fp.cmd_set_coverage_to_color_enable_nv)( command_buffer, coverage_to_color_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_to_color_location_nv( &self, command_buffer: vk::CommandBuffer, coverage_to_color_location: u32, ) { (self.fp.cmd_set_coverage_to_color_location_nv)(command_buffer, coverage_to_color_location) } /// #[inline] pub unsafe fn cmd_set_coverage_modulation_mode_nv( &self, command_buffer: vk::CommandBuffer, coverage_modulation_mode: vk::CoverageModulationModeNV, ) { (self.fp.cmd_set_coverage_modulation_mode_nv)(command_buffer, coverage_modulation_mode) } /// #[inline] pub unsafe fn cmd_set_coverage_modulation_table_enable_nv( &self, command_buffer: vk::CommandBuffer, coverage_modulation_table_enable: bool, ) { (self.fp.cmd_set_coverage_modulation_table_enable_nv)( command_buffer, coverage_modulation_table_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_modulation_table_nv( &self, command_buffer: vk::CommandBuffer, coverage_modulation_table: &[f32], ) { (self.fp.cmd_set_coverage_modulation_table_nv)( command_buffer, coverage_modulation_table.len() as u32, coverage_modulation_table.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_shading_rate_image_enable_nv( &self, command_buffer: vk::CommandBuffer, shading_rate_image_enable: bool, ) { (self.fp.cmd_set_shading_rate_image_enable_nv)( command_buffer, shading_rate_image_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_representative_fragment_test_enable_nv( &self, command_buffer: vk::CommandBuffer, representative_fragment_test_enable: bool, ) { (self.fp.cmd_set_representative_fragment_test_enable_nv)( command_buffer, representative_fragment_test_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_reduction_mode_nv( &self, command_buffer: vk::CommandBuffer, coverage_reduction_mode: vk::CoverageReductionModeNV, ) { (self.fp.cmd_set_coverage_reduction_mode_nv)(command_buffer, coverage_reduction_mode) } } ash-0.38.0+1.3.281/src/extensions/ext/full_screen_exclusive.rs000064400000000000000000000042761046102023000220320ustar 00000000000000//! 
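//!
//! A minimal sketch of entering and leaving exclusive full-screen mode,
//! assuming `fse` is an `ash::ext::full_screen_exclusive::Device` created by
//! the application and `swapchain` was created with the appropriate
//! full-screen-exclusive info chained in:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn with_exclusive_fullscreen(
//!     fse: &ash::ext::full_screen_exclusive::Device,
//!     swapchain: vk::SwapchainKHR,
//! ) -> ash::prelude::VkResult<()> {
//!     fse.acquire_full_screen_exclusive_mode(swapchain)?;
//!     // ... acquire, render and present frames here ...
//!     fse.release_full_screen_exclusive_mode(swapchain)
//! }
//! ```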
use crate::prelude::*; use crate::vk; use alloc::vec::Vec; use core::mem; impl crate::ext::full_screen_exclusive::Device { /// #[inline] pub unsafe fn acquire_full_screen_exclusive_mode( &self, swapchain: vk::SwapchainKHR, ) -> VkResult<()> { (self.fp.acquire_full_screen_exclusive_mode_ext)(self.handle, swapchain).result() } /// #[inline] pub unsafe fn release_full_screen_exclusive_mode( &self, swapchain: vk::SwapchainKHR, ) -> VkResult<()> { (self.fp.release_full_screen_exclusive_mode_ext)(self.handle, swapchain).result() } /// #[inline] pub unsafe fn get_device_group_surface_present_modes2( &self, surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR<'_>, ) -> VkResult { let mut present_modes = mem::MaybeUninit::uninit(); (self.fp.get_device_group_surface_present_modes2_ext)( self.handle, surface_info, present_modes.as_mut_ptr(), ) .assume_init_on_success(present_modes) } } impl crate::ext::full_screen_exclusive::Instance { /// #[inline] pub unsafe fn get_physical_device_surface_present_modes2( &self, physical_device: vk::PhysicalDevice, surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR<'_>, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_surface_present_modes2_ext)( physical_device, surface_info, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/ext/hdr_metadata.rs000064400000000000000000000012141046102023000200440ustar 00000000000000//! use crate::vk; impl crate::ext::hdr_metadata::Device { /// #[inline] pub unsafe fn set_hdr_metadata( &self, swapchains: &[vk::SwapchainKHR], metadata: &[vk::HdrMetadataEXT<'_>], ) { assert_eq!(swapchains.len(), metadata.len()); (self.fp.set_hdr_metadata_ext)( self.handle, swapchains.len() as u32, swapchains.as_ptr(), metadata.as_ptr(), ) } } ash-0.38.0+1.3.281/src/extensions/ext/headless_surface.rs000064400000000000000000000015301046102023000207300ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::ext::headless_surface::Instance { /// #[inline] pub unsafe fn create_headless_surface( &self, create_info: &vk::HeadlessSurfaceCreateInfoEXT<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_headless_surface_ext)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/ext/host_image_copy.rs000064400000000000000000000053451046102023000206110ustar 00000000000000//! 
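//!
//! A minimal sketch of a host-side layout transition, assuming `hic` is an
//! `ash::ext::host_image_copy::Device` created by the application; the
//! transition info is left mostly default and would need its layouts and
//! subresource range filled in:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn transition_for_host_copy(
//!     hic: &ash::ext::host_image_copy::Device,
//!     image: vk::Image,
//! ) -> ash::prelude::VkResult<()> {
//!     // Set old_layout/new_layout and the subresource range as required.
//!     let transition = vk::HostImageLayoutTransitionInfoEXT::default().image(image);
//!     hic.transition_image_layout(&[transition])
//! }
//! ```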
use crate::prelude::*; use crate::vk; #[cfg(doc)] use crate::{ext, khr}; impl crate::ext::host_image_copy::Device { /// #[inline] pub unsafe fn copy_memory_to_image( &self, copy_memory_to_image_info: &vk::CopyMemoryToImageInfoEXT<'_>, ) -> VkResult<()> { (self.fp.copy_memory_to_image_ext)(self.handle, copy_memory_to_image_info).result() } /// #[inline] pub unsafe fn copy_image_to_memory( &self, copy_image_to_memory_info: &vk::CopyImageToMemoryInfoEXT<'_>, ) -> VkResult<()> { (self.fp.copy_image_to_memory_ext)(self.handle, copy_image_to_memory_info).result() } /// #[inline] pub unsafe fn copy_image_to_image( &self, copy_image_to_image_info: &vk::CopyImageToImageInfoEXT<'_>, ) -> VkResult<()> { (self.fp.copy_image_to_image_ext)(self.handle, copy_image_to_image_info).result() } /// #[inline] pub unsafe fn transition_image_layout( &self, transitions: &[vk::HostImageLayoutTransitionInfoEXT<'_>], ) -> VkResult<()> { (self.fp.transition_image_layout_ext)( self.handle, transitions.len() as u32, transitions.as_ptr(), ) .result() } /// /// /// Also available as [`khr::maintenance5::Device::get_image_subresource_layout2()`] /// when [`VK_KHR_maintenance5`] is enabled. /// /// Also available as [`ext::image_compression_control::Device::get_image_subresource_layout2()`] /// when [`VK_EXT_image_compression_control`] is enabled. /// /// [`VK_KHR_maintenance5`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_maintenance5.html /// [`VK_EXT_image_compression_control`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_image_compression_control.html #[inline] pub unsafe fn get_image_subresource_layout2( &self, image: vk::Image, subresource: &vk::ImageSubresource2EXT<'_>, layout: &mut vk::SubresourceLayout2EXT<'_>, ) { (self.fp.get_image_subresource_layout2_ext)(self.handle, image, subresource, layout) } } ash-0.38.0+1.3.281/src/extensions/ext/image_compression_control.rs000064400000000000000000000022651046102023000227010ustar 00000000000000//! use crate::vk; #[cfg(doc)] use crate::{ext, khr}; impl crate::ext::image_compression_control::Device { /// /// /// Also available as [`khr::maintenance5::Device::get_image_subresource_layout2()`] /// when [`VK_KHR_maintenance5`] is enabled. /// /// Also available as [`ext::host_image_copy::Device::get_image_subresource_layout2()`] /// when [`VK_EXT_host_image_copy`] is enabled. /// /// [`VK_KHR_maintenance5`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_maintenance5.html /// [`VK_EXT_host_image_copy`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_host_image_copy.html #[inline] pub unsafe fn get_image_subresource_layout2( &self, image: vk::Image, subresource: &vk::ImageSubresource2EXT<'_>, layout: &mut vk::SubresourceLayout2EXT<'_>, ) { (self.fp.get_image_subresource_layout2_ext)(self.handle, image, subresource, layout) } } ash-0.38.0+1.3.281/src/extensions/ext/image_drm_format_modifier.rs000064400000000000000000000012231046102023000226010ustar 00000000000000//! use crate::prelude::*; use crate::vk; impl crate::ext::image_drm_format_modifier::Device { /// #[inline] pub unsafe fn get_image_drm_format_modifier_properties( &self, image: vk::Image, properties: &mut vk::ImageDrmFormatModifierPropertiesEXT<'_>, ) -> VkResult<()> { (self.fp.get_image_drm_format_modifier_properties_ext)(self.handle, image, properties) .result() } } ash-0.38.0+1.3.281/src/extensions/ext/mesh_shader.rs000064400000000000000000000045531046102023000177220ustar 00000000000000//! 
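//!
//! A minimal sketch of a direct mesh-task dispatch, assuming `mesh` is an
//! `ash::ext::mesh_shader::Device` created by the application and a mesh
//! pipeline is already bound to `cmd`; the group counts are placeholders:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn draw_meshlets(mesh: &ash::ext::mesh_shader::Device, cmd: vk::CommandBuffer) {
//!     // Launch 64 task/mesh work groups in X, one in Y and Z.
//!     mesh.cmd_draw_mesh_tasks(cmd, 64, 1, 1);
//! }
//! ```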
use crate::vk; impl crate::ext::mesh_shader::Device { /// #[inline] pub unsafe fn cmd_draw_mesh_tasks( &self, command_buffer: vk::CommandBuffer, group_count_x: u32, group_count_y: u32, group_count_z: u32, ) { (self.fp.cmd_draw_mesh_tasks_ext)( command_buffer, group_count_x, group_count_y, group_count_z, ) } /// /// /// `buffer` contains `draw_count` [`vk::DrawMeshTasksIndirectCommandEXT`] structures starting at `offset` in bytes, holding the draw parameters. #[inline] pub unsafe fn cmd_draw_mesh_tasks_indirect( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, draw_count: u32, stride: u32, ) { (self.fp.cmd_draw_mesh_tasks_indirect_ext)( command_buffer, buffer, offset, draw_count, stride, ) } /// /// /// `buffer` contains a maximum of `max_draw_count` [`vk::DrawMeshTasksIndirectCommandEXT`] structures starting at `offset` in bytes, holding the draw parameters. /// `count_buffer` is the buffer containing the draw count, starting at `count_buffer_offset` in bytes. /// The actual number of executed draw calls is the minimum of the count specified in `count_buffer` and `max_draw_count`. #[inline] pub unsafe fn cmd_draw_mesh_tasks_indirect_count( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, count_buffer: vk::Buffer, count_buffer_offset: vk::DeviceSize, max_draw_count: u32, stride: u32, ) { (self.fp.cmd_draw_mesh_tasks_indirect_count_ext)( command_buffer, buffer, offset, count_buffer, count_buffer_offset, max_draw_count, stride, ) } } ash-0.38.0+1.3.281/src/extensions/ext/metal_surface.rs000064400000000000000000000015061046102023000202450ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::ext::metal_surface::Instance { /// #[inline] pub unsafe fn create_metal_surface( &self, create_info: &vk::MetalSurfaceCreateInfoEXT<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_metal_surface_ext)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/ext/mod.rs000064400000000000000000000015571046102023000162200ustar 00000000000000pub mod acquire_drm_display; pub mod buffer_device_address; pub mod calibrated_timestamps; #[deprecated(note = "Please use the [DebugUtils](struct.DebugUtils.html) extension instead.")] pub mod debug_marker; #[deprecated(note = "Please use the [DebugUtils](struct.DebugUtils.html) extension instead.")] pub mod debug_report; pub mod debug_utils; pub mod descriptor_buffer; pub mod extended_dynamic_state; pub mod extended_dynamic_state2; pub mod extended_dynamic_state3; pub mod full_screen_exclusive; pub mod hdr_metadata; pub mod headless_surface; pub mod host_image_copy; pub mod image_compression_control; pub mod image_drm_format_modifier; pub mod mesh_shader; pub mod metal_surface; pub mod pipeline_properties; pub mod private_data; pub mod sample_locations; pub mod shader_object; pub mod swapchain_maintenance1; pub mod tooling_info; pub mod vertex_input_dynamic_state; ash-0.38.0+1.3.281/src/extensions/ext/pipeline_properties.rs000064400000000000000000000013551046102023000215160ustar 00000000000000//! 
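//!
//! A minimal sketch, assuming `pp` is an `ash::ext::pipeline_properties::Device`
//! created by the application and that `vk::PipelinePropertiesIdentifierEXT`
//! is used as the output structure (the only properties structure defined by
//! the extension at the time of writing):
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn pipeline_identifier(
//!     pp: &ash::ext::pipeline_properties::Device,
//!     pipeline: vk::Pipeline,
//! ) -> ash::prelude::VkResult<[u8; vk::UUID_SIZE]> {
//!     let info = vk::PipelineInfoEXT::default().pipeline(pipeline);
//!     let mut props = vk::PipelinePropertiesIdentifierEXT::default();
//!     pp.get_pipeline_properties(&info, &mut props)?;
//!     Ok(props.pipeline_identifier)
//! }
//! ```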
use crate::prelude::*; use crate::vk; impl crate::ext::pipeline_properties::Device { /// #[inline] pub unsafe fn get_pipeline_properties( &self, pipeline_info: &vk::PipelineInfoEXT<'_>, pipeline_properties: &mut impl crate::ext::pipeline_properties::GetPipelinePropertiesEXTParamPipelineProperties, ) -> VkResult<()> { (self.fp.get_pipeline_properties_ext)( self.handle, pipeline_info, <*mut _>::cast(pipeline_properties), ) .result() } } ash-0.38.0+1.3.281/src/extensions/ext/private_data.rs000064400000000000000000000045351046102023000201030ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::ext::private_data::Device { /// #[inline] pub unsafe fn create_private_data_slot( &self, create_info: &vk::PrivateDataSlotCreateInfoEXT<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut private_data_slot = mem::MaybeUninit::uninit(); (self.fp.create_private_data_slot_ext)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), private_data_slot.as_mut_ptr(), ) .assume_init_on_success(private_data_slot) } /// #[inline] pub unsafe fn destroy_private_data_slot( &self, private_data_slot: vk::PrivateDataSlotEXT, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_private_data_slot_ext)( self.handle, private_data_slot, allocation_callbacks.as_raw_ptr(), ) } /// #[inline] pub unsafe fn set_private_data( &self, object: T, private_data_slot: vk::PrivateDataSlotEXT, data: u64, ) -> VkResult<()> { (self.fp.set_private_data_ext)( self.handle, T::TYPE, object.as_raw(), private_data_slot, data, ) .result() } /// #[inline] pub unsafe fn get_private_data( &self, object: T, private_data_slot: vk::PrivateDataSlotEXT, ) -> u64 { let mut data = mem::MaybeUninit::uninit(); (self.fp.get_private_data_ext)( self.handle, T::TYPE, object.as_raw(), private_data_slot, data.as_mut_ptr(), ); data.assume_init() } } ash-0.38.0+1.3.281/src/extensions/ext/sample_locations.rs000064400000000000000000000021741046102023000207710ustar 00000000000000//! use crate::vk; impl crate::ext::sample_locations::Device { /// #[inline] pub unsafe fn cmd_set_sample_locations( &self, command_buffer: vk::CommandBuffer, sample_locations_info: &vk::SampleLocationsInfoEXT<'_>, ) { (self.fp.cmd_set_sample_locations_ext)(command_buffer, sample_locations_info) } } impl crate::ext::sample_locations::Instance { /// #[inline] pub unsafe fn get_physical_device_multisample_properties( &self, physical_device: vk::PhysicalDevice, samples: vk::SampleCountFlags, multisample_properties: &mut vk::MultisamplePropertiesEXT<'_>, ) { (self.fp.get_physical_device_multisample_properties_ext)( physical_device, samples, multisample_properties, ) } } ash-0.38.0+1.3.281/src/extensions/ext/shader_object.rs000064400000000000000000000604641046102023000202370ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::ptr; impl crate::ext::shader_object::Device { /// /// /// When this function returns, whether or not it succeeds, it is guaranteed that every returned /// element is either [`vk::ShaderEXT::null()`] or a valid [`vk::ShaderEXT`] handle. /// /// This means that whenever shader creation fails, the application can determine which shader /// the returned error pertains to by locating the first [`vk::Handle::is_null()`] element /// in the returned [`Vec`]. 
It also means that an application can reliably clean up from a /// failed call by iterating over the returned [`Vec`] and destroying every element that is not /// [`vk::Handle::is_null()`]. #[inline] pub unsafe fn create_shaders( &self, create_infos: &[vk::ShaderCreateInfoEXT<'_>], allocator: Option<&vk::AllocationCallbacks<'_>>, ) -> Result, (Vec, vk::Result)> { let mut shaders = Vec::with_capacity(create_infos.len()); let err_code = (self.fp.create_shaders_ext)( self.handle, create_infos.len() as u32, create_infos.as_ptr(), allocator.as_raw_ptr(), shaders.as_mut_ptr(), ); shaders.set_len(create_infos.len()); match err_code { vk::Result::SUCCESS => Ok(shaders), _ => Err((shaders, err_code)), } } /// #[inline] pub unsafe fn destroy_shader( &self, shader: vk::ShaderEXT, allocator: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_shader_ext)(self.handle, shader, allocator.as_raw_ptr()) } /// #[inline] pub unsafe fn get_shader_binary_data(&self, shader: vk::ShaderEXT) -> VkResult> { read_into_uninitialized_vector(|count, data: *mut u8| { (self.fp.get_shader_binary_data_ext)(self.handle, shader, count, data.cast()) }) } /// #[inline] pub unsafe fn cmd_bind_shaders( &self, command_buffer: vk::CommandBuffer, stages: &[vk::ShaderStageFlags], shaders: &[vk::ShaderEXT], ) { assert_eq!(stages.len(), shaders.len()); (self.fp.cmd_bind_shaders_ext)( command_buffer, stages.len() as u32, stages.as_ptr(), shaders.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_vertex_input( &self, command_buffer: vk::CommandBuffer, vertex_binding_descriptions: &[vk::VertexInputBindingDescription2EXT<'_>], vertex_attribute_descriptions: &[vk::VertexInputAttributeDescription2EXT<'_>], ) { (self.fp.cmd_set_vertex_input_ext)( command_buffer, vertex_binding_descriptions.len() as u32, vertex_binding_descriptions.as_ptr(), vertex_attribute_descriptions.len() as u32, vertex_attribute_descriptions.as_ptr(), ) } // --- extended_dynamic_state functions --- /// #[inline] pub unsafe fn cmd_set_cull_mode( &self, command_buffer: vk::CommandBuffer, cull_mode: vk::CullModeFlags, ) { (self.fp.cmd_set_cull_mode_ext)(command_buffer, cull_mode) } /// #[inline] pub unsafe fn cmd_set_front_face( &self, command_buffer: vk::CommandBuffer, front_face: vk::FrontFace, ) { (self.fp.cmd_set_front_face_ext)(command_buffer, front_face) } /// #[inline] pub unsafe fn cmd_set_primitive_topology( &self, command_buffer: vk::CommandBuffer, primitive_topology: vk::PrimitiveTopology, ) { (self.fp.cmd_set_primitive_topology_ext)(command_buffer, primitive_topology) } /// #[inline] pub unsafe fn cmd_set_viewport_with_count( &self, command_buffer: vk::CommandBuffer, viewports: &[vk::Viewport], ) { (self.fp.cmd_set_viewport_with_count_ext)( command_buffer, viewports.len() as u32, viewports.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_scissor_with_count( &self, command_buffer: vk::CommandBuffer, scissors: &[vk::Rect2D], ) { (self.fp.cmd_set_scissor_with_count_ext)( command_buffer, scissors.len() as u32, scissors.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_bind_vertex_buffers2( &self, command_buffer: vk::CommandBuffer, first_binding: u32, buffers: &[vk::Buffer], offsets: &[vk::DeviceSize], sizes: Option<&[vk::DeviceSize]>, strides: Option<&[vk::DeviceSize]>, ) { assert_eq!(offsets.len(), buffers.len()); let p_sizes = if let Some(sizes) = sizes { assert_eq!(sizes.len(), buffers.len()); sizes.as_ptr() } else { ptr::null() }; let p_strides = if let Some(strides) = strides { assert_eq!(strides.len(), buffers.len()); strides.as_ptr() } else { ptr::null() 
}; (self.fp.cmd_bind_vertex_buffers2_ext)( command_buffer, first_binding, buffers.len() as u32, buffers.as_ptr(), offsets.as_ptr(), p_sizes, p_strides, ) } /// #[inline] pub unsafe fn cmd_set_depth_test_enable( &self, command_buffer: vk::CommandBuffer, depth_test_enable: bool, ) { (self.fp.cmd_set_depth_test_enable_ext)(command_buffer, depth_test_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_write_enable( &self, command_buffer: vk::CommandBuffer, depth_write_enable: bool, ) { (self.fp.cmd_set_depth_write_enable_ext)(command_buffer, depth_write_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_compare_op( &self, command_buffer: vk::CommandBuffer, depth_compare_op: vk::CompareOp, ) { (self.fp.cmd_set_depth_compare_op_ext)(command_buffer, depth_compare_op) } /// #[inline] pub unsafe fn cmd_set_depth_bounds_test_enable( &self, command_buffer: vk::CommandBuffer, depth_bounds_test_enable: bool, ) { (self.fp.cmd_set_depth_bounds_test_enable_ext)( command_buffer, depth_bounds_test_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_stencil_test_enable( &self, command_buffer: vk::CommandBuffer, stencil_test_enable: bool, ) { (self.fp.cmd_set_stencil_test_enable_ext)(command_buffer, stencil_test_enable.into()) } /// #[inline] pub unsafe fn cmd_set_stencil_op( &self, command_buffer: vk::CommandBuffer, face_mask: vk::StencilFaceFlags, fail_op: vk::StencilOp, pass_op: vk::StencilOp, depth_fail_op: vk::StencilOp, compare_op: vk::CompareOp, ) { (self.fp.cmd_set_stencil_op_ext)( command_buffer, face_mask, fail_op, pass_op, depth_fail_op, compare_op, ) } // --- extended_dynamic_state2 functions --- /// #[inline] pub unsafe fn cmd_set_patch_control_points( &self, command_buffer: vk::CommandBuffer, patch_control_points: u32, ) { (self.fp.cmd_set_patch_control_points_ext)(command_buffer, patch_control_points) } /// #[inline] pub unsafe fn cmd_set_rasterizer_discard_enable( &self, command_buffer: vk::CommandBuffer, rasterizer_discard_enable: bool, ) { (self.fp.cmd_set_rasterizer_discard_enable_ext)( command_buffer, rasterizer_discard_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_depth_bias_enable( &self, command_buffer: vk::CommandBuffer, depth_bias_enable: bool, ) { (self.fp.cmd_set_depth_bias_enable_ext)(command_buffer, depth_bias_enable.into()) } /// #[inline] pub unsafe fn cmd_set_logic_op( &self, command_buffer: vk::CommandBuffer, logic_op: vk::LogicOp, ) { (self.fp.cmd_set_logic_op_ext)(command_buffer, logic_op) } /// #[inline] pub unsafe fn cmd_set_primitive_restart_enable( &self, command_buffer: vk::CommandBuffer, primitive_restart_enable: bool, ) { (self.fp.cmd_set_primitive_restart_enable_ext)( command_buffer, primitive_restart_enable.into(), ) } // --- extended_dynamic_state3 functions --- /// #[inline] pub unsafe fn cmd_set_tessellation_domain_origin( &self, command_buffer: vk::CommandBuffer, domain_origin: vk::TessellationDomainOrigin, ) { (self.fp.cmd_set_tessellation_domain_origin_ext)(command_buffer, domain_origin) } /// #[inline] pub unsafe fn cmd_set_depth_clamp_enable( &self, command_buffer: vk::CommandBuffer, depth_clamp_enable: bool, ) { (self.fp.cmd_set_depth_clamp_enable_ext)(command_buffer, depth_clamp_enable.into()) } /// #[inline] pub unsafe fn cmd_set_polygon_mode( &self, command_buffer: vk::CommandBuffer, polygon_mode: vk::PolygonMode, ) { (self.fp.cmd_set_polygon_mode_ext)(command_buffer, polygon_mode) } /// #[inline] pub unsafe fn cmd_set_rasterization_samples( &self, command_buffer: vk::CommandBuffer, rasterization_samples: vk::SampleCountFlags, 
) { (self.fp.cmd_set_rasterization_samples_ext)(command_buffer, rasterization_samples) } /// #[inline] pub unsafe fn cmd_set_sample_mask( &self, command_buffer: vk::CommandBuffer, samples: vk::SampleCountFlags, sample_mask: &[vk::SampleMask], ) { assert!( samples.as_raw().is_power_of_two(), "Only one SampleCount bit must be set" ); assert_eq!((samples.as_raw() as usize + 31) / 32, sample_mask.len()); (self.fp.cmd_set_sample_mask_ext)(command_buffer, samples, sample_mask.as_ptr()) } /// #[inline] pub unsafe fn cmd_set_alpha_to_coverage_enable( &self, command_buffer: vk::CommandBuffer, alpha_to_coverage_enable: bool, ) { (self.fp.cmd_set_alpha_to_coverage_enable_ext)( command_buffer, alpha_to_coverage_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_alpha_to_one_enable( &self, command_buffer: vk::CommandBuffer, alpha_to_one_enable: bool, ) { (self.fp.cmd_set_alpha_to_one_enable_ext)(command_buffer, alpha_to_one_enable.into()) } /// #[inline] pub unsafe fn cmd_set_logic_op_enable( &self, command_buffer: vk::CommandBuffer, logic_op_enable: bool, ) { (self.fp.cmd_set_logic_op_enable_ext)(command_buffer, logic_op_enable.into()) } /// #[inline] pub unsafe fn cmd_set_color_blend_enable( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_blend_enables: &[vk::Bool32], ) { (self.fp.cmd_set_color_blend_enable_ext)( command_buffer, first_attachment, color_blend_enables.len() as u32, color_blend_enables.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_color_blend_equation( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_blend_equations: &[vk::ColorBlendEquationEXT], ) { (self.fp.cmd_set_color_blend_equation_ext)( command_buffer, first_attachment, color_blend_equations.len() as u32, color_blend_equations.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_color_write_mask( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_write_masks: &[vk::ColorComponentFlags], ) { (self.fp.cmd_set_color_write_mask_ext)( command_buffer, first_attachment, color_write_masks.len() as u32, color_write_masks.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_rasterization_stream( &self, command_buffer: vk::CommandBuffer, rasterization_stream: u32, ) { (self.fp.cmd_set_rasterization_stream_ext)(command_buffer, rasterization_stream) } /// #[inline] pub unsafe fn cmd_set_conservative_rasterization_mode( &self, command_buffer: vk::CommandBuffer, conservative_rasterization_mode: vk::ConservativeRasterizationModeEXT, ) { (self.fp.cmd_set_conservative_rasterization_mode_ext)( command_buffer, conservative_rasterization_mode, ) } /// #[inline] pub unsafe fn cmd_set_extra_primitive_overestimation_size( &self, command_buffer: vk::CommandBuffer, extra_primitive_overestimation_size: f32, ) { (self.fp.cmd_set_extra_primitive_overestimation_size_ext)( command_buffer, extra_primitive_overestimation_size, ) } /// #[inline] pub unsafe fn cmd_set_depth_clip_enable( &self, command_buffer: vk::CommandBuffer, depth_clip_enable: bool, ) { (self.fp.cmd_set_depth_clip_enable_ext)(command_buffer, depth_clip_enable.into()) } /// #[inline] pub unsafe fn cmd_set_sample_locations_enable( &self, command_buffer: vk::CommandBuffer, sample_locations_enable: bool, ) { (self.fp.cmd_set_sample_locations_enable_ext)( command_buffer, sample_locations_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_color_blend_advanced( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, color_blend_advanced: &[vk::ColorBlendAdvancedEXT], ) { (self.fp.cmd_set_color_blend_advanced_ext)( 
command_buffer, first_attachment, color_blend_advanced.len() as u32, color_blend_advanced.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_provoking_vertex_mode( &self, command_buffer: vk::CommandBuffer, provoking_vertex_mode: vk::ProvokingVertexModeEXT, ) { (self.fp.cmd_set_provoking_vertex_mode_ext)(command_buffer, provoking_vertex_mode) } /// #[inline] pub unsafe fn cmd_set_line_rasterization_mode( &self, command_buffer: vk::CommandBuffer, line_rasterization_mode: vk::LineRasterizationModeEXT, ) { (self.fp.cmd_set_line_rasterization_mode_ext)(command_buffer, line_rasterization_mode) } /// #[inline] pub unsafe fn cmd_set_line_stipple_enable( &self, command_buffer: vk::CommandBuffer, stippled_line_enable: bool, ) { (self.fp.cmd_set_line_stipple_enable_ext)(command_buffer, stippled_line_enable.into()) } /// #[inline] pub unsafe fn cmd_set_depth_clip_negative_one_to_one( &self, command_buffer: vk::CommandBuffer, negative_one_to_one: bool, ) { (self.fp.cmd_set_depth_clip_negative_one_to_one_ext)( command_buffer, negative_one_to_one.into(), ) } /// #[inline] pub unsafe fn cmd_set_viewport_w_scaling_enable_nv( &self, command_buffer: vk::CommandBuffer, viewport_w_scaling_enable: bool, ) { (self.fp.cmd_set_viewport_w_scaling_enable_nv)( command_buffer, viewport_w_scaling_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_viewport_swizzle_nv( &self, command_buffer: vk::CommandBuffer, first_attachment: u32, viewport_swizzles: &[vk::ViewportSwizzleNV], ) { (self.fp.cmd_set_viewport_swizzle_nv)( command_buffer, first_attachment, viewport_swizzles.len() as u32, viewport_swizzles.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_to_color_enable_nv( &self, command_buffer: vk::CommandBuffer, coverage_to_color_enable: bool, ) { (self.fp.cmd_set_coverage_to_color_enable_nv)( command_buffer, coverage_to_color_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_to_color_location_nv( &self, command_buffer: vk::CommandBuffer, coverage_to_color_location: u32, ) { (self.fp.cmd_set_coverage_to_color_location_nv)(command_buffer, coverage_to_color_location) } /// #[inline] pub unsafe fn cmd_set_coverage_modulation_mode_nv( &self, command_buffer: vk::CommandBuffer, coverage_modulation_mode: vk::CoverageModulationModeNV, ) { (self.fp.cmd_set_coverage_modulation_mode_nv)(command_buffer, coverage_modulation_mode) } /// #[inline] pub unsafe fn cmd_set_coverage_modulation_table_enable_nv( &self, command_buffer: vk::CommandBuffer, coverage_modulation_table_enable: bool, ) { (self.fp.cmd_set_coverage_modulation_table_enable_nv)( command_buffer, coverage_modulation_table_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_modulation_table_nv( &self, command_buffer: vk::CommandBuffer, coverage_modulation_table: &[f32], ) { (self.fp.cmd_set_coverage_modulation_table_nv)( command_buffer, coverage_modulation_table.len() as u32, coverage_modulation_table.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_set_shading_rate_image_enable_nv( &self, command_buffer: vk::CommandBuffer, shading_rate_image_enable: bool, ) { (self.fp.cmd_set_shading_rate_image_enable_nv)( command_buffer, shading_rate_image_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_representative_fragment_test_enable_nv( &self, command_buffer: vk::CommandBuffer, representative_fragment_test_enable: bool, ) { (self.fp.cmd_set_representative_fragment_test_enable_nv)( command_buffer, representative_fragment_test_enable.into(), ) } /// #[inline] pub unsafe fn cmd_set_coverage_reduction_mode_nv( &self, command_buffer: 
vk::CommandBuffer, coverage_reduction_mode: vk::CoverageReductionModeNV, ) { (self.fp.cmd_set_coverage_reduction_mode_nv)(command_buffer, coverage_reduction_mode) } } ash-0.38.0+1.3.281/src/extensions/ext/swapchain_maintenance1.rs000064400000000000000000000010541046102023000220310ustar 00000000000000//! use crate::prelude::*; use crate::vk; impl crate::ext::swapchain_maintenance1::Device { /// #[inline] pub unsafe fn release_swapchain_images( &self, release_info: &vk::ReleaseSwapchainImagesInfoEXT<'_>, ) -> VkResult<()> { (self.fp.release_swapchain_images_ext)(self.handle, release_info).result() } } ash-0.38.0+1.3.281/src/extensions/ext/tooling_info.rs000064400000000000000000000012431046102023000201170ustar 00000000000000//! use crate::prelude::*; use crate::vk; use alloc::vec::Vec; impl crate::ext::tooling_info::Instance { /// #[inline] pub unsafe fn get_physical_device_tool_properties( &self, physical_device: vk::PhysicalDevice, ) -> VkResult>> { read_into_defaulted_vector(|count, data| { (self.fp.get_physical_device_tool_properties_ext)(physical_device, count, data) }) } } ash-0.38.0+1.3.281/src/extensions/ext/vertex_input_dynamic_state.rs000064400000000000000000000015561046102023000231000ustar 00000000000000//! use crate::vk; impl crate::ext::vertex_input_dynamic_state::Device { /// #[inline] pub unsafe fn cmd_set_vertex_input( &self, command_buffer: vk::CommandBuffer, vertex_binding_descriptions: &[vk::VertexInputBindingDescription2EXT<'_>], vertex_attribute_descriptions: &[vk::VertexInputAttributeDescription2EXT<'_>], ) { (self.fp.cmd_set_vertex_input_ext)( command_buffer, vertex_binding_descriptions.len() as u32, vertex_binding_descriptions.as_ptr(), vertex_attribute_descriptions.len() as u32, vertex_attribute_descriptions.as_ptr(), ) } } ash-0.38.0+1.3.281/src/extensions/google/display_timing.rs000064400000000000000000000022221046102023000211170ustar 00000000000000//! use crate::prelude::*; use crate::vk; use alloc::vec::Vec; use core::mem; impl crate::google::display_timing::Device { /// #[inline] pub unsafe fn get_past_presentation_timing( &self, swapchain: vk::SwapchainKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_past_presentation_timing_google)(self.handle, swapchain, count, data) }) } /// #[inline] pub unsafe fn get_refresh_cycle_duration( &self, swapchain: vk::SwapchainKHR, ) -> VkResult { let mut properties = mem::MaybeUninit::uninit(); (self.fp.get_refresh_cycle_duration_google)(self.handle, swapchain, properties.as_mut_ptr()) .assume_init_on_success(properties) } } ash-0.38.0+1.3.281/src/extensions/google/mod.rs000064400000000000000000000000301046102023000166550ustar 00000000000000pub mod display_timing; ash-0.38.0+1.3.281/src/extensions/khr/acceleration_structure.rs000064400000000000000000000242561046102023000221770ustar 00000000000000//! 
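//!
//! A minimal sketch of querying build sizes before allocating an acceleration
//! structure, assuming `accel` is an `ash::khr::acceleration_structure::Device`
//! created by the application and `build_info` already describes the
//! geometries; `max_primitive_counts` must contain one entry per geometry:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn query_build_sizes(
//!     accel: &ash::khr::acceleration_structure::Device,
//!     build_info: &vk::AccelerationStructureBuildGeometryInfoKHR<'_>,
//!     max_primitive_counts: &[u32],
//! ) -> vk::AccelerationStructureBuildSizesInfoKHR<'static> {
//!     let mut size_info = vk::AccelerationStructureBuildSizesInfoKHR::default();
//!     accel.get_acceleration_structure_build_sizes(
//!         vk::AccelerationStructureBuildTypeKHR::DEVICE,
//!         build_info,
//!         max_primitive_counts,
//!         &mut size_info,
//!     );
//!     size_info
//! }
//! ```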
use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::khr::acceleration_structure::Device { /// #[inline] pub unsafe fn create_acceleration_structure( &self, create_info: &vk::AccelerationStructureCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut accel_struct = mem::MaybeUninit::uninit(); (self.fp.create_acceleration_structure_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), accel_struct.as_mut_ptr(), ) .assume_init_on_success(accel_struct) } /// #[inline] pub unsafe fn destroy_acceleration_structure( &self, accel_struct: vk::AccelerationStructureKHR, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_acceleration_structure_khr)( self.handle, accel_struct, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn cmd_build_acceleration_structures( &self, command_buffer: vk::CommandBuffer, infos: &[vk::AccelerationStructureBuildGeometryInfoKHR<'_>], build_range_infos: &[&[vk::AccelerationStructureBuildRangeInfoKHR]], ) { assert_eq!(infos.len(), build_range_infos.len()); let build_range_infos = build_range_infos .iter() .zip(infos.iter()) .map(|(range_info, info)| { assert_eq!(range_info.len(), info.geometry_count as usize); range_info.as_ptr() }) .collect::>(); (self.fp.cmd_build_acceleration_structures_khr)( command_buffer, infos.len() as _, infos.as_ptr(), build_range_infos.as_ptr(), ); } /// #[inline] pub unsafe fn cmd_build_acceleration_structures_indirect( &self, command_buffer: vk::CommandBuffer, infos: &[vk::AccelerationStructureBuildGeometryInfoKHR<'_>], indirect_device_addresses: &[vk::DeviceAddress], indirect_strides: &[u32], max_primitive_counts: &[&[u32]], ) { assert_eq!(infos.len(), indirect_device_addresses.len()); assert_eq!(infos.len(), indirect_strides.len()); assert_eq!(infos.len(), max_primitive_counts.len()); let max_primitive_counts = max_primitive_counts .iter() .zip(infos.iter()) .map(|(cnt, info)| { assert_eq!(cnt.len(), info.geometry_count as usize); cnt.as_ptr() }) .collect::>(); (self.fp.cmd_build_acceleration_structures_indirect_khr)( command_buffer, infos.len() as _, infos.as_ptr(), indirect_device_addresses.as_ptr(), indirect_strides.as_ptr(), max_primitive_counts.as_ptr(), ); } /// #[inline] pub unsafe fn build_acceleration_structures( &self, deferred_operation: vk::DeferredOperationKHR, infos: &[vk::AccelerationStructureBuildGeometryInfoKHR<'_>], build_range_infos: &[&[vk::AccelerationStructureBuildRangeInfoKHR]], ) -> VkResult<()> { assert_eq!(infos.len(), build_range_infos.len()); let build_range_infos = build_range_infos .iter() .zip(infos.iter()) .map(|(range_info, info)| { assert_eq!(range_info.len(), info.geometry_count as usize); range_info.as_ptr() }) .collect::>(); (self.fp.build_acceleration_structures_khr)( self.handle, deferred_operation, infos.len() as _, infos.as_ptr(), build_range_infos.as_ptr(), ) .result() } /// #[inline] pub unsafe fn copy_acceleration_structure( &self, deferred_operation: vk::DeferredOperationKHR, info: &vk::CopyAccelerationStructureInfoKHR<'_>, ) -> VkResult<()> { (self.fp.copy_acceleration_structure_khr)(self.handle, deferred_operation, info).result() } /// #[inline] pub unsafe fn copy_acceleration_structure_to_memory( &self, deferred_operation: vk::DeferredOperationKHR, info: &vk::CopyAccelerationStructureToMemoryInfoKHR<'_>, ) -> VkResult<()> { (self.fp.copy_acceleration_structure_to_memory_khr)(self.handle, deferred_operation, info) .result() } /// #[inline] 
pub unsafe fn copy_memory_to_acceleration_structure( &self, deferred_operation: vk::DeferredOperationKHR, info: &vk::CopyMemoryToAccelerationStructureInfoKHR<'_>, ) -> VkResult<()> { (self.fp.copy_memory_to_acceleration_structure_khr)(self.handle, deferred_operation, info) .result() } /// #[inline] pub unsafe fn write_acceleration_structures_properties( &self, acceleration_structures: &[vk::AccelerationStructureKHR], query_type: vk::QueryType, data: &mut [u8], stride: usize, ) -> VkResult<()> { (self.fp.write_acceleration_structures_properties_khr)( self.handle, acceleration_structures.len() as _, acceleration_structures.as_ptr(), query_type, data.len(), data.as_mut_ptr().cast(), stride, ) .result() } /// #[inline] pub unsafe fn cmd_copy_acceleration_structure( &self, command_buffer: vk::CommandBuffer, info: &vk::CopyAccelerationStructureInfoKHR<'_>, ) { (self.fp.cmd_copy_acceleration_structure_khr)(command_buffer, info); } /// #[inline] pub unsafe fn cmd_copy_acceleration_structure_to_memory( &self, command_buffer: vk::CommandBuffer, info: &vk::CopyAccelerationStructureToMemoryInfoKHR<'_>, ) { (self.fp.cmd_copy_acceleration_structure_to_memory_khr)(command_buffer, info); } /// #[inline] pub unsafe fn cmd_copy_memory_to_acceleration_structure( &self, command_buffer: vk::CommandBuffer, info: &vk::CopyMemoryToAccelerationStructureInfoKHR<'_>, ) { (self.fp.cmd_copy_memory_to_acceleration_structure_khr)(command_buffer, info); } /// #[inline] pub unsafe fn get_acceleration_structure_device_address( &self, info: &vk::AccelerationStructureDeviceAddressInfoKHR<'_>, ) -> vk::DeviceAddress { (self.fp.get_acceleration_structure_device_address_khr)(self.handle, info) } /// #[inline] pub unsafe fn cmd_write_acceleration_structures_properties( &self, command_buffer: vk::CommandBuffer, structures: &[vk::AccelerationStructureKHR], query_type: vk::QueryType, query_pool: vk::QueryPool, first_query: u32, ) { (self.fp.cmd_write_acceleration_structures_properties_khr)( command_buffer, structures.len() as _, structures.as_ptr(), query_type, query_pool, first_query, ); } /// #[inline] pub unsafe fn get_device_acceleration_structure_compatibility( &self, version: &vk::AccelerationStructureVersionInfoKHR<'_>, ) -> vk::AccelerationStructureCompatibilityKHR { let mut compatibility = mem::MaybeUninit::uninit(); (self.fp.get_device_acceleration_structure_compatibility_khr)( self.handle, version, compatibility.as_mut_ptr(), ); compatibility.assume_init() } /// #[inline] pub unsafe fn get_acceleration_structure_build_sizes( &self, build_type: vk::AccelerationStructureBuildTypeKHR, build_info: &vk::AccelerationStructureBuildGeometryInfoKHR<'_>, max_primitive_counts: &[u32], size_info: &mut vk::AccelerationStructureBuildSizesInfoKHR<'_>, ) { assert_eq!(max_primitive_counts.len(), build_info.geometry_count as _); (self.fp.get_acceleration_structure_build_sizes_khr)( self.handle, build_type, build_info, max_primitive_counts.as_ptr(), size_info, ) } } ash-0.38.0+1.3.281/src/extensions/khr/android_surface.rs000075500000000000000000000015221046102023000205500ustar 00000000000000//! 
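//!
//! A minimal sketch, assuming `android_surface` is an
//! `ash::khr::android_surface::Instance` created by the application; the
//! create-info is left default here and would need the `ANativeWindow`
//! pointer filled in:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn create_surface(
//!     android_surface: &ash::khr::android_surface::Instance,
//! ) -> ash::prelude::VkResult<vk::SurfaceKHR> {
//!     // Point `window` at the application's ANativeWindow before creating.
//!     let create_info = vk::AndroidSurfaceCreateInfoKHR::default();
//!     android_surface.create_android_surface(&create_info, None)
//! }
//! ```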
use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::android_surface::Instance { /// #[inline] pub unsafe fn create_android_surface( &self, create_info: &vk::AndroidSurfaceCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_android_surface_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/khr/buffer_device_address.rs000064400000000000000000000023231046102023000217120ustar 00000000000000//! use crate::vk; impl crate::khr::buffer_device_address::Device { /// #[inline] pub unsafe fn get_buffer_device_address( &self, info: &vk::BufferDeviceAddressInfoKHR<'_>, ) -> vk::DeviceAddress { (self.fp.get_buffer_device_address_khr)(self.handle, info) } /// #[inline] pub unsafe fn get_buffer_opaque_capture_address( &self, info: &vk::BufferDeviceAddressInfoKHR<'_>, ) -> u64 { (self.fp.get_buffer_opaque_capture_address_khr)(self.handle, info) } /// #[inline] pub unsafe fn get_device_memory_opaque_capture_address( &self, info: &vk::DeviceMemoryOpaqueCaptureAddressInfoKHR<'_>, ) -> u64 { (self.fp.get_device_memory_opaque_capture_address_khr)(self.handle, info) } } ash-0.38.0+1.3.281/src/extensions/khr/calibrated_timestamps.rs000064400000000000000000000032221046102023000217540ustar 00000000000000//! use crate::prelude::*; use crate::vk; use alloc::vec::Vec; use core::mem; impl crate::khr::calibrated_timestamps::Device { /// /// /// Returns a tuple containing `(timestamps, max_deviation)` #[inline] pub unsafe fn get_calibrated_timestamps( &self, info: &[vk::CalibratedTimestampInfoKHR<'_>], ) -> VkResult<(Vec, u64)> { let mut timestamps = Vec::with_capacity(info.len()); let mut max_deviation = mem::MaybeUninit::uninit(); let max_deviation = (self.fp.get_calibrated_timestamps_khr)( self.handle, info.len() as u32, info.as_ptr(), timestamps.as_mut_ptr(), max_deviation.as_mut_ptr(), ) .assume_init_on_success(max_deviation)?; timestamps.set_len(info.len()); Ok((timestamps, max_deviation)) } } impl crate::khr::calibrated_timestamps::Instance { /// #[inline] pub unsafe fn get_physical_device_calibrateable_time_domains( &self, physical_device: vk::PhysicalDevice, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_calibrateable_time_domains_khr)( physical_device, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/khr/cooperative_matrix.rs000064400000000000000000000014701046102023000213230ustar 00000000000000//! use crate::prelude::*; use crate::vk; use alloc::vec::Vec; impl crate::khr::cooperative_matrix::Instance { /// #[inline] pub unsafe fn get_physical_device_cooperative_matrix_properties( &self, physical_device: vk::PhysicalDevice, ) -> VkResult>> { read_into_defaulted_vector(|count, data| { (self .fp .get_physical_device_cooperative_matrix_properties_khr)( physical_device, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/khr/copy_commands2.rs000064400000000000000000000044761046102023000203450ustar 00000000000000//! 
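//!
//! A minimal sketch of a whole-buffer copy with the `2` entry points,
//! assuming `copy2` is an `ash::khr::copy_commands2::Device` created by the
//! application and both buffers are at least `size` bytes long:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn copy_whole_buffer(
//!     copy2: &ash::khr::copy_commands2::Device,
//!     cmd: vk::CommandBuffer,
//!     src: vk::Buffer,
//!     dst: vk::Buffer,
//!     size: vk::DeviceSize,
//! ) {
//!     let region = vk::BufferCopy2KHR::default().size(size);
//!     let info = vk::CopyBufferInfo2KHR::default()
//!         .src_buffer(src)
//!         .dst_buffer(dst)
//!         .regions(core::slice::from_ref(&region));
//!     copy2.cmd_copy_buffer2(cmd, &info);
//! }
//! ```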
use crate::vk; impl crate::khr::copy_commands2::Device { /// #[inline] pub unsafe fn cmd_copy_buffer2( &self, command_buffer: vk::CommandBuffer, copy_buffer_info: &vk::CopyBufferInfo2KHR<'_>, ) { (self.fp.cmd_copy_buffer2_khr)(command_buffer, copy_buffer_info) } /// #[inline] pub unsafe fn cmd_copy_image2( &self, command_buffer: vk::CommandBuffer, copy_image_info: &vk::CopyImageInfo2KHR<'_>, ) { (self.fp.cmd_copy_image2_khr)(command_buffer, copy_image_info) } /// #[inline] pub unsafe fn cmd_copy_buffer_to_image2( &self, command_buffer: vk::CommandBuffer, copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2KHR<'_>, ) { (self.fp.cmd_copy_buffer_to_image2_khr)(command_buffer, copy_buffer_to_image_info) } /// #[inline] pub unsafe fn cmd_copy_image_to_buffer2( &self, command_buffer: vk::CommandBuffer, copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2KHR<'_>, ) { (self.fp.cmd_copy_image_to_buffer2_khr)(command_buffer, copy_image_to_buffer_info) } /// #[inline] pub unsafe fn cmd_blit_image2( &self, command_buffer: vk::CommandBuffer, blit_image_info: &vk::BlitImageInfo2KHR<'_>, ) { (self.fp.cmd_blit_image2_khr)(command_buffer, blit_image_info) } /// #[inline] pub unsafe fn cmd_resolve_image2( &self, command_buffer: vk::CommandBuffer, resolve_image_info: &vk::ResolveImageInfo2KHR<'_>, ) { (self.fp.cmd_resolve_image2_khr)(command_buffer, resolve_image_info) } } ash-0.38.0+1.3.281/src/extensions/khr/create_renderpass2.rs000064400000000000000000000041201046102023000211650ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::create_renderpass2::Device { /// #[inline] pub unsafe fn create_render_pass2( &self, create_info: &vk::RenderPassCreateInfo2<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut renderpass = mem::MaybeUninit::uninit(); (self.fp.create_render_pass2_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), renderpass.as_mut_ptr(), ) .assume_init_on_success(renderpass) } /// #[inline] pub unsafe fn cmd_begin_render_pass2( &self, command_buffer: vk::CommandBuffer, render_pass_begin_info: &vk::RenderPassBeginInfo<'_>, subpass_begin_info: &vk::SubpassBeginInfo<'_>, ) { (self.fp.cmd_begin_render_pass2_khr)( command_buffer, render_pass_begin_info, subpass_begin_info, ); } /// #[inline] pub unsafe fn cmd_next_subpass2( &self, command_buffer: vk::CommandBuffer, subpass_begin_info: &vk::SubpassBeginInfo<'_>, subpass_end_info: &vk::SubpassEndInfo<'_>, ) { (self.fp.cmd_next_subpass2_khr)(command_buffer, subpass_begin_info, subpass_end_info); } /// #[inline] pub unsafe fn cmd_end_render_pass2( &self, command_buffer: vk::CommandBuffer, subpass_end_info: &vk::SubpassEndInfo<'_>, ) { (self.fp.cmd_end_render_pass2_khr)(command_buffer, subpass_end_info); } } ash-0.38.0+1.3.281/src/extensions/khr/deferred_host_operations.rs000064400000000000000000000044241046102023000225010ustar 00000000000000//! 
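//!
//! A minimal sketch of driving a deferred operation, assuming `dho` is an
//! `ash::khr::deferred_host_operations::Device` created by the application;
//! real code would inspect the join result for `THREAD_IDLE_KHR` /
//! `THREAD_DONE_KHR` and schedule worker threads accordingly:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn drive_deferred_operation(
//!     dho: &ash::khr::deferred_host_operations::Device,
//! ) -> ash::prelude::VkResult<()> {
//!     let op = dho.create_deferred_operation(None)?;
//!     // Hand `op` to an operation that supports deferral, then help execute it.
//!     let _ = dho.deferred_operation_join(op);
//!     let result = dho.get_deferred_operation_result(op);
//!     dho.destroy_deferred_operation(op, None);
//!     result
//! }
//! ```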
use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::deferred_host_operations::Device { /// #[inline] pub unsafe fn create_deferred_operation( &self, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut operation = mem::MaybeUninit::uninit(); (self.fp.create_deferred_operation_khr)( self.handle, allocation_callbacks.as_raw_ptr(), operation.as_mut_ptr(), ) .assume_init_on_success(operation) } /// #[inline] pub unsafe fn deferred_operation_join( &self, operation: vk::DeferredOperationKHR, ) -> VkResult<()> { (self.fp.deferred_operation_join_khr)(self.handle, operation).result() } /// #[inline] pub unsafe fn destroy_deferred_operation( &self, operation: vk::DeferredOperationKHR, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_deferred_operation_khr)( self.handle, operation, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn get_deferred_operation_max_concurrency( &self, operation: vk::DeferredOperationKHR, ) -> u32 { (self.fp.get_deferred_operation_max_concurrency_khr)(self.handle, operation) } /// #[inline] pub unsafe fn get_deferred_operation_result( &self, operation: vk::DeferredOperationKHR, ) -> VkResult<()> { (self.fp.get_deferred_operation_result_khr)(self.handle, operation).result() } } ash-0.38.0+1.3.281/src/extensions/khr/device_group.rs000064400000000000000000000132711046102023000200740ustar 00000000000000//! #[cfg(doc)] use crate::khr; use crate::prelude::*; use crate::vk; use alloc::vec::Vec; use core::mem; impl crate::khr::device_group::Device { /// #[inline] pub unsafe fn get_device_group_peer_memory_features( &self, heap_index: u32, local_device_index: u32, remote_device_index: u32, ) -> vk::PeerMemoryFeatureFlags { let mut peer_memory_features = mem::MaybeUninit::uninit(); (self.fp.get_device_group_peer_memory_features_khr)( self.handle, heap_index, local_device_index, remote_device_index, peer_memory_features.as_mut_ptr(), ); peer_memory_features.assume_init() } /// #[inline] pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) { (self.fp.cmd_set_device_mask_khr)(command_buffer, device_mask) } /// #[inline] pub unsafe fn cmd_dispatch_base( &self, command_buffer: vk::CommandBuffer, base_group: (u32, u32, u32), group_count: (u32, u32, u32), ) { (self.fp.cmd_dispatch_base_khr)( command_buffer, base_group.0, base_group.1, base_group.2, group_count.0, group_count.1, group_count.2, ) } /// Requires [`VK_KHR_surface`] to be enabled. /// /// Also available as [`khr::swapchain::Device::get_device_group_present_capabilities()`] since [Vulkan 1.1]. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_surface`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html #[inline] pub unsafe fn get_device_group_present_capabilities( &self, device_group_present_capabilities: &mut vk::DeviceGroupPresentCapabilitiesKHR<'_>, ) -> VkResult<()> { (self.fp.get_device_group_present_capabilities_khr)( self.handle, device_group_present_capabilities, ) .result() } /// Requires [`VK_KHR_surface`] to be enabled. /// /// Also available as [`khr::swapchain::Device::get_device_group_surface_present_modes()`] since [Vulkan 1.1]. 
/// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_surface`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html #[inline] pub unsafe fn get_device_group_surface_present_modes( &self, surface: vk::SurfaceKHR, ) -> VkResult { let mut modes = mem::MaybeUninit::uninit(); (self.fp.get_device_group_surface_present_modes_khr)( self.handle, surface, modes.as_mut_ptr(), ) .assume_init_on_success(modes) } /// On success, returns the next image's index and whether the swapchain is suboptimal for the surface. /// /// Requires [`VK_KHR_swapchain`] to be enabled. /// /// Also available as [`khr::swapchain::Device::acquire_next_image2()`] since [Vulkan 1.1]. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_swapchain`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_swapchain.html #[inline] pub unsafe fn acquire_next_image2( &self, acquire_info: &vk::AcquireNextImageInfoKHR<'_>, ) -> VkResult<(u32, bool)> { let mut index = mem::MaybeUninit::uninit(); let err_code = (self.fp.acquire_next_image2_khr)(self.handle, acquire_info, index.as_mut_ptr()); match err_code { vk::Result::SUCCESS => Ok((index.assume_init(), false)), vk::Result::SUBOPTIMAL_KHR => Ok((index.assume_init(), true)), _ => Err(err_code), } } } impl crate::khr::device_group::Instance { /// Requires [`VK_KHR_surface`] to be enabled. /// /// Also available as [`khr::swapchain::Instance::get_physical_device_present_rectangles()`] since [Vulkan 1.1]. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_surface`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html #[inline] pub unsafe fn get_physical_device_present_rectangles( &self, physical_device: vk::PhysicalDevice, surface: vk::SurfaceKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_present_rectangles_khr)( physical_device, surface, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/khr/device_group_creation.rs000064400000000000000000000030021046102023000217470ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; use core::ptr; impl crate::khr::device_group_creation::Instance { /// Retrieve the number of elements to pass to [`enumerate_physical_device_groups()`][Self::enumerate_physical_device_groups()] #[inline] pub unsafe fn enumerate_physical_device_groups_len(&self) -> VkResult { let mut group_count = mem::MaybeUninit::uninit(); (self.fp.enumerate_physical_device_groups_khr)( self.handle, group_count.as_mut_ptr(), ptr::null_mut(), ) .assume_init_on_success(group_count) .map(|c| c as usize) } /// /// /// Call [`enumerate_physical_device_groups_len()`][Self::enumerate_physical_device_groups_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn enumerate_physical_device_groups( &self, out: &mut [vk::PhysicalDeviceGroupProperties<'_>], ) -> VkResult<()> { let mut count = out.len() as u32; (self.fp.enumerate_physical_device_groups_khr)(self.handle, &mut count, out.as_mut_ptr()) .result()?; assert_eq!(count as usize, out.len()); Ok(()) } } ash-0.38.0+1.3.281/src/extensions/khr/display.rs000075500000000000000000000103421046102023000170650ustar 00000000000000//! 
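//!
//! A minimal sketch of enumerating displays and their modes, assuming
//! `display` is an `ash::khr::display::Instance` created by the application:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn list_display_modes(
//!     display: &ash::khr::display::Instance,
//!     pdevice: vk::PhysicalDevice,
//! ) -> ash::prelude::VkResult<()> {
//!     for props in display.get_physical_device_display_properties(pdevice)? {
//!         let modes = display.get_display_mode_properties(pdevice, props.display)?;
//!         // Inspect `modes` for resolution and refresh rate here.
//!         let _ = modes;
//!     }
//!     Ok(())
//! }
//! ```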
use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::khr::display::Instance { /// #[inline] pub unsafe fn get_physical_device_display_properties( &self, physical_device: vk::PhysicalDevice, ) -> VkResult>> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_display_properties_khr)(physical_device, count, data) }) } /// #[inline] pub unsafe fn get_physical_device_display_plane_properties( &self, physical_device: vk::PhysicalDevice, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_display_plane_properties_khr)(physical_device, count, data) }) } /// #[inline] pub unsafe fn get_display_plane_supported_displays( &self, physical_device: vk::PhysicalDevice, plane_index: u32, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_display_plane_supported_displays_khr)( physical_device, plane_index, count, data, ) }) } /// #[inline] pub unsafe fn get_display_mode_properties( &self, physical_device: vk::PhysicalDevice, display: vk::DisplayKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_display_mode_properties_khr)(physical_device, display, count, data) }) } /// #[inline] pub unsafe fn create_display_mode( &self, physical_device: vk::PhysicalDevice, display: vk::DisplayKHR, create_info: &vk::DisplayModeCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut display_mode = mem::MaybeUninit::uninit(); (self.fp.create_display_mode_khr)( physical_device, display, create_info, allocation_callbacks.as_raw_ptr(), display_mode.as_mut_ptr(), ) .assume_init_on_success(display_mode) } /// #[inline] pub unsafe fn get_display_plane_capabilities( &self, physical_device: vk::PhysicalDevice, mode: vk::DisplayModeKHR, plane_index: u32, ) -> VkResult { let mut display_plane_capabilities = mem::MaybeUninit::uninit(); (self.fp.get_display_plane_capabilities_khr)( physical_device, mode, plane_index, display_plane_capabilities.as_mut_ptr(), ) .assume_init_on_success(display_plane_capabilities) } /// #[inline] pub unsafe fn create_display_plane_surface( &self, create_info: &vk::DisplaySurfaceCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_display_plane_surface_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/khr/display_swapchain.rs000075500000000000000000000016771046102023000211350ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; impl crate::khr::display_swapchain::Device { /// #[inline] pub unsafe fn create_shared_swapchains( &self, create_infos: &[vk::SwapchainCreateInfoKHR<'_>], allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult> { let mut swapchains = Vec::with_capacity(create_infos.len()); (self.fp.create_shared_swapchains_khr)( self.handle, create_infos.len() as u32, create_infos.as_ptr(), allocation_callbacks.as_raw_ptr(), swapchains.as_mut_ptr(), ) .set_vec_len_on_success(swapchains, create_infos.len()) } } ash-0.38.0+1.3.281/src/extensions/khr/draw_indirect_count.rs000064400000000000000000000027461046102023000214540ustar 00000000000000//! 
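//!
//! A minimal sketch, assuming `dic` is an `ash::khr::draw_indirect_count::Device`
//! created by the application, `indirect_buffer` holds tightly packed
//! `vk::DrawIndirectCommand`s starting at offset 0, and `count_buffer` holds
//! the draw count at offset 0:
//!
//! ```no_run
//! use ash::vk;
//!
//! unsafe fn draw_with_gpu_count(
//!     dic: &ash::khr::draw_indirect_count::Device,
//!     cmd: vk::CommandBuffer,
//!     indirect_buffer: vk::Buffer,
//!     count_buffer: vk::Buffer,
//!     max_draw_count: u32,
//! ) {
//!     dic.cmd_draw_indirect_count(
//!         cmd,
//!         indirect_buffer,
//!         0,
//!         count_buffer,
//!         0,
//!         max_draw_count,
//!         core::mem::size_of::<vk::DrawIndirectCommand>() as u32,
//!     );
//! }
//! ```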
use crate::vk; impl crate::khr::draw_indirect_count::Device { /// #[inline] pub unsafe fn cmd_draw_indexed_indirect_count( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, count_buffer: vk::Buffer, count_buffer_offset: vk::DeviceSize, max_draw_count: u32, stride: u32, ) { (self.fp.cmd_draw_indexed_indirect_count_khr)( command_buffer, buffer, offset, count_buffer, count_buffer_offset, max_draw_count, stride, ); } /// #[inline] pub unsafe fn cmd_draw_indirect_count( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, count_buffer: vk::Buffer, count_buffer_offset: vk::DeviceSize, max_draw_count: u32, stride: u32, ) { (self.fp.cmd_draw_indirect_count_khr)( command_buffer, buffer, offset, count_buffer, count_buffer_offset, max_draw_count, stride, ); } } ash-0.38.0+1.3.281/src/extensions/khr/dynamic_rendering.rs000064400000000000000000000014131046102023000210750ustar 00000000000000//! use crate::vk; impl crate::khr::dynamic_rendering::Device { /// #[inline] pub unsafe fn cmd_begin_rendering( &self, command_buffer: vk::CommandBuffer, rendering_info: &vk::RenderingInfoKHR<'_>, ) { (self.fp.cmd_begin_rendering_khr)(command_buffer, rendering_info) } /// #[inline] pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) { (self.fp.cmd_end_rendering_khr)(command_buffer) } } ash-0.38.0+1.3.281/src/extensions/khr/dynamic_rendering_local_read.rs000064400000000000000000000020341046102023000232420ustar 00000000000000//! use crate::vk; impl crate::khr::dynamic_rendering_local_read::Device { /// #[inline] pub unsafe fn cmd_set_rendering_attachment_locations( &self, command_buffer: vk::CommandBuffer, location_info: &vk::RenderingAttachmentLocationInfoKHR<'_>, ) { (self.fp.cmd_set_rendering_attachment_locations_khr)(command_buffer, location_info) } /// #[inline] pub unsafe fn cmd_set_rendering_input_attachment_indices( &self, command_buffer: vk::CommandBuffer, location_info: &vk::RenderingInputAttachmentIndexInfoKHR<'_>, ) { (self.fp.cmd_set_rendering_input_attachment_indices_khr)(command_buffer, location_info) } } ash-0.38.0+1.3.281/src/extensions/khr/external_fence_fd.rs000064400000000000000000000016041046102023000210510ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::external_fence_fd::Device { /// #[inline] pub unsafe fn import_fence_fd( &self, import_info: &vk::ImportFenceFdInfoKHR<'_>, ) -> VkResult<()> { (self.fp.import_fence_fd_khr)(self.handle, import_info).result() } /// #[inline] pub unsafe fn get_fence_fd(&self, get_info: &vk::FenceGetFdInfoKHR<'_>) -> VkResult { let mut fd = mem::MaybeUninit::uninit(); (self.fp.get_fence_fd_khr)(self.handle, get_info, fd.as_mut_ptr()) .assume_init_on_success(fd) } } ash-0.38.0+1.3.281/src/extensions/khr/external_fence_win32.rs000064400000000000000000000020001046102023000214110ustar 00000000000000//! 
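//!
//! A minimal export sketch (illustrative only): the fence is assumed to have been
//! created with an export-capable `vk::ExportFenceCreateInfo` chain, which is not
//! shown here; the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, prelude::VkResult, vk};
//!
//! /// Hypothetical helper: export an opaque Win32 handle that refers to `fence`.
//! unsafe fn export_fence_handle(
//!     ext_fence: &khr::external_fence_win32::Device,
//!     fence: vk::Fence,
//! ) -> VkResult<vk::HANDLE> {
//!     let get_info = vk::FenceGetWin32HandleInfoKHR::default()
//!         .fence(fence)
//!         .handle_type(vk::ExternalFenceHandleTypeFlags::OPAQUE_WIN32);
//!     ext_fence.get_fence_win32_handle(&get_info)
//! }
//! ```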
use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::external_fence_win32::Device { /// #[inline] pub unsafe fn import_fence_win32_handle( &self, import_info: &vk::ImportFenceWin32HandleInfoKHR<'_>, ) -> VkResult<()> { (self.fp.import_fence_win32_handle_khr)(self.handle, import_info).result() } /// #[inline] pub unsafe fn get_fence_win32_handle( &self, get_info: &vk::FenceGetWin32HandleInfoKHR<'_>, ) -> VkResult { let mut handle = mem::MaybeUninit::uninit(); (self.fp.get_fence_win32_handle_khr)(self.handle, get_info, handle.as_mut_ptr()) .assume_init_on_success(handle) } } ash-0.38.0+1.3.281/src/extensions/khr/external_memory_fd.rs000064400000000000000000000020501046102023000212750ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::external_memory_fd::Device { /// #[inline] pub unsafe fn get_memory_fd(&self, get_fd_info: &vk::MemoryGetFdInfoKHR<'_>) -> VkResult { let mut fd = mem::MaybeUninit::uninit(); (self.fp.get_memory_fd_khr)(self.handle, get_fd_info, fd.as_mut_ptr()) .assume_init_on_success(fd) } /// #[inline] pub unsafe fn get_memory_fd_properties( &self, handle_type: vk::ExternalMemoryHandleTypeFlags, fd: i32, memory_fd_properties: &mut vk::MemoryFdPropertiesKHR<'_>, ) -> VkResult<()> { (self.fp.get_memory_fd_properties_khr)(self.handle, handle_type, fd, memory_fd_properties) .result() } } ash-0.38.0+1.3.281/src/extensions/khr/external_memory_win32.rs000064400000000000000000000023761046102023000216610ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::external_memory_win32::Device { /// #[inline] pub unsafe fn get_memory_win32_handle( &self, create_info: &vk::MemoryGetWin32HandleInfoKHR<'_>, ) -> VkResult { let mut handle = mem::MaybeUninit::uninit(); (self.fp.get_memory_win32_handle_khr)(self.handle, create_info, handle.as_mut_ptr()) .assume_init_on_success(handle) } /// #[inline] pub unsafe fn get_memory_win32_handle_properties( &self, handle_type: vk::ExternalMemoryHandleTypeFlags, handle: vk::HANDLE, memory_win32_handle_properties: &mut vk::MemoryWin32HandlePropertiesKHR<'_>, ) -> VkResult<()> { (self.fp.get_memory_win32_handle_properties_khr)( self.handle, handle_type, handle, memory_win32_handle_properties, ) .result() } } ash-0.38.0+1.3.281/src/extensions/khr/external_semaphore_fd.rs000064400000000000000000000017031046102023000217540ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::external_semaphore_fd::Device { /// #[inline] pub unsafe fn import_semaphore_fd( &self, import_info: &vk::ImportSemaphoreFdInfoKHR<'_>, ) -> VkResult<()> { (self.fp.import_semaphore_fd_khr)(self.handle, import_info).result() } /// #[inline] pub unsafe fn get_semaphore_fd( &self, get_info: &vk::SemaphoreGetFdInfoKHR<'_>, ) -> VkResult { let mut fd = mem::MaybeUninit::uninit(); (self.fp.get_semaphore_fd_khr)(self.handle, get_info, fd.as_mut_ptr()) .assume_init_on_success(fd) } } ash-0.38.0+1.3.281/src/extensions/khr/external_semaphore_win32.rs000064400000000000000000000020501046102023000223210ustar 00000000000000//! 
use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::external_semaphore_win32::Device { /// #[inline] pub unsafe fn import_semaphore_win32_handle( &self, import_info: &vk::ImportSemaphoreWin32HandleInfoKHR<'_>, ) -> VkResult<()> { (self.fp.import_semaphore_win32_handle_khr)(self.handle, import_info).result() } /// #[inline] pub unsafe fn get_semaphore_win32_handle( &self, get_info: &vk::SemaphoreGetWin32HandleInfoKHR<'_>, ) -> VkResult { let mut handle = mem::MaybeUninit::uninit(); (self.fp.get_semaphore_win32_handle_khr)(self.handle, get_info, handle.as_mut_ptr()) .assume_init_on_success(handle) } } ash-0.38.0+1.3.281/src/extensions/khr/get_memory_requirements2.rs000064400000000000000000000047411046102023000224570ustar 00000000000000//! use crate::vk; use core::mem; use core::ptr; impl crate::khr::get_memory_requirements2::Device { /// #[inline] pub unsafe fn get_buffer_memory_requirements2( &self, info: &vk::BufferMemoryRequirementsInfo2KHR<'_>, memory_requirements: &mut vk::MemoryRequirements2KHR<'_>, ) { (self.fp.get_buffer_memory_requirements2_khr)(self.handle, info, memory_requirements); } /// #[inline] pub unsafe fn get_image_memory_requirements2( &self, info: &vk::ImageMemoryRequirementsInfo2KHR<'_>, memory_requirements: &mut vk::MemoryRequirements2KHR<'_>, ) { (self.fp.get_image_memory_requirements2_khr)(self.handle, info, memory_requirements); } /// Retrieve the number of elements to pass to [`get_image_sparse_memory_requirements2()`][Self::get_image_sparse_memory_requirements2()] #[inline] pub unsafe fn get_image_sparse_memory_requirements2_len( &self, info: &vk::ImageSparseMemoryRequirementsInfo2KHR<'_>, ) -> usize { let mut count = mem::MaybeUninit::uninit(); (self.fp.get_image_sparse_memory_requirements2_khr)( self.handle, info, count.as_mut_ptr(), ptr::null_mut(), ); count.assume_init() as usize } /// /// /// Call [`get_image_sparse_memory_requirements2_len()`][Self::get_image_sparse_memory_requirements2_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_image_sparse_memory_requirements2( &self, info: &vk::ImageSparseMemoryRequirementsInfo2KHR<'_>, out: &mut [vk::SparseImageMemoryRequirements2KHR<'_>], ) { let mut count = out.len() as u32; (self.fp.get_image_sparse_memory_requirements2_khr)( self.handle, info, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } } ash-0.38.0+1.3.281/src/extensions/khr/get_physical_device_properties2.rs000064400000000000000000000133411046102023000237470ustar 00000000000000//! 
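//!
//! A minimal query sketch (illustrative only): it shows an extension struct
//! chained through `push_next` and the `*_len()`/slice-filling pattern used by
//! this module; the wrapper and physical device are assumed to come from
//! elsewhere and the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, vk};
//!
//! /// Hypothetical helper: query Vulkan 1.2 features and all queue family properties.
//! unsafe fn query_properties2(
//!     gpdp2: &khr::get_physical_device_properties2::Instance,
//!     pdevice: vk::PhysicalDevice,
//! ) -> bool {
//!     let mut vulkan12 = vk::PhysicalDeviceVulkan12Features::default();
//!     let mut features2 = vk::PhysicalDeviceFeatures2KHR::default().push_next(&mut vulkan12);
//!     gpdp2.get_physical_device_features2(pdevice, &mut features2);
//!
//!     // Two-call pattern: query the count, then fill default-initialized elements.
//!     let len = gpdp2.get_physical_device_queue_family_properties2_len(pdevice);
//!     let mut families = vec![vk::QueueFamilyProperties2KHR::default(); len];
//!     gpdp2.get_physical_device_queue_family_properties2(pdevice, &mut families);
//!
//!     vulkan12.timeline_semaphore == vk::TRUE
//! }
//! ```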
use crate::prelude::*; use crate::vk; use core::mem; use core::ptr; impl crate::khr::get_physical_device_properties2::Instance { /// #[inline] pub unsafe fn get_physical_device_features2( &self, physical_device: vk::PhysicalDevice, features: &mut vk::PhysicalDeviceFeatures2KHR<'_>, ) { (self.fp.get_physical_device_features2_khr)(physical_device, features); } /// #[inline] pub unsafe fn get_physical_device_format_properties2( &self, physical_device: vk::PhysicalDevice, format: vk::Format, format_properties: &mut vk::FormatProperties2KHR<'_>, ) { (self.fp.get_physical_device_format_properties2_khr)( physical_device, format, format_properties, ); } /// #[inline] pub unsafe fn get_physical_device_image_format_properties2( &self, physical_device: vk::PhysicalDevice, image_format_info: &vk::PhysicalDeviceImageFormatInfo2KHR<'_>, image_format_properties: &mut vk::ImageFormatProperties2KHR<'_>, ) -> VkResult<()> { (self.fp.get_physical_device_image_format_properties2_khr)( physical_device, image_format_info, image_format_properties, ) .result() } /// #[inline] pub unsafe fn get_physical_device_memory_properties2( &self, physical_device: vk::PhysicalDevice, memory_properties: &mut vk::PhysicalDeviceMemoryProperties2KHR<'_>, ) { (self.fp.get_physical_device_memory_properties2_khr)(physical_device, memory_properties); } /// #[inline] pub unsafe fn get_physical_device_properties2( &self, physical_device: vk::PhysicalDevice, properties: &mut vk::PhysicalDeviceProperties2KHR<'_>, ) { (self.fp.get_physical_device_properties2_khr)(physical_device, properties); } /// Retrieve the number of elements to pass to [`get_physical_device_queue_family_properties2()`][Self::get_physical_device_queue_family_properties2()] #[inline] pub unsafe fn get_physical_device_queue_family_properties2_len( &self, physical_device: vk::PhysicalDevice, ) -> usize { let mut count = mem::MaybeUninit::uninit(); (self.fp.get_physical_device_queue_family_properties2_khr)( physical_device, count.as_mut_ptr(), ptr::null_mut(), ); count.assume_init() as usize } /// /// /// Call [`get_physical_device_queue_family_properties2_len()`][Self::get_physical_device_queue_family_properties2_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_physical_device_queue_family_properties2( &self, physical_device: vk::PhysicalDevice, out: &mut [vk::QueueFamilyProperties2KHR<'_>], ) { let mut count = out.len() as u32; (self.fp.get_physical_device_queue_family_properties2_khr)( physical_device, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } /// Retrieve the number of elements to pass to [`get_physical_device_sparse_image_format_properties2()`][Self::get_physical_device_sparse_image_format_properties2()] #[inline] pub unsafe fn get_physical_device_sparse_image_format_properties2_len( &self, physical_device: vk::PhysicalDevice, format_info: &vk::PhysicalDeviceSparseImageFormatInfo2KHR<'_>, ) -> usize { let mut count = mem::MaybeUninit::uninit(); (self .fp .get_physical_device_sparse_image_format_properties2_khr)( physical_device, format_info, count.as_mut_ptr(), ptr::null_mut(), ); count.assume_init() as usize } /// /// /// Call [`get_physical_device_sparse_image_format_properties2_len()`][Self::get_physical_device_sparse_image_format_properties2_len()] to query the number of elements to pass to `out`. 
/// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_physical_device_sparse_image_format_properties2( &self, physical_device: vk::PhysicalDevice, format_info: &vk::PhysicalDeviceSparseImageFormatInfo2KHR<'_>, out: &mut [vk::SparseImageFormatProperties2KHR<'_>], ) { let mut count = out.len() as u32; (self .fp .get_physical_device_sparse_image_format_properties2_khr)( physical_device, format_info, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } } ash-0.38.0+1.3.281/src/extensions/khr/get_surface_capabilities2.rs000064400000000000000000000047131046102023000225040ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; use core::ptr; impl crate::khr::get_surface_capabilities2::Instance { /// #[inline] pub unsafe fn get_physical_device_surface_capabilities2( &self, physical_device: vk::PhysicalDevice, surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR<'_>, surface_capabilities: &mut vk::SurfaceCapabilities2KHR<'_>, ) -> VkResult<()> { (self.fp.get_physical_device_surface_capabilities2_khr)( physical_device, surface_info, surface_capabilities, ) .result() } /// Retrieve the number of elements to pass to [`get_physical_device_surface_formats2()`][Self::get_physical_device_surface_formats2()] #[inline] pub unsafe fn get_physical_device_surface_formats2_len( &self, physical_device: vk::PhysicalDevice, surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR<'_>, ) -> VkResult { let mut count = mem::MaybeUninit::uninit(); let err_code = (self.fp.get_physical_device_surface_formats2_khr)( physical_device, surface_info, count.as_mut_ptr(), ptr::null_mut(), ); err_code.assume_init_on_success(count).map(|c| c as usize) } /// /// /// Call [`get_physical_device_surface_formats2_len()`][Self::get_physical_device_surface_formats2_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_physical_device_surface_formats2( &self, physical_device: vk::PhysicalDevice, surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR<'_>, out: &mut [vk::SurfaceFormat2KHR<'_>], ) -> VkResult<()> { let mut count = out.len() as u32; let err_code = (self.fp.get_physical_device_surface_formats2_khr)( physical_device, surface_info, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); err_code.result() } } ash-0.38.0+1.3.281/src/extensions/khr/line_rasterization.rs000064400000000000000000000011501046102023000213170ustar 00000000000000//! use crate::vk; impl crate::khr::line_rasterization::Device { /// #[inline] pub unsafe fn cmd_set_line_stipple( &self, command_buffer: vk::CommandBuffer, line_stipple_factor: u32, line_stipple_pattern: u16, ) { (self.fp.cmd_set_line_stipple_khr)( command_buffer, line_stipple_factor, line_stipple_pattern, ) } } ash-0.38.0+1.3.281/src/extensions/khr/maintenance1.rs000064400000000000000000000007601046102023000177630ustar 00000000000000//! use crate::vk; impl crate::khr::maintenance1::Device { /// #[inline] pub unsafe fn trim_command_pool( &self, command_pool: vk::CommandPool, flags: vk::CommandPoolTrimFlagsKHR, ) { (self.fp.trim_command_pool_khr)(self.handle, command_pool, flags); } } ash-0.38.0+1.3.281/src/extensions/khr/maintenance3.rs000064400000000000000000000010761046102023000177660ustar 00000000000000//! 
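//!
//! A minimal support-query sketch (illustrative only): `bindings` describes a
//! hypothetical layout and the wrapper is assumed to be created elsewhere; the
//! helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, vk};
//!
//! /// Hypothetical helper: ask whether a descriptor set layout would be supported
//! /// before actually creating it.
//! unsafe fn layout_is_supported(
//!     maintenance3: &khr::maintenance3::Device,
//!     bindings: &[vk::DescriptorSetLayoutBinding<'_>],
//! ) -> bool {
//!     let create_info = vk::DescriptorSetLayoutCreateInfo::default().bindings(bindings);
//!     let mut support = vk::DescriptorSetLayoutSupportKHR::default();
//!     maintenance3.get_descriptor_set_layout_support(&create_info, &mut support);
//!     support.supported == vk::TRUE
//! }
//! ```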
use crate::vk; impl crate::khr::maintenance3::Device { /// #[inline] pub unsafe fn get_descriptor_set_layout_support( &self, create_info: &vk::DescriptorSetLayoutCreateInfo<'_>, out: &mut vk::DescriptorSetLayoutSupportKHR<'_>, ) { (self.fp.get_descriptor_set_layout_support_khr)(self.handle, create_info, out); } } ash-0.38.0+1.3.281/src/extensions/khr/maintenance4.rs000064400000000000000000000051051046102023000177640ustar 00000000000000//! use crate::vk; use core::mem; use core::ptr; impl crate::khr::maintenance4::Device { /// #[inline] pub unsafe fn get_device_buffer_memory_requirements( &self, memory_requirements: &vk::DeviceBufferMemoryRequirementsKHR<'_>, out: &mut vk::MemoryRequirements2<'_>, ) { (self.fp.get_device_buffer_memory_requirements_khr)(self.handle, memory_requirements, out) } /// #[inline] pub unsafe fn get_device_image_memory_requirements( &self, memory_requirements: &vk::DeviceImageMemoryRequirementsKHR<'_>, out: &mut vk::MemoryRequirements2<'_>, ) { (self.fp.get_device_image_memory_requirements_khr)(self.handle, memory_requirements, out) } /// Retrieve the number of elements to pass to [`get_device_image_sparse_memory_requirements()`][Self::get_device_image_sparse_memory_requirements()] #[inline] pub unsafe fn get_device_image_sparse_memory_requirements_len( &self, memory_requirements: &vk::DeviceImageMemoryRequirementsKHR<'_>, ) -> usize { let mut count = mem::MaybeUninit::uninit(); (self.fp.get_device_image_sparse_memory_requirements_khr)( self.handle, memory_requirements, count.as_mut_ptr(), ptr::null_mut(), ); count.assume_init() as usize } /// /// /// Call [`get_device_image_sparse_memory_requirements_len()`][Self::get_device_image_sparse_memory_requirements_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_device_image_sparse_memory_requirements( &self, memory_requirements: &vk::DeviceImageMemoryRequirementsKHR<'_>, out: &mut [vk::SparseImageMemoryRequirements2<'_>], ) { let mut count = out.len() as u32; (self.fp.get_device_image_sparse_memory_requirements_khr)( self.handle, memory_requirements, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } } ash-0.38.0+1.3.281/src/extensions/khr/maintenance5.rs000064400000000000000000000051171046102023000177700ustar 00000000000000//! #[cfg(doc)] use crate::ext; use crate::vk; use core::mem; impl crate::khr::maintenance5::Device { /// #[inline] pub unsafe fn cmd_bind_index_buffer2( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, size: vk::DeviceSize, index_type: vk::IndexType, ) { (self.fp.cmd_bind_index_buffer2_khr)(command_buffer, buffer, offset, size, index_type) } /// #[inline] pub unsafe fn get_rendering_area_granularity( &self, rendering_area_info: &vk::RenderingAreaInfoKHR<'_>, ) -> vk::Extent2D { let mut granularity = mem::MaybeUninit::uninit(); (self.fp.get_rendering_area_granularity_khr)( self.handle, rendering_area_info, granularity.as_mut_ptr(), ); granularity.assume_init() } /// #[inline] pub unsafe fn get_device_image_subresource_layout( &self, info: &vk::DeviceImageSubresourceInfoKHR<'_>, layout: &mut vk::SubresourceLayout2KHR<'_>, ) { (self.fp.get_device_image_subresource_layout_khr)(self.handle, info, layout) } /// /// /// Also available as [`ext::host_image_copy::Device::get_image_subresource_layout2()`] /// when [`VK_EXT_host_image_copy`] is enabled. 
/// /// Also available as [`ext::image_compression_control::Device::get_image_subresource_layout2()`] /// when [`VK_EXT_image_compression_control`] is enabled. /// /// [`VK_EXT_host_image_copy`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_host_image_copy.html /// [`VK_EXT_image_compression_control`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_image_compression_control.html #[inline] pub unsafe fn get_image_subresource_layout2( &self, image: vk::Image, subresource: &vk::ImageSubresource2KHR<'_>, layout: &mut vk::SubresourceLayout2KHR<'_>, ) { (self.fp.get_image_subresource_layout2_khr)(self.handle, image, subresource, layout) } } ash-0.38.0+1.3.281/src/extensions/khr/maintenance6.rs000064400000000000000000000055001046102023000177650ustar 00000000000000//! use crate::vk; impl crate::khr::maintenance6::Device { /// #[inline] pub unsafe fn cmd_bind_descriptor_sets2( &self, command_buffer: vk::CommandBuffer, bind_descriptor_sets_info: &vk::BindDescriptorSetsInfoKHR<'_>, ) { (self.fp.cmd_bind_descriptor_sets2_khr)(command_buffer, bind_descriptor_sets_info) } /// #[inline] pub unsafe fn cmd_push_constants2( &self, command_buffer: vk::CommandBuffer, push_constants_info: &vk::PushConstantsInfoKHR<'_>, ) { (self.fp.cmd_push_constants2_khr)(command_buffer, push_constants_info) } /// #[inline] pub unsafe fn cmd_push_descriptor_set2( &self, command_buffer: vk::CommandBuffer, push_descriptor_set_info: &vk::PushDescriptorSetInfoKHR<'_>, ) { (self.fp.cmd_push_descriptor_set2_khr)(command_buffer, push_descriptor_set_info) } /// #[inline] pub unsafe fn cmd_push_descriptor_set_with_template2( &self, command_buffer: vk::CommandBuffer, push_descriptor_set_with_template_info: &vk::PushDescriptorSetWithTemplateInfoKHR<'_>, ) { (self.fp.cmd_push_descriptor_set_with_template2_khr)( command_buffer, push_descriptor_set_with_template_info, ) } /// #[inline] pub unsafe fn cmd_set_descriptor_buffer_offsets2( &self, command_buffer: vk::CommandBuffer, set_descriptor_buffer_offsets_info: &vk::SetDescriptorBufferOffsetsInfoEXT<'_>, ) { (self.fp.cmd_set_descriptor_buffer_offsets2_ext)( command_buffer, set_descriptor_buffer_offsets_info, ) } /// #[inline] pub unsafe fn cmd_bind_descriptor_buffer_embedded_samplers2( &self, command_buffer: vk::CommandBuffer, bind_descriptor_buffer_embedded_samplers_info: &vk::BindDescriptorBufferEmbeddedSamplersInfoEXT<'_>, ) { (self.fp.cmd_bind_descriptor_buffer_embedded_samplers2_ext)( command_buffer, bind_descriptor_buffer_embedded_samplers_info, ) } } ash-0.38.0+1.3.281/src/extensions/khr/mod.rs000064400000000000000000000023361046102023000162000ustar 00000000000000pub mod acceleration_structure; pub mod android_surface; pub mod buffer_device_address; pub mod calibrated_timestamps; pub mod cooperative_matrix; pub mod copy_commands2; pub mod create_renderpass2; pub mod deferred_host_operations; pub mod device_group; pub mod device_group_creation; pub mod display; pub mod display_swapchain; pub mod draw_indirect_count; pub mod dynamic_rendering; pub mod dynamic_rendering_local_read; pub mod external_fence_fd; pub mod external_fence_win32; pub mod external_memory_fd; pub mod external_memory_win32; pub mod external_semaphore_fd; pub mod external_semaphore_win32; pub mod get_memory_requirements2; pub mod get_physical_device_properties2; pub mod get_surface_capabilities2; pub mod line_rasterization; pub mod maintenance1; pub mod maintenance3; pub mod maintenance4; pub mod maintenance5; pub mod maintenance6; pub mod performance_query; pub 
mod pipeline_executable_properties; pub mod present_wait; pub mod push_descriptor; pub mod ray_tracing_maintenance1; pub mod ray_tracing_pipeline; pub mod sampler_ycbcr_conversion; pub mod surface; pub mod swapchain; pub mod synchronization2; pub mod timeline_semaphore; pub mod wayland_surface; pub mod win32_surface; pub mod xcb_surface; pub mod xlib_surface; ash-0.38.0+1.3.281/src/extensions/khr/performance_query.rs000064400000000000000000000074011046102023000211450ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; use core::ptr; impl crate::khr::performance_query::Device { /// #[inline] pub unsafe fn acquire_profiling_lock( &self, info: &vk::AcquireProfilingLockInfoKHR<'_>, ) -> VkResult<()> { (self.fp.acquire_profiling_lock_khr)(self.handle, info).result() } /// #[inline] pub unsafe fn release_profiling_lock(&self) { (self.fp.release_profiling_lock_khr)(self.handle) } } impl crate::khr::performance_query::Instance { /// Retrieve the number of elements to pass to [`enumerate_physical_device_queue_family_performance_query_counters()`][Self::enumerate_physical_device_queue_family_performance_query_counters()] #[inline] pub unsafe fn enumerate_physical_device_queue_family_performance_query_counters_len( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, ) -> VkResult { let mut count = mem::MaybeUninit::uninit(); (self .fp .enumerate_physical_device_queue_family_performance_query_counters_khr)( physical_device, queue_family_index, count.as_mut_ptr(), ptr::null_mut(), ptr::null_mut(), ) .assume_init_on_success(count) .map(|c| c as usize) } /// /// /// Call [`enumerate_physical_device_queue_family_performance_query_counters_len()`][Self::enumerate_physical_device_queue_family_performance_query_counters_len()] to query the number of elements to pass to `out_counters` and `out_counter_descriptions`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn enumerate_physical_device_queue_family_performance_query_counters( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, out_counters: &mut [vk::PerformanceCounterKHR<'_>], out_counter_descriptions: &mut [vk::PerformanceCounterDescriptionKHR<'_>], ) -> VkResult<()> { assert_eq!(out_counters.len(), out_counter_descriptions.len()); let mut count = out_counters.len() as u32; (self .fp .enumerate_physical_device_queue_family_performance_query_counters_khr)( physical_device, queue_family_index, &mut count, out_counters.as_mut_ptr(), out_counter_descriptions.as_mut_ptr(), ) .result()?; assert_eq!(count as usize, out_counters.len()); assert_eq!(count as usize, out_counter_descriptions.len()); Ok(()) } /// #[inline] pub unsafe fn get_physical_device_queue_family_performance_query_passes( &self, physical_device: vk::PhysicalDevice, performance_query_create_info: &vk::QueryPoolPerformanceCreateInfoKHR<'_>, ) -> u32 { let mut num_passes = mem::MaybeUninit::uninit(); (self .fp .get_physical_device_queue_family_performance_query_passes_khr)( physical_device, performance_query_create_info, num_passes.as_mut_ptr(), ); num_passes.assume_init() } } ash-0.38.0+1.3.281/src/extensions/khr/pipeline_executable_properties.rs000064400000000000000000000037351046102023000237070ustar 00000000000000//! 
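//!
//! A minimal introspection sketch (illustrative only): the pipeline is assumed to
//! have been created with `vk::PipelineCreateFlags::CAPTURE_STATISTICS_KHR`,
//! which is not shown here; the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, prelude::VkResult, vk};
//!
//! /// Hypothetical helper: count the statistics reported for every executable of a pipeline.
//! unsafe fn count_executable_statistics(
//!     pep: &khr::pipeline_executable_properties::Device,
//!     pipeline: vk::Pipeline,
//! ) -> VkResult<usize> {
//!     let pipeline_info = vk::PipelineInfoKHR::default().pipeline(pipeline);
//!     let executables = pep.get_pipeline_executable_properties(&pipeline_info)?;
//!
//!     let mut total = 0;
//!     for index in 0..executables.len() as u32 {
//!         let exec_info = vk::PipelineExecutableInfoKHR::default()
//!             .pipeline(pipeline)
//!             .executable_index(index);
//!         total += pep.get_pipeline_executable_statistics(&exec_info)?.len();
//!     }
//!     Ok(total)
//! }
//! ```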
use crate::prelude::*; use crate::vk; use alloc::vec::Vec; impl crate::khr::pipeline_executable_properties::Device { /// #[inline] pub unsafe fn get_pipeline_executable_internal_representations( &self, executable_info: &vk::PipelineExecutableInfoKHR<'_>, ) -> VkResult>> { read_into_defaulted_vector(|count, data| { (self.fp.get_pipeline_executable_internal_representations_khr)( self.handle, executable_info, count, data, ) }) } /// #[inline] pub unsafe fn get_pipeline_executable_properties( &self, pipeline_info: &vk::PipelineInfoKHR<'_>, ) -> VkResult>> { read_into_defaulted_vector(|count, data| { (self.fp.get_pipeline_executable_properties_khr)( self.handle, pipeline_info, count, data, ) }) } /// #[inline] pub unsafe fn get_pipeline_executable_statistics( &self, executable_info: &vk::PipelineExecutableInfoKHR<'_>, ) -> VkResult>> { read_into_defaulted_vector(|count, data| { (self.fp.get_pipeline_executable_statistics_khr)( self.handle, executable_info, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/khr/present_wait.rs000064400000000000000000000010501046102023000201150ustar 00000000000000//! use crate::prelude::*; use crate::vk; impl crate::khr::present_wait::Device { /// #[inline] pub unsafe fn wait_for_present( &self, swapchain: vk::SwapchainKHR, present_id: u64, timeout: u64, ) -> VkResult<()> { (self.fp.wait_for_present_khr)(self.handle, swapchain, present_id, timeout).result() } } ash-0.38.0+1.3.281/src/extensions/khr/push_descriptor.rs000064400000000000000000000026271046102023000206410ustar 00000000000000//! use crate::vk; use core::ffi; impl crate::khr::push_descriptor::Device { /// #[inline] pub unsafe fn cmd_push_descriptor_set( &self, command_buffer: vk::CommandBuffer, pipeline_bind_point: vk::PipelineBindPoint, layout: vk::PipelineLayout, set: u32, descriptor_writes: &[vk::WriteDescriptorSet<'_>], ) { (self.fp.cmd_push_descriptor_set_khr)( command_buffer, pipeline_bind_point, layout, set, descriptor_writes.len() as u32, descriptor_writes.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_push_descriptor_set_with_template( &self, command_buffer: vk::CommandBuffer, descriptor_update_template: vk::DescriptorUpdateTemplate, layout: vk::PipelineLayout, set: u32, p_data: *const ffi::c_void, ) { (self.fp.cmd_push_descriptor_set_with_template_khr)( command_buffer, descriptor_update_template, layout, set, p_data, ) } } ash-0.38.0+1.3.281/src/extensions/khr/ray_tracing_maintenance1.rs000064400000000000000000000013401046102023000223400ustar 00000000000000//! use crate::vk; impl crate::khr::ray_tracing_maintenance1::Device { /// /// /// `indirect_device_address` is a buffer device address which is a pointer to a [`vk::TraceRaysIndirectCommand2KHR`] structure containing the trace ray parameters. #[inline] pub unsafe fn cmd_trace_rays_indirect2( &self, command_buffer: vk::CommandBuffer, indirect_device_address: vk::DeviceAddress, ) { (self.fp.cmd_trace_rays_indirect2_khr)(command_buffer, indirect_device_address); } } ash-0.38.0+1.3.281/src/extensions/khr/ray_tracing_pipeline.rs000064400000000000000000000132401046102023000216040ustar 00000000000000//! 
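//!
//! A minimal shader-binding-table sketch (illustrative only): `handle_size` is
//! assumed to be the `shader_group_handle_size` limit from
//! `vk::PhysicalDeviceRayTracingPipelinePropertiesKHR`, queried elsewhere; the
//! helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, prelude::VkResult, vk};
//!
//! /// Hypothetical helper: fetch the opaque handles of all shader groups so they
//! /// can later be copied into a shader binding table buffer.
//! unsafe fn fetch_group_handles(
//!     rt_pipeline: &khr::ray_tracing_pipeline::Device,
//!     pipeline: vk::Pipeline,
//!     group_count: u32,
//!     handle_size: u32,
//! ) -> VkResult<Vec<u8>> {
//!     let data_size = (group_count * handle_size) as usize;
//!     rt_pipeline.get_ray_tracing_shader_group_handles(pipeline, 0, group_count, data_size)
//! }
//! ```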
use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; impl crate::khr::ray_tracing_pipeline::Device { /// #[inline] pub unsafe fn cmd_trace_rays( &self, command_buffer: vk::CommandBuffer, raygen_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR, miss_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR, hit_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR, callable_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR, width: u32, height: u32, depth: u32, ) { (self.fp.cmd_trace_rays_khr)( command_buffer, raygen_shader_binding_tables, miss_shader_binding_tables, hit_shader_binding_tables, callable_shader_binding_tables, width, height, depth, ); } /// /// /// Pipelines are created and returned as described for [Multiple Pipeline Creation]. /// /// [Multiple Pipeline Creation]: https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-multiple #[inline] pub unsafe fn create_ray_tracing_pipelines( &self, deferred_operation: vk::DeferredOperationKHR, pipeline_cache: vk::PipelineCache, create_infos: &[vk::RayTracingPipelineCreateInfoKHR<'_>], allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> Result, (Vec, vk::Result)> { let mut pipelines = Vec::with_capacity(create_infos.len()); let err_code = (self.fp.create_ray_tracing_pipelines_khr)( self.handle, deferred_operation, pipeline_cache, create_infos.len() as u32, create_infos.as_ptr(), allocation_callbacks.as_raw_ptr(), pipelines.as_mut_ptr(), ); pipelines.set_len(create_infos.len()); match err_code { vk::Result::SUCCESS => Ok(pipelines), _ => Err((pipelines, err_code)), } } /// #[inline] pub unsafe fn get_ray_tracing_shader_group_handles( &self, pipeline: vk::Pipeline, first_group: u32, group_count: u32, data_size: usize, ) -> VkResult> { let mut data = Vec::::with_capacity(data_size); (self.fp.get_ray_tracing_shader_group_handles_khr)( self.handle, pipeline, first_group, group_count, data_size, data.as_mut_ptr().cast(), ) .set_vec_len_on_success(data, data_size) } /// #[inline] pub unsafe fn get_ray_tracing_capture_replay_shader_group_handles( &self, pipeline: vk::Pipeline, first_group: u32, group_count: u32, data_size: usize, ) -> VkResult> { let mut data = Vec::::with_capacity(data_size); (self .fp .get_ray_tracing_capture_replay_shader_group_handles_khr)( self.handle, pipeline, first_group, group_count, data_size, data.as_mut_ptr().cast(), ) .set_vec_len_on_success(data, data_size) } /// /// /// `indirect_device_address` is a buffer device address which is a pointer to a [`vk::TraceRaysIndirectCommandKHR`] structure containing the trace ray parameters. 
#[inline] pub unsafe fn cmd_trace_rays_indirect( &self, command_buffer: vk::CommandBuffer, raygen_shader_binding_table: &vk::StridedDeviceAddressRegionKHR, miss_shader_binding_table: &vk::StridedDeviceAddressRegionKHR, hit_shader_binding_table: &vk::StridedDeviceAddressRegionKHR, callable_shader_binding_table: &vk::StridedDeviceAddressRegionKHR, indirect_device_address: vk::DeviceAddress, ) { (self.fp.cmd_trace_rays_indirect_khr)( command_buffer, raygen_shader_binding_table, miss_shader_binding_table, hit_shader_binding_table, callable_shader_binding_table, indirect_device_address, ); } /// #[inline] pub unsafe fn get_ray_tracing_shader_group_stack_size( &self, pipeline: vk::Pipeline, group: u32, group_shader: vk::ShaderGroupShaderKHR, ) -> vk::DeviceSize { (self.fp.get_ray_tracing_shader_group_stack_size_khr)( self.handle, pipeline, group, group_shader, ) } /// #[inline] pub unsafe fn cmd_set_ray_tracing_pipeline_stack_size( &self, command_buffer: vk::CommandBuffer, pipeline_stack_size: u32, ) { (self.fp.cmd_set_ray_tracing_pipeline_stack_size_khr)(command_buffer, pipeline_stack_size); } } ash-0.38.0+1.3.281/src/extensions/khr/sampler_ycbcr_conversion.rs000064400000000000000000000026301046102023000225100ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::sampler_ycbcr_conversion::Device { /// #[inline] pub unsafe fn create_sampler_ycbcr_conversion( &self, create_info: &vk::SamplerYcbcrConversionCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut ycbcr_conversion = mem::MaybeUninit::uninit(); (self.fp.create_sampler_ycbcr_conversion_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), ycbcr_conversion.as_mut_ptr(), ) .assume_init_on_success(ycbcr_conversion) } /// #[inline] pub unsafe fn destroy_sampler_ycbcr_conversion( &self, ycbcr_conversion: vk::SamplerYcbcrConversion, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_sampler_ycbcr_conversion_khr)( self.handle, ycbcr_conversion, allocation_callbacks.as_raw_ptr(), ) } } ash-0.38.0+1.3.281/src/extensions/khr/surface.rs000075500000000000000000000056201046102023000170530ustar 00000000000000//! 
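//!
//! A minimal suitability-check sketch (illustrative only): the surface, physical
//! device and queue family index are assumed to come from the application's setup
//! code; the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, prelude::VkResult, vk};
//!
//! /// Hypothetical helper: confirm the queue family can present to `surface` and
//! /// pick the first reported surface format.
//! unsafe fn pick_surface_format(
//!     surface_instance: &khr::surface::Instance,
//!     pdevice: vk::PhysicalDevice,
//!     queue_family_index: u32,
//!     surface: vk::SurfaceKHR,
//! ) -> VkResult<Option<vk::SurfaceFormatKHR>> {
//!     if !surface_instance.get_physical_device_surface_support(
//!         pdevice,
//!         queue_family_index,
//!         surface,
//!     )? {
//!         return Ok(None);
//!     }
//!     let formats = surface_instance.get_physical_device_surface_formats(pdevice, surface)?;
//!     Ok(formats.first().copied())
//! }
//! ```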
use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::khr::surface::Instance { /// #[inline] pub unsafe fn get_physical_device_surface_support( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, surface: vk::SurfaceKHR, ) -> VkResult { let mut b = mem::MaybeUninit::uninit(); (self.fp.get_physical_device_surface_support_khr)( physical_device, queue_family_index, surface, b.as_mut_ptr(), ) .result()?; Ok(b.assume_init() > 0) } /// #[inline] pub unsafe fn get_physical_device_surface_present_modes( &self, physical_device: vk::PhysicalDevice, surface: vk::SurfaceKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_surface_present_modes_khr)( physical_device, surface, count, data, ) }) } /// #[inline] pub unsafe fn get_physical_device_surface_capabilities( &self, physical_device: vk::PhysicalDevice, surface: vk::SurfaceKHR, ) -> VkResult { let mut surface_capabilities = mem::MaybeUninit::uninit(); (self.fp.get_physical_device_surface_capabilities_khr)( physical_device, surface, surface_capabilities.as_mut_ptr(), ) .assume_init_on_success(surface_capabilities) } /// #[inline] pub unsafe fn get_physical_device_surface_formats( &self, physical_device: vk::PhysicalDevice, surface: vk::SurfaceKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_surface_formats_khr)(physical_device, surface, count, data) }) } /// #[inline] pub unsafe fn destroy_surface( &self, surface: vk::SurfaceKHR, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_surface_khr)(self.handle, surface, allocation_callbacks.as_raw_ptr()); } } ash-0.38.0+1.3.281/src/extensions/khr/swapchain.rs000075500000000000000000000162121046102023000173770ustar 00000000000000//! #[cfg(doc)] use crate::khr; use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::khr::swapchain::Device { /// #[inline] pub unsafe fn create_swapchain( &self, create_info: &vk::SwapchainCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut swapchain = mem::MaybeUninit::uninit(); (self.fp.create_swapchain_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), swapchain.as_mut_ptr(), ) .assume_init_on_success(swapchain) } /// #[inline] pub unsafe fn destroy_swapchain( &self, swapchain: vk::SwapchainKHR, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_swapchain_khr)(self.handle, swapchain, allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn get_swapchain_images( &self, swapchain: vk::SwapchainKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_swapchain_images_khr)(self.handle, swapchain, count, data) }) } /// On success, returns the next image's index and whether the swapchain is suboptimal for the surface. /// /// #[inline] pub unsafe fn acquire_next_image( &self, swapchain: vk::SwapchainKHR, timeout: u64, semaphore: vk::Semaphore, fence: vk::Fence, ) -> VkResult<(u32, bool)> { let mut index = mem::MaybeUninit::uninit(); let err_code = (self.fp.acquire_next_image_khr)( self.handle, swapchain, timeout, semaphore, fence, index.as_mut_ptr(), ); match err_code { vk::Result::SUCCESS => Ok((index.assume_init(), false)), vk::Result::SUBOPTIMAL_KHR => Ok((index.assume_init(), true)), _ => Err(err_code), } } /// On success, returns whether the swapchain is suboptimal for the surface. 
/// /// #[inline] pub unsafe fn queue_present( &self, queue: vk::Queue, present_info: &vk::PresentInfoKHR<'_>, ) -> VkResult { let err_code = (self.fp.queue_present_khr)(queue, present_info); match err_code { vk::Result::SUCCESS => Ok(false), vk::Result::SUBOPTIMAL_KHR => Ok(true), _ => Err(err_code), } } /// Only available since [Vulkan 1.1]. /// /// Also available as [`khr::device_group::Device::get_device_group_present_capabilities()`] /// when [`VK_KHR_surface`] is enabled. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_surface`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html #[inline] pub unsafe fn get_device_group_present_capabilities( &self, device_group_present_capabilities: &mut vk::DeviceGroupPresentCapabilitiesKHR<'_>, ) -> VkResult<()> { (self.fp.get_device_group_present_capabilities_khr)( self.handle, device_group_present_capabilities, ) .result() } /// Only available since [Vulkan 1.1]. /// /// Also available as [`khr::device_group::Device::get_device_group_surface_present_modes()`] /// when [`VK_KHR_surface`] is enabled. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_surface`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html #[inline] pub unsafe fn get_device_group_surface_present_modes( &self, surface: vk::SurfaceKHR, ) -> VkResult { let mut modes = mem::MaybeUninit::uninit(); (self.fp.get_device_group_surface_present_modes_khr)( self.handle, surface, modes.as_mut_ptr(), ) .assume_init_on_success(modes) } /// On success, returns the next image's index and whether the swapchain is suboptimal for the surface. /// /// Only available since [Vulkan 1.1]. /// /// Also available as [`khr::device_group::Device::acquire_next_image2()`] /// when [`VK_KHR_swapchain`] is enabled. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_swapchain`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_swapchain.html #[inline] pub unsafe fn acquire_next_image2( &self, acquire_info: &vk::AcquireNextImageInfoKHR<'_>, ) -> VkResult<(u32, bool)> { let mut index = mem::MaybeUninit::uninit(); let err_code = (self.fp.acquire_next_image2_khr)(self.handle, acquire_info, index.as_mut_ptr()); match err_code { vk::Result::SUCCESS => Ok((index.assume_init(), false)), vk::Result::SUBOPTIMAL_KHR => Ok((index.assume_init(), true)), _ => Err(err_code), } } } impl crate::khr::swapchain::Instance { /// Only available since [Vulkan 1.1]. /// /// Also available as [`khr::device_group::Instance::get_physical_device_present_rectangles()`] /// when [`VK_KHR_surface`] is enabled. /// /// /// /// [Vulkan 1.1]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html /// [`VK_KHR_surface`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html #[inline] pub unsafe fn get_physical_device_present_rectangles( &self, physical_device: vk::PhysicalDevice, surface: vk::SurfaceKHR, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.fp.get_physical_device_present_rectangles_khr)( physical_device, surface, count, data, ) }) } } ash-0.38.0+1.3.281/src/extensions/khr/synchronization2.rs000064400000000000000000000051271046102023000207450ustar 00000000000000//! 
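//!
//! A minimal barrier sketch (illustrative only): it records a whole-image layout
//! transition for a single-mip, single-layer color image and assumes `cmd` is in
//! the recording state; the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, vk};
//!
//! /// Hypothetical helper: transition `image` from UNDEFINED to COLOR_ATTACHMENT_OPTIMAL.
//! unsafe fn transition_to_color_attachment(
//!     sync2: &khr::synchronization2::Device,
//!     cmd: vk::CommandBuffer,
//!     image: vk::Image,
//! ) {
//!     let barrier = vk::ImageMemoryBarrier2KHR::default()
//!         .src_stage_mask(vk::PipelineStageFlags2::TOP_OF_PIPE)
//!         .dst_stage_mask(vk::PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT)
//!         .dst_access_mask(vk::AccessFlags2::COLOR_ATTACHMENT_WRITE)
//!         .old_layout(vk::ImageLayout::UNDEFINED)
//!         .new_layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)
//!         .image(image)
//!         .subresource_range(
//!             vk::ImageSubresourceRange::default()
//!                 .aspect_mask(vk::ImageAspectFlags::COLOR)
//!                 .level_count(1)
//!                 .layer_count(1),
//!         );
//!     let barriers = [barrier];
//!     let dependency_info = vk::DependencyInfoKHR::default().image_memory_barriers(&barriers);
//!     sync2.cmd_pipeline_barrier2(cmd, &dependency_info);
//! }
//! ```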
use crate::prelude::*; use crate::vk; impl crate::khr::synchronization2::Device { /// #[inline] pub unsafe fn cmd_pipeline_barrier2( &self, command_buffer: vk::CommandBuffer, dependency_info: &vk::DependencyInfoKHR<'_>, ) { (self.fp.cmd_pipeline_barrier2_khr)(command_buffer, dependency_info) } /// #[inline] pub unsafe fn cmd_reset_event2( &self, command_buffer: vk::CommandBuffer, event: vk::Event, stage_mask: vk::PipelineStageFlags2KHR, ) { (self.fp.cmd_reset_event2_khr)(command_buffer, event, stage_mask) } /// #[inline] pub unsafe fn cmd_set_event2( &self, command_buffer: vk::CommandBuffer, event: vk::Event, dependency_info: &vk::DependencyInfoKHR<'_>, ) { (self.fp.cmd_set_event2_khr)(command_buffer, event, dependency_info) } /// #[inline] pub unsafe fn cmd_wait_events2( &self, command_buffer: vk::CommandBuffer, events: &[vk::Event], dependency_infos: &[vk::DependencyInfoKHR<'_>], ) { assert_eq!(events.len(), dependency_infos.len()); (self.fp.cmd_wait_events2_khr)( command_buffer, events.len() as u32, events.as_ptr(), dependency_infos.as_ptr(), ) } /// #[inline] pub unsafe fn cmd_write_timestamp2( &self, command_buffer: vk::CommandBuffer, stage: vk::PipelineStageFlags2KHR, query_pool: vk::QueryPool, query: u32, ) { (self.fp.cmd_write_timestamp2_khr)(command_buffer, stage, query_pool, query) } /// #[inline] pub unsafe fn queue_submit2( &self, queue: vk::Queue, submits: &[vk::SubmitInfo2KHR<'_>], fence: vk::Fence, ) -> VkResult<()> { (self.fp.queue_submit2_khr)(queue, submits.len() as u32, submits.as_ptr(), fence).result() } } ash-0.38.0+1.3.281/src/extensions/khr/timeline_semaphore.rs000064400000000000000000000024031046102023000212650ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; impl crate::khr::timeline_semaphore::Device { /// #[inline] pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult { let mut value = mem::MaybeUninit::uninit(); (self.fp.get_semaphore_counter_value_khr)(self.handle, semaphore, value.as_mut_ptr()) .assume_init_on_success(value) } /// #[inline] pub unsafe fn wait_semaphores( &self, wait_info: &vk::SemaphoreWaitInfo<'_>, timeout: u64, ) -> VkResult<()> { (self.fp.wait_semaphores_khr)(self.handle, wait_info, timeout).result() } /// #[inline] pub unsafe fn signal_semaphore( &self, signal_info: &vk::SemaphoreSignalInfo<'_>, ) -> VkResult<()> { (self.fp.signal_semaphore_khr)(self.handle, signal_info).result() } } ash-0.38.0+1.3.281/src/extensions/khr/wayland_surface.rs000075500000000000000000000026021046102023000205670ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::wayland_surface::Instance { /// #[inline] pub unsafe fn create_wayland_surface( &self, create_info: &vk::WaylandSurfaceCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_wayland_surface_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } /// #[inline] pub unsafe fn get_physical_device_wayland_presentation_support( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, wl_display: &mut vk::wl_display, ) -> bool { let b = (self.fp.get_physical_device_wayland_presentation_support_khr)( physical_device, queue_family_index, wl_display, ); b > 0 } } ash-0.38.0+1.3.281/src/extensions/khr/win32_surface.rs000075500000000000000000000024571046102023000201020ustar 00000000000000//! 
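//!
//! A minimal surface-creation sketch (illustrative only): `hinstance` and `hwnd`
//! are the raw Win32 handles of an existing window obtained from the windowing
//! layer; the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{khr, prelude::VkResult, vk};
//!
//! /// Hypothetical helper: wrap an existing Win32 window in a `vk::SurfaceKHR`.
//! unsafe fn create_surface(
//!     win32_surface: &khr::win32_surface::Instance,
//!     hinstance: vk::HINSTANCE,
//!     hwnd: vk::HWND,
//! ) -> VkResult<vk::SurfaceKHR> {
//!     let create_info = vk::Win32SurfaceCreateInfoKHR::default()
//!         .hinstance(hinstance)
//!         .hwnd(hwnd);
//!     win32_surface.create_win32_surface(&create_info, None)
//! }
//! ```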
use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::win32_surface::Instance { /// #[inline] pub unsafe fn create_win32_surface( &self, create_info: &vk::Win32SurfaceCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_win32_surface_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } /// #[inline] pub unsafe fn get_physical_device_win32_presentation_support( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, ) -> bool { let b = (self.fp.get_physical_device_win32_presentation_support_khr)( physical_device, queue_family_index, ); b > 0 } } ash-0.38.0+1.3.281/src/extensions/khr/xcb_surface.rs000075500000000000000000000026421046102023000177100ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::xcb_surface::Instance { /// #[inline] pub unsafe fn create_xcb_surface( &self, create_info: &vk::XcbSurfaceCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_xcb_surface_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } /// #[inline] pub unsafe fn get_physical_device_xcb_presentation_support( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, connection: &mut vk::xcb_connection_t, visual_id: vk::xcb_visualid_t, ) -> bool { let b = (self.fp.get_physical_device_xcb_presentation_support_khr)( physical_device, queue_family_index, connection, visual_id, ); b > 0 } } ash-0.38.0+1.3.281/src/extensions/khr/xlib_surface.rs000075500000000000000000000026261046102023000200740ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::khr::xlib_surface::Instance { /// #[inline] pub unsafe fn create_xlib_surface( &self, create_info: &vk::XlibSurfaceCreateInfoKHR<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_xlib_surface_khr)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } /// #[inline] pub unsafe fn get_physical_device_xlib_presentation_support( &self, physical_device: vk::PhysicalDevice, queue_family_index: u32, display: *mut vk::Display, visual_id: vk::VisualID, ) -> bool { let b = (self.fp.get_physical_device_xlib_presentation_support_khr)( physical_device, queue_family_index, display, visual_id, ); b > 0 } } ash-0.38.0+1.3.281/src/extensions/mod.rs000064400000000000000000000001731046102023000154110ustar 00000000000000pub mod amd; pub mod amdx; pub mod android; pub mod ext; pub mod google; pub mod khr; pub mod mvk; pub mod nn; pub mod nv; ash-0.38.0+1.3.281/src/extensions/mvk/ios_surface.rs000075500000000000000000000014721046102023000177370ustar 00000000000000//! 
use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::mvk::ios_surface::Instance { /// #[inline] pub unsafe fn create_ios_surface( &self, create_info: &vk::IOSSurfaceCreateInfoMVK<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_ios_surface_mvk)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/mvk/macos_surface.rs000075500000000000000000000015101046102023000202400ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::mvk::macos_surface::Instance { /// #[inline] pub unsafe fn create_mac_os_surface( &self, create_info: &vk::MacOSSurfaceCreateInfoMVK<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_mac_os_surface_mvk)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/mvk/mod.rs000064400000000000000000000000541046102023000162040ustar 00000000000000pub mod ios_surface; pub mod macos_surface; ash-0.38.0+1.3.281/src/extensions/nn/mod.rs000064400000000000000000000000241046102023000160170ustar 00000000000000pub mod vi_surface; ash-0.38.0+1.3.281/src/extensions/nn/vi_surface.rs000064400000000000000000000014571046102023000174010ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use core::mem; impl crate::nn::vi_surface::Instance { /// #[inline] pub unsafe fn create_vi_surface( &self, create_info: &vk::ViSurfaceCreateInfoNN<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut surface = mem::MaybeUninit::uninit(); (self.fp.create_vi_surface_nn)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), surface.as_mut_ptr(), ) .assume_init_on_success(surface) } } ash-0.38.0+1.3.281/src/extensions/nv/copy_memory_indirect.rs000064400000000000000000000035711046102023000215050ustar 00000000000000//! use crate::vk; impl crate::nv::copy_memory_indirect::Device { /// /// /// `copy_buffer_address` is a buffer device address which is a pointer to an array of /// `copy_count` number of [`vk::CopyMemoryIndirectCommandNV`] structures containing the copy /// parameters, each `stride` bytes apart. #[inline] pub unsafe fn cmd_copy_memory_indirect( &self, command_buffer: vk::CommandBuffer, copy_buffer_address: vk::DeviceAddress, copy_count: u32, stride: u32, ) { (self.fp.cmd_copy_memory_indirect_nv)( command_buffer, copy_buffer_address, copy_count, stride, ) } /// /// /// `copy_buffer_address` is a buffer device address which is a pointer to an array of /// `image_subresources.len()` number of [`vk::CopyMemoryToImageIndirectCommandNV`] structures /// containing the copy parameters, each `stride` bytes apart. 
#[inline] pub unsafe fn cmd_copy_memory_to_image_indirect( &self, command_buffer: vk::CommandBuffer, copy_buffer_address: vk::DeviceAddress, stride: u32, dst_image: vk::Image, dst_image_layout: vk::ImageLayout, image_subresources: &[vk::ImageSubresourceLayers], ) { (self.fp.cmd_copy_memory_to_image_indirect_nv)( command_buffer, copy_buffer_address, image_subresources.len() as u32, stride, dst_image, dst_image_layout, image_subresources.as_ptr(), ) } } ash-0.38.0+1.3.281/src/extensions/nv/coverage_reduction_mode.rs000064400000000000000000000040061046102023000221270ustar 00000000000000//! use crate::prelude::*; use crate::vk; use core::mem; use core::ptr; impl crate::nv::coverage_reduction_mode::Instance { /// Retrieve the number of elements to pass to [`get_physical_device_supported_framebuffer_mixed_samples_combinations()`][Self::get_physical_device_supported_framebuffer_mixed_samples_combinations()] #[inline] pub unsafe fn get_physical_device_supported_framebuffer_mixed_samples_combinations_len( &self, physical_device: vk::PhysicalDevice, ) -> VkResult { let mut count = mem::MaybeUninit::uninit(); (self .fp .get_physical_device_supported_framebuffer_mixed_samples_combinations_nv)( physical_device, count.as_mut_ptr(), ptr::null_mut(), ) .assume_init_on_success(count) .map(|c| c as usize) } /// /// /// Call [`get_physical_device_supported_framebuffer_mixed_samples_combinations_len()`][Self::get_physical_device_supported_framebuffer_mixed_samples_combinations_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_physical_device_supported_framebuffer_mixed_samples_combinations( &self, physical_device: vk::PhysicalDevice, out: &mut [vk::FramebufferMixedSamplesCombinationNV<'_>], ) -> VkResult<()> { let mut count = out.len() as u32; (self .fp .get_physical_device_supported_framebuffer_mixed_samples_combinations_nv)( physical_device, &mut count, out.as_mut_ptr(), ) .result()?; assert_eq!(count as usize, out.len()); Ok(()) } } ash-0.38.0+1.3.281/src/extensions/nv/cuda_kernel_launch.rs000064400000000000000000000055661046102023000210760ustar 00000000000000//! 
use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::nv::cuda_kernel_launch::Device { /// #[inline] pub unsafe fn create_cuda_module( &self, create_info: &vk::CudaModuleCreateInfoNV<'_>, allocator: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut module = mem::MaybeUninit::uninit(); (self.fp.create_cuda_module_nv)( self.handle, create_info, allocator.as_raw_ptr(), module.as_mut_ptr(), ) .assume_init_on_success(module) } /// #[inline] pub unsafe fn get_cuda_module_cache(&self, module: vk::CudaModuleNV) -> VkResult> { read_into_uninitialized_vector(|cache_size, cache_data: *mut u8| { (self.fp.get_cuda_module_cache_nv)(self.handle, module, cache_size, cache_data.cast()) }) } /// #[inline] pub unsafe fn create_cuda_function( &self, create_info: &vk::CudaFunctionCreateInfoNV<'_>, allocator: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut function = mem::MaybeUninit::uninit(); (self.fp.create_cuda_function_nv)( self.handle, create_info, allocator.as_raw_ptr(), function.as_mut_ptr(), ) .assume_init_on_success(function) } /// #[inline] pub unsafe fn destroy_cuda_module( &self, module: vk::CudaModuleNV, allocator: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_cuda_module_nv)(self.handle, module, allocator.as_raw_ptr()) } /// #[inline] pub unsafe fn destroy_cuda_function( &self, function: vk::CudaFunctionNV, allocator: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_cuda_function_nv)(self.handle, function, allocator.as_raw_ptr()) } /// #[inline] pub unsafe fn cmd_cuda_launch_kernel( &self, command_buffer: vk::CommandBuffer, launch_info: &vk::CudaLaunchInfoNV<'_>, ) { (self.fp.cmd_cuda_launch_kernel_nv)(command_buffer, launch_info) } } ash-0.38.0+1.3.281/src/extensions/nv/device_diagnostic_checkpoints.rs000064400000000000000000000032321046102023000233110ustar 00000000000000//! use crate::vk; use core::ffi; use core::mem; use core::ptr; impl crate::nv::device_diagnostic_checkpoints::Device { /// #[inline] pub unsafe fn cmd_set_checkpoint( &self, command_buffer: vk::CommandBuffer, p_checkpoint_marker: *const ffi::c_void, ) { (self.fp.cmd_set_checkpoint_nv)(command_buffer, p_checkpoint_marker); } /// Retrieve the number of elements to pass to [`get_queue_checkpoint_data()`][Self::get_queue_checkpoint_data()] #[inline] pub unsafe fn get_queue_checkpoint_data_len(&self, queue: vk::Queue) -> usize { let mut count = mem::MaybeUninit::uninit(); (self.fp.get_queue_checkpoint_data_nv)(queue, count.as_mut_ptr(), ptr::null_mut()); count.assume_init() as usize } /// /// /// Call [`get_queue_checkpoint_data_len()`][Self::get_queue_checkpoint_data_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_queue_checkpoint_data( &self, queue: vk::Queue, out: &mut [vk::CheckpointDataNV<'_>], ) { let mut count = out.len() as u32; (self.fp.get_queue_checkpoint_data_nv)(queue, &mut count, out.as_mut_ptr()); assert_eq!(count as usize, out.len()); } } ash-0.38.0+1.3.281/src/extensions/nv/device_generated_commands_compute.rs000064400000000000000000000030421046102023000241450ustar 00000000000000//! 
use crate::vk; impl crate::nv::device_generated_commands_compute::Device { /// #[inline] pub unsafe fn get_pipeline_indirect_memory_requirements( &self, create_info: &vk::ComputePipelineCreateInfo<'_>, memory_requirements: &mut vk::MemoryRequirements2<'_>, ) { (self.fp.get_pipeline_indirect_memory_requirements_nv)( self.handle, create_info, memory_requirements, ) } /// #[inline] pub unsafe fn cmd_update_pipeline_indirect_buffer( &self, command_buffer: vk::CommandBuffer, pipeline_bind_point: vk::PipelineBindPoint, pipeline: vk::Pipeline, ) { (self.fp.cmd_update_pipeline_indirect_buffer_nv)( command_buffer, pipeline_bind_point, pipeline, ) } /// #[inline] pub unsafe fn get_pipeline_indirect_device_address( &self, info: &vk::PipelineIndirectDeviceAddressInfoNV<'_>, ) -> vk::DeviceAddress { (self.fp.get_pipeline_indirect_device_address_nv)(self.handle, info) } } ash-0.38.0+1.3.281/src/extensions/nv/low_latency2.rs000064400000000000000000000040331046102023000176560ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; impl crate::nv::low_latency2::Device { /// #[inline] pub unsafe fn set_latency_sleep_mode( &self, swapchain: vk::SwapchainKHR, sleep_mode_info: Option<&vk::LatencySleepModeInfoNV<'_>>, ) -> VkResult<()> { (self.fp.set_latency_sleep_mode_nv)(self.handle, swapchain, sleep_mode_info.as_raw_ptr()) .result() } /// #[inline] pub unsafe fn latency_sleep( &self, swapchain: vk::SwapchainKHR, sleep_info: &vk::LatencySleepInfoNV<'_>, ) -> VkResult<()> { (self.fp.latency_sleep_nv)(self.handle, swapchain, sleep_info).result() } /// #[inline] pub unsafe fn set_latency_marker( &self, swapchain: vk::SwapchainKHR, latency_marker_info: &vk::SetLatencyMarkerInfoNV<'_>, ) { (self.fp.set_latency_marker_nv)(self.handle, swapchain, latency_marker_info) } /// #[inline] pub unsafe fn get_latency_timings( &self, swapchain: vk::SwapchainKHR, latency_marker_info: &mut vk::GetLatencyMarkerInfoNV<'_>, ) { (self.fp.get_latency_timings_nv)(self.handle, swapchain, latency_marker_info) } /// #[inline] pub unsafe fn queue_notify_out_of_band( &self, queue: vk::Queue, queue_type_info: &vk::OutOfBandQueueTypeInfoNV<'_>, ) { (self.fp.queue_notify_out_of_band_nv)(queue, queue_type_info) } } ash-0.38.0+1.3.281/src/extensions/nv/memory_decompression.rs000064400000000000000000000023051046102023000215160ustar 00000000000000//! use crate::vk; impl crate::nv::memory_decompression::Device { /// pub unsafe fn cmd_decompress_memory( &self, command_buffer: vk::CommandBuffer, decompress_memory_regions: &[vk::DecompressMemoryRegionNV], ) { (self.fp.cmd_decompress_memory_nv)( command_buffer, decompress_memory_regions.len() as u32, decompress_memory_regions.as_ptr(), ) } /// pub unsafe fn cmd_decompress_memory_indirect_count( &self, command_buffer: vk::CommandBuffer, indirect_commands_address: vk::DeviceAddress, indirect_commands_count_address: vk::DeviceAddress, stride: u32, ) { (self.fp.cmd_decompress_memory_indirect_count_nv)( command_buffer, indirect_commands_address, indirect_commands_count_address, stride, ) } } ash-0.38.0+1.3.281/src/extensions/nv/mesh_shader.rs000075500000000000000000000032741046102023000175470ustar 00000000000000//! 
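//!
//! A minimal dispatch sketch (illustrative only): a graphics pipeline that uses NV
//! task/mesh shaders is assumed to already be bound to `cmd`, which is not shown;
//! the helper name is hypothetical.
//!
//! ```no_run
//! use ash::{nv, vk};
//!
//! /// Hypothetical helper: launch `task_count` task workgroups starting at index 0.
//! unsafe fn draw_meshlets(
//!     mesh_shader: &nv::mesh_shader::Device,
//!     cmd: vk::CommandBuffer,
//!     task_count: u32,
//! ) {
//!     mesh_shader.cmd_draw_mesh_tasks(cmd, task_count, 0);
//! }
//! ```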
use crate::vk; impl crate::nv::mesh_shader::Device { /// #[inline] pub unsafe fn cmd_draw_mesh_tasks( &self, command_buffer: vk::CommandBuffer, task_count: u32, first_task: u32, ) { (self.fp.cmd_draw_mesh_tasks_nv)(command_buffer, task_count, first_task); } /// #[inline] pub unsafe fn cmd_draw_mesh_tasks_indirect( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, draw_count: u32, stride: u32, ) { (self.fp.cmd_draw_mesh_tasks_indirect_nv)( command_buffer, buffer, offset, draw_count, stride, ); } /// #[inline] pub unsafe fn cmd_draw_mesh_tasks_indirect_count( &self, command_buffer: vk::CommandBuffer, buffer: vk::Buffer, offset: vk::DeviceSize, count_buffer: vk::Buffer, count_buffer_offset: vk::DeviceSize, max_draw_count: u32, stride: u32, ) { (self.fp.cmd_draw_mesh_tasks_indirect_count_nv)( command_buffer, buffer, offset, count_buffer, count_buffer_offset, max_draw_count, stride, ); } } ash-0.38.0+1.3.281/src/extensions/nv/mod.rs000064400000000000000000000004131046102023000160310ustar 00000000000000pub mod copy_memory_indirect; pub mod coverage_reduction_mode; pub mod cuda_kernel_launch; pub mod device_diagnostic_checkpoints; pub mod device_generated_commands_compute; pub mod low_latency2; pub mod memory_decompression; pub mod mesh_shader; pub mod ray_tracing; ash-0.38.0+1.3.281/src/extensions/nv/ray_tracing.rs000075500000000000000000000206451046102023000175700ustar 00000000000000//! use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::mem; impl crate::nv::ray_tracing::Device { /// #[inline] pub unsafe fn create_acceleration_structure( &self, create_info: &vk::AccelerationStructureCreateInfoNV<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut accel_struct = mem::MaybeUninit::uninit(); (self.fp.create_acceleration_structure_nv)( self.handle, create_info, allocation_callbacks.as_raw_ptr(), accel_struct.as_mut_ptr(), ) .assume_init_on_success(accel_struct) } /// #[inline] pub unsafe fn destroy_acceleration_structure( &self, accel_struct: vk::AccelerationStructureNV, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.fp.destroy_acceleration_structure_nv)( self.handle, accel_struct, allocation_callbacks.as_raw_ptr(), ); } /// #[inline] pub unsafe fn get_acceleration_structure_memory_requirements( &self, info: &vk::AccelerationStructureMemoryRequirementsInfoNV<'_>, ) -> vk::MemoryRequirements2KHR<'_> { let mut requirements = Default::default(); (self.fp.get_acceleration_structure_memory_requirements_nv)( self.handle, info, &mut requirements, ); requirements } /// #[inline] pub unsafe fn bind_acceleration_structure_memory( &self, bind_info: &[vk::BindAccelerationStructureMemoryInfoNV<'_>], ) -> VkResult<()> { (self.fp.bind_acceleration_structure_memory_nv)( self.handle, bind_info.len() as u32, bind_info.as_ptr(), ) .result() } /// #[inline] pub unsafe fn cmd_build_acceleration_structure( &self, command_buffer: vk::CommandBuffer, info: &vk::AccelerationStructureInfoNV<'_>, instance_data: vk::Buffer, instance_offset: vk::DeviceSize, update: bool, dst: vk::AccelerationStructureNV, src: vk::AccelerationStructureNV, scratch: vk::Buffer, scratch_offset: vk::DeviceSize, ) { (self.fp.cmd_build_acceleration_structure_nv)( command_buffer, info, instance_data, instance_offset, if update { vk::TRUE } else { vk::FALSE }, dst, src, scratch, scratch_offset, ); } /// #[inline] pub unsafe fn cmd_copy_acceleration_structure( &self, command_buffer: vk::CommandBuffer, dst: 
vk::AccelerationStructureNV, src: vk::AccelerationStructureNV, mode: vk::CopyAccelerationStructureModeNV, ) { (self.fp.cmd_copy_acceleration_structure_nv)(command_buffer, dst, src, mode); } /// #[inline] pub unsafe fn cmd_trace_rays( &self, command_buffer: vk::CommandBuffer, raygen_shader_binding_table_buffer: vk::Buffer, raygen_shader_binding_offset: vk::DeviceSize, miss_shader_binding_table_buffer: vk::Buffer, miss_shader_binding_offset: vk::DeviceSize, miss_shader_binding_stride: vk::DeviceSize, hit_shader_binding_table_buffer: vk::Buffer, hit_shader_binding_offset: vk::DeviceSize, hit_shader_binding_stride: vk::DeviceSize, callable_shader_binding_table_buffer: vk::Buffer, callable_shader_binding_offset: vk::DeviceSize, callable_shader_binding_stride: vk::DeviceSize, width: u32, height: u32, depth: u32, ) { (self.fp.cmd_trace_rays_nv)( command_buffer, raygen_shader_binding_table_buffer, raygen_shader_binding_offset, miss_shader_binding_table_buffer, miss_shader_binding_offset, miss_shader_binding_stride, hit_shader_binding_table_buffer, hit_shader_binding_offset, hit_shader_binding_stride, callable_shader_binding_table_buffer, callable_shader_binding_offset, callable_shader_binding_stride, width, height, depth, ); } /// /// /// Pipelines are created and returned as described for [Multiple Pipeline Creation]. /// /// [Multiple Pipeline Creation]: https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#pipelines-multiple #[inline] pub unsafe fn create_ray_tracing_pipelines( &self, pipeline_cache: vk::PipelineCache, create_infos: &[vk::RayTracingPipelineCreateInfoNV<'_>], allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> Result, (Vec, vk::Result)> { let mut pipelines = Vec::with_capacity(create_infos.len()); let err_code = (self.fp.create_ray_tracing_pipelines_nv)( self.handle, pipeline_cache, create_infos.len() as u32, create_infos.as_ptr(), allocation_callbacks.as_raw_ptr(), pipelines.as_mut_ptr(), ); pipelines.set_len(create_infos.len()); match err_code { vk::Result::SUCCESS => Ok(pipelines), _ => Err((pipelines, err_code)), } } /// #[inline] pub unsafe fn get_ray_tracing_shader_group_handles( &self, pipeline: vk::Pipeline, first_group: u32, group_count: u32, data: &mut [u8], ) -> VkResult<()> { (self.fp.get_ray_tracing_shader_group_handles_nv)( self.handle, pipeline, first_group, group_count, data.len(), data.as_mut_ptr().cast(), ) .result() } /// #[inline] pub unsafe fn get_acceleration_structure_handle( &self, accel_struct: vk::AccelerationStructureNV, ) -> VkResult { let mut handle = mem::MaybeUninit::::uninit(); (self.fp.get_acceleration_structure_handle_nv)( self.handle, accel_struct, mem::size_of_val(&handle), handle.as_mut_ptr().cast(), ) .assume_init_on_success(handle) } /// #[inline] pub unsafe fn cmd_write_acceleration_structures_properties( &self, command_buffer: vk::CommandBuffer, structures: &[vk::AccelerationStructureNV], query_type: vk::QueryType, query_pool: vk::QueryPool, first_query: u32, ) { (self.fp.cmd_write_acceleration_structures_properties_nv)( command_buffer, structures.len() as u32, structures.as_ptr(), query_type, query_pool, first_query, ); } /// #[inline] pub unsafe fn compile_deferred(&self, pipeline: vk::Pipeline, shader: u32) -> VkResult<()> { (self.fp.compile_deferred_nv)(self.handle, pipeline, shader).result() } } ash-0.38.0+1.3.281/src/extensions_generated.rs000064400000000000000000034074031046102023000166620ustar 00000000000000#![allow(unused_imports)] use crate::vk::*; use core::ffi::*; #[doc = "Extensions tagged 
AMD"] pub mod amd { #[doc = "VK_AMD_rasterization_order"] pub mod rasterization_order { use super::super::*; pub use { crate::vk::AMD_RASTERIZATION_ORDER_NAME as NAME, crate::vk::AMD_RASTERIZATION_ORDER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_trinary_minmax"] pub mod shader_trinary_minmax { use super::super::*; pub use { crate::vk::AMD_SHADER_TRINARY_MINMAX_NAME as NAME, crate::vk::AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_explicit_vertex_parameter"] pub mod shader_explicit_vertex_parameter { use super::super::*; pub use { crate::vk::AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_NAME as NAME, crate::vk::AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_gcn_shader"] pub mod gcn_shader { use super::super::*; pub use { crate::vk::AMD_GCN_SHADER_NAME as NAME, crate::vk::AMD_GCN_SHADER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_draw_indirect_count"] pub mod draw_indirect_count { use super::super::*; pub use { crate::vk::AMD_DRAW_INDIRECT_COUNT_NAME as NAME, crate::vk::AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_AMD_draw_indirect_count device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_AMD_draw_indirect_count device-level function pointers"] pub struct DeviceFn { pub cmd_draw_indirect_count_amd: PFN_vkCmdDrawIndirectCount, pub cmd_draw_indexed_indirect_count_amd: PFN_vkCmdDrawIndexedIndirectCount, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_indirect_count_amd: unsafe { unsafe extern "system" fn cmd_draw_indirect_count_amd( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indirect_count_amd) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCountAMD\0"); let val = _f(cname); if val.is_null() { cmd_draw_indirect_count_amd } else { ::core::mem::transmute(val) } }, cmd_draw_indexed_indirect_count_amd: unsafe { unsafe extern "system" fn cmd_draw_indexed_indirect_count_amd( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indexed_indirect_count_amd) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDrawIndexedIndirectCountAMD\0", ); let val = _f(cname); if val.is_null() { cmd_draw_indexed_indirect_count_amd } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_AMD_negative_viewport_height"] pub mod negative_viewport_height { use super::super::*; pub use { crate::vk::AMD_NEGATIVE_VIEWPORT_HEIGHT_NAME as NAME, crate::vk::AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_gpu_shader_half_float"] pub 
mod gpu_shader_half_float { use super::super::*; pub use { crate::vk::AMD_GPU_SHADER_HALF_FLOAT_NAME as NAME, crate::vk::AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_ballot"] pub mod shader_ballot { use super::super::*; pub use { crate::vk::AMD_SHADER_BALLOT_NAME as NAME, crate::vk::AMD_SHADER_BALLOT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_texture_gather_bias_lod"] pub mod texture_gather_bias_lod { use super::super::*; pub use { crate::vk::AMD_TEXTURE_GATHER_BIAS_LOD_NAME as NAME, crate::vk::AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_info"] pub mod shader_info { use super::super::*; pub use { crate::vk::AMD_SHADER_INFO_NAME as NAME, crate::vk::AMD_SHADER_INFO_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_AMD_shader_info device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_AMD_shader_info device-level function pointers"] pub struct DeviceFn { pub get_shader_info_amd: PFN_vkGetShaderInfoAMD, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_shader_info_amd: unsafe { unsafe extern "system" fn get_shader_info_amd( _device: crate::vk::Device, _pipeline: Pipeline, _shader_stage: ShaderStageFlags, _info_type: ShaderInfoTypeAMD, _p_info_size: *mut usize, _p_info: *mut c_void, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_shader_info_amd))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetShaderInfoAMD\0"); let val = _f(cname); if val.is_null() { get_shader_info_amd } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_AMD_shader_image_load_store_lod"] pub mod shader_image_load_store_lod { use super::super::*; pub use { crate::vk::AMD_SHADER_IMAGE_LOAD_STORE_LOD_NAME as NAME, crate::vk::AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_gpu_shader_int16"] pub mod gpu_shader_int16 { use super::super::*; pub use { crate::vk::AMD_GPU_SHADER_INT16_NAME as NAME, crate::vk::AMD_GPU_SHADER_INT16_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_mixed_attachment_samples"] pub mod mixed_attachment_samples { use super::super::*; pub use { crate::vk::AMD_MIXED_ATTACHMENT_SAMPLES_NAME as NAME, crate::vk::AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_fragment_mask"] pub mod shader_fragment_mask { use super::super::*; pub use { crate::vk::AMD_SHADER_FRAGMENT_MASK_NAME as NAME, crate::vk::AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_buffer_marker"] pub mod buffer_marker { use super::super::*; pub use { crate::vk::AMD_BUFFER_MARKER_NAME as NAME, crate::vk::AMD_BUFFER_MARKER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_AMD_buffer_marker device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: 
&crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_AMD_buffer_marker device-level function pointers"] pub struct DeviceFn { pub cmd_write_buffer_marker_amd: PFN_vkCmdWriteBufferMarkerAMD, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_write_buffer_marker_amd: unsafe { unsafe extern "system" fn cmd_write_buffer_marker_amd( _command_buffer: CommandBuffer, _pipeline_stage: PipelineStageFlags, _dst_buffer: Buffer, _dst_offset: DeviceSize, _marker: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_write_buffer_marker_amd) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteBufferMarkerAMD\0"); let val = _f(cname); if val.is_null() { cmd_write_buffer_marker_amd } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_AMD_pipeline_compiler_control"] pub mod pipeline_compiler_control { use super::super::*; pub use { crate::vk::AMD_PIPELINE_COMPILER_CONTROL_NAME as NAME, crate::vk::AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_core_properties"] pub mod shader_core_properties { use super::super::*; pub use { crate::vk::AMD_SHADER_CORE_PROPERTIES_NAME as NAME, crate::vk::AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_memory_overallocation_behavior"] pub mod memory_overallocation_behavior { use super::super::*; pub use { crate::vk::AMD_MEMORY_OVERALLOCATION_BEHAVIOR_NAME as NAME, crate::vk::AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_display_native_hdr"] pub mod display_native_hdr { use super::super::*; pub use { crate::vk::AMD_DISPLAY_NATIVE_HDR_NAME as NAME, crate::vk::AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_AMD_display_native_hdr device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_AMD_display_native_hdr device-level function pointers"] pub struct DeviceFn { pub set_local_dimming_amd: PFN_vkSetLocalDimmingAMD, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { set_local_dimming_amd: unsafe { unsafe extern "system" fn set_local_dimming_amd( _device: crate::vk::Device, _swap_chain: SwapchainKHR, _local_dimming_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(set_local_dimming_amd) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetLocalDimmingAMD\0"); let val = _f(cname); if val.is_null() { set_local_dimming_amd } else { ::core::mem::transmute(val) } }, } } } } 
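// A hypothetical usage sketch for the generated wrappers in this module (not part of the
// generated bindings themselves): the extension `Device` struct loads the extension's
// function pointers once through `vkGetDeviceProcAddr`, and the raw table stays reachable
// through `fp()`. `instance`, `device` and `swapchain` are assumed to already exist.
//
//     let hdr = ash::amd::display_native_hdr::Device::new(&instance, &device);
//     unsafe { (hdr.fp().set_local_dimming_amd)(hdr.device(), swapchain, vk::TRUE) };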
#[doc = "VK_AMD_shader_core_properties2"] pub mod shader_core_properties2 { use super::super::*; pub use { crate::vk::AMD_SHADER_CORE_PROPERTIES2_NAME as NAME, crate::vk::AMD_SHADER_CORE_PROPERTIES2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_device_coherent_memory"] pub mod device_coherent_memory { use super::super::*; pub use { crate::vk::AMD_DEVICE_COHERENT_MEMORY_NAME as NAME, crate::vk::AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_AMD_shader_early_and_late_fragment_tests"] pub mod shader_early_and_late_fragment_tests { use super::super::*; pub use { crate::vk::AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_NAME as NAME, crate::vk::AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged AMDX"] pub mod amdx { #[doc = "VK_AMDX_shader_enqueue"] pub mod shader_enqueue { use super::super::*; pub use { crate::vk::AMDX_SHADER_ENQUEUE_NAME as NAME, crate::vk::AMDX_SHADER_ENQUEUE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_AMDX_shader_enqueue device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_AMDX_shader_enqueue device-level function pointers"] pub struct DeviceFn { pub create_execution_graph_pipelines_amdx: PFN_vkCreateExecutionGraphPipelinesAMDX, pub get_execution_graph_pipeline_scratch_size_amdx: PFN_vkGetExecutionGraphPipelineScratchSizeAMDX, pub get_execution_graph_pipeline_node_index_amdx: PFN_vkGetExecutionGraphPipelineNodeIndexAMDX, pub cmd_initialize_graph_scratch_memory_amdx: PFN_vkCmdInitializeGraphScratchMemoryAMDX, pub cmd_dispatch_graph_amdx: PFN_vkCmdDispatchGraphAMDX, pub cmd_dispatch_graph_indirect_amdx: PFN_vkCmdDispatchGraphIndirectAMDX, pub cmd_dispatch_graph_indirect_count_amdx: PFN_vkCmdDispatchGraphIndirectCountAMDX, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_execution_graph_pipelines_amdx: unsafe { unsafe extern "system" fn create_execution_graph_pipelines_amdx( _device: crate::vk::Device, _pipeline_cache: PipelineCache, _create_info_count: u32, _p_create_infos: *const ExecutionGraphPipelineCreateInfoAMDX<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipelines: *mut Pipeline, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_execution_graph_pipelines_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateExecutionGraphPipelinesAMDX\0", ); let val = _f(cname); if val.is_null() { create_execution_graph_pipelines_amdx } else { ::core::mem::transmute(val) } }, get_execution_graph_pipeline_scratch_size_amdx: unsafe { unsafe extern "system" fn get_execution_graph_pipeline_scratch_size_amdx( _device: crate::vk::Device, _execution_graph: Pipeline, _p_size_info: *mut ExecutionGraphPipelineScratchSizeAMDX<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_execution_graph_pipeline_scratch_size_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked( 
b"vkGetExecutionGraphPipelineScratchSizeAMDX\0", ); let val = _f(cname); if val.is_null() { get_execution_graph_pipeline_scratch_size_amdx } else { ::core::mem::transmute(val) } }, get_execution_graph_pipeline_node_index_amdx: unsafe { unsafe extern "system" fn get_execution_graph_pipeline_node_index_amdx( _device: crate::vk::Device, _execution_graph: Pipeline, _p_node_info: *const PipelineShaderStageNodeCreateInfoAMDX<'_>, _p_node_index: *mut u32, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_execution_graph_pipeline_node_index_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetExecutionGraphPipelineNodeIndexAMDX\0", ); let val = _f(cname); if val.is_null() { get_execution_graph_pipeline_node_index_amdx } else { ::core::mem::transmute(val) } }, cmd_initialize_graph_scratch_memory_amdx: unsafe { unsafe extern "system" fn cmd_initialize_graph_scratch_memory_amdx( _command_buffer: CommandBuffer, _scratch: DeviceAddress, ) { panic!(concat!( "Unable to load ", stringify!(cmd_initialize_graph_scratch_memory_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdInitializeGraphScratchMemoryAMDX\0", ); let val = _f(cname); if val.is_null() { cmd_initialize_graph_scratch_memory_amdx } else { ::core::mem::transmute(val) } }, cmd_dispatch_graph_amdx: unsafe { unsafe extern "system" fn cmd_dispatch_graph_amdx( _command_buffer: CommandBuffer, _scratch: DeviceAddress, _p_count_info: *const DispatchGraphCountInfoAMDX, ) { panic!(concat!( "Unable to load ", stringify!(cmd_dispatch_graph_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchGraphAMDX\0"); let val = _f(cname); if val.is_null() { cmd_dispatch_graph_amdx } else { ::core::mem::transmute(val) } }, cmd_dispatch_graph_indirect_amdx: unsafe { unsafe extern "system" fn cmd_dispatch_graph_indirect_amdx( _command_buffer: CommandBuffer, _scratch: DeviceAddress, _p_count_info: *const DispatchGraphCountInfoAMDX, ) { panic!(concat!( "Unable to load ", stringify!(cmd_dispatch_graph_indirect_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDispatchGraphIndirectAMDX\0", ); let val = _f(cname); if val.is_null() { cmd_dispatch_graph_indirect_amdx } else { ::core::mem::transmute(val) } }, cmd_dispatch_graph_indirect_count_amdx: unsafe { unsafe extern "system" fn cmd_dispatch_graph_indirect_count_amdx( _command_buffer: CommandBuffer, _scratch: DeviceAddress, _count_info: DeviceAddress, ) { panic!(concat!( "Unable to load ", stringify!(cmd_dispatch_graph_indirect_count_amdx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDispatchGraphIndirectCountAMDX\0", ); let val = _f(cname); if val.is_null() { cmd_dispatch_graph_indirect_count_amdx } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged ANDROID"] pub mod android { #[doc = "VK_ANDROID_native_buffer"] pub mod native_buffer { use super::super::*; pub use { crate::vk::ANDROID_NATIVE_BUFFER_NAME as NAME, crate::vk::ANDROID_NATIVE_BUFFER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_ANDROID_native_buffer device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { 
self.handle } } #[derive(Clone)] #[doc = "Raw VK_ANDROID_native_buffer device-level function pointers"] pub struct DeviceFn { pub get_swapchain_gralloc_usage_android: PFN_vkGetSwapchainGrallocUsageANDROID, pub acquire_image_android: PFN_vkAcquireImageANDROID, pub queue_signal_release_image_android: PFN_vkQueueSignalReleaseImageANDROID, pub get_swapchain_gralloc_usage2_android: PFN_vkGetSwapchainGrallocUsage2ANDROID, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_swapchain_gralloc_usage_android: unsafe { unsafe extern "system" fn get_swapchain_gralloc_usage_android( _device: crate::vk::Device, _format: Format, _image_usage: ImageUsageFlags, _gralloc_usage: *mut c_int, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_swapchain_gralloc_usage_android) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetSwapchainGrallocUsageANDROID\0", ); let val = _f(cname); if val.is_null() { get_swapchain_gralloc_usage_android } else { ::core::mem::transmute(val) } }, acquire_image_android: unsafe { unsafe extern "system" fn acquire_image_android( _device: crate::vk::Device, _image: Image, _native_fence_fd: c_int, _semaphore: Semaphore, _fence: Fence, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_image_android) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireImageANDROID\0"); let val = _f(cname); if val.is_null() { acquire_image_android } else { ::core::mem::transmute(val) } }, queue_signal_release_image_android: unsafe { unsafe extern "system" fn queue_signal_release_image_android( _queue: Queue, _wait_semaphore_count: u32, _p_wait_semaphores: *const Semaphore, _image: Image, _p_native_fence_fd: *mut c_int, ) -> Result { panic!(concat!( "Unable to load ", stringify!(queue_signal_release_image_android) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkQueueSignalReleaseImageANDROID\0", ); let val = _f(cname); if val.is_null() { queue_signal_release_image_android } else { ::core::mem::transmute(val) } }, get_swapchain_gralloc_usage2_android: unsafe { unsafe extern "system" fn get_swapchain_gralloc_usage2_android( _device: crate::vk::Device, _format: Format, _image_usage: ImageUsageFlags, _swapchain_image_usage: SwapchainImageUsageFlagsANDROID, _gralloc_consumer_usage: *mut u64, _gralloc_producer_usage: *mut u64, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_swapchain_gralloc_usage2_android) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetSwapchainGrallocUsage2ANDROID\0", ); let val = _f(cname); if val.is_null() { get_swapchain_gralloc_usage2_android } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_ANDROID_external_memory_android_hardware_buffer"] pub mod external_memory_android_hardware_buffer { use super::super::*; pub use { crate::vk::ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_NAME as NAME, crate::vk::ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_ANDROID_external_memory_android_hardware_buffer device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { 
handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_ANDROID_external_memory_android_hardware_buffer device-level function pointers"] pub struct DeviceFn { pub get_android_hardware_buffer_properties_android: PFN_vkGetAndroidHardwareBufferPropertiesANDROID, pub get_memory_android_hardware_buffer_android: PFN_vkGetMemoryAndroidHardwareBufferANDROID, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_android_hardware_buffer_properties_android: unsafe { unsafe extern "system" fn get_android_hardware_buffer_properties_android( _device: crate::vk::Device, _buffer: *const AHardwareBuffer, _p_properties: *mut AndroidHardwareBufferPropertiesANDROID<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_android_hardware_buffer_properties_android) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetAndroidHardwareBufferPropertiesANDROID\0", ); let val = _f(cname); if val.is_null() { get_android_hardware_buffer_properties_android } else { ::core::mem::transmute(val) } }, get_memory_android_hardware_buffer_android: unsafe { unsafe extern "system" fn get_memory_android_hardware_buffer_android( _device: crate::vk::Device, _p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID<'_>, _p_buffer: *mut *mut AHardwareBuffer, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_android_hardware_buffer_android) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetMemoryAndroidHardwareBufferANDROID\0", ); let val = _f(cname); if val.is_null() { get_memory_android_hardware_buffer_android } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_ANDROID_external_format_resolve"] pub mod external_format_resolve { use super::super::*; pub use { crate::vk::ANDROID_EXTERNAL_FORMAT_RESOLVE_NAME as NAME, crate::vk::ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged ARM"] pub mod arm { #[doc = "VK_ARM_rasterization_order_attachment_access"] pub mod rasterization_order_attachment_access { use super::super::*; pub use { crate::vk::ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_NAME as NAME, crate::vk::ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_ARM_shader_core_properties"] pub mod shader_core_properties { use super::super::*; pub use { crate::vk::ARM_SHADER_CORE_PROPERTIES_NAME as NAME, crate::vk::ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_ARM_scheduling_controls"] pub mod scheduling_controls { use super::super::*; pub use { crate::vk::ARM_SCHEDULING_CONTROLS_NAME as NAME, crate::vk::ARM_SCHEDULING_CONTROLS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_ARM_render_pass_striped"] pub mod render_pass_striped { use super::super::*; pub use { crate::vk::ARM_RENDER_PASS_STRIPED_NAME as NAME, crate::vk::ARM_RENDER_PASS_STRIPED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_ARM_shader_core_builtins"] pub mod shader_core_builtins { use super::super::*; pub use { crate::vk::ARM_SHADER_CORE_BUILTINS_NAME as NAME, crate::vk::ARM_SHADER_CORE_BUILTINS_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged EXT"] pub mod ext { #[doc = "VK_EXT_debug_report"] pub mod debug_report { use super::super::*; pub use { crate::vk::EXT_DEBUG_REPORT_NAME as NAME, 
crate::vk::EXT_DEBUG_REPORT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_debug_report instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_debug_report instance-level function pointers"] pub struct InstanceFn { pub create_debug_report_callback_ext: PFN_vkCreateDebugReportCallbackEXT, pub destroy_debug_report_callback_ext: PFN_vkDestroyDebugReportCallbackEXT, pub debug_report_message_ext: PFN_vkDebugReportMessageEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_debug_report_callback_ext: unsafe { unsafe extern "system" fn create_debug_report_callback_ext( _instance: crate::vk::Instance, _p_create_info: *const DebugReportCallbackCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_callback: *mut DebugReportCallbackEXT, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_debug_report_callback_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateDebugReportCallbackEXT\0", ); let val = _f(cname); if val.is_null() { create_debug_report_callback_ext } else { ::core::mem::transmute(val) } }, destroy_debug_report_callback_ext: unsafe { unsafe extern "system" fn destroy_debug_report_callback_ext( _instance: crate::vk::Instance, _callback: DebugReportCallbackEXT, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_debug_report_callback_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyDebugReportCallbackEXT\0", ); let val = _f(cname); if val.is_null() { destroy_debug_report_callback_ext } else { ::core::mem::transmute(val) } }, debug_report_message_ext: unsafe { unsafe extern "system" fn debug_report_message_ext( _instance: crate::vk::Instance, _flags: DebugReportFlagsEXT, _object_type: DebugReportObjectTypeEXT, _object: u64, _location: usize, _message_code: i32, _p_layer_prefix: *const c_char, _p_message: *const c_char, ) { panic!(concat!( "Unable to load ", stringify!(debug_report_message_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDebugReportMessageEXT\0"); let val = _f(cname); if val.is_null() { debug_report_message_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_depth_range_unrestricted"] pub mod depth_range_unrestricted { use super::super::*; pub use { crate::vk::EXT_DEPTH_RANGE_UNRESTRICTED_NAME as NAME, crate::vk::EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_debug_marker"] pub mod debug_marker { use super::super::*; pub use { crate::vk::EXT_DEBUG_MARKER_NAME as NAME, crate::vk::EXT_DEBUG_MARKER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_debug_marker device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp 
= DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_debug_marker device-level function pointers"] pub struct DeviceFn { pub debug_marker_set_object_tag_ext: PFN_vkDebugMarkerSetObjectTagEXT, pub debug_marker_set_object_name_ext: PFN_vkDebugMarkerSetObjectNameEXT, pub cmd_debug_marker_begin_ext: PFN_vkCmdDebugMarkerBeginEXT, pub cmd_debug_marker_end_ext: PFN_vkCmdDebugMarkerEndEXT, pub cmd_debug_marker_insert_ext: PFN_vkCmdDebugMarkerInsertEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { debug_marker_set_object_tag_ext: unsafe { unsafe extern "system" fn debug_marker_set_object_tag_ext( _device: crate::vk::Device, _p_tag_info: *const DebugMarkerObjectTagInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(debug_marker_set_object_tag_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDebugMarkerSetObjectTagEXT\0"); let val = _f(cname); if val.is_null() { debug_marker_set_object_tag_ext } else { ::core::mem::transmute(val) } }, debug_marker_set_object_name_ext: unsafe { unsafe extern "system" fn debug_marker_set_object_name_ext( _device: crate::vk::Device, _p_name_info: *const DebugMarkerObjectNameInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(debug_marker_set_object_name_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDebugMarkerSetObjectNameEXT\0"); let val = _f(cname); if val.is_null() { debug_marker_set_object_name_ext } else { ::core::mem::transmute(val) } }, cmd_debug_marker_begin_ext: unsafe { unsafe extern "system" fn cmd_debug_marker_begin_ext( _command_buffer: CommandBuffer, _p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_debug_marker_begin_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerBeginEXT\0"); let val = _f(cname); if val.is_null() { cmd_debug_marker_begin_ext } else { ::core::mem::transmute(val) } }, cmd_debug_marker_end_ext: unsafe { unsafe extern "system" fn cmd_debug_marker_end_ext( _command_buffer: CommandBuffer, ) { panic!(concat!( "Unable to load ", stringify!(cmd_debug_marker_end_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerEndEXT\0"); let val = _f(cname); if val.is_null() { cmd_debug_marker_end_ext } else { ::core::mem::transmute(val) } }, cmd_debug_marker_insert_ext: unsafe { unsafe extern "system" fn cmd_debug_marker_insert_ext( _command_buffer: CommandBuffer, _p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_debug_marker_insert_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerInsertEXT\0"); let val = _f(cname); if val.is_null() { cmd_debug_marker_insert_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_transform_feedback"] pub mod transform_feedback { use super::super::*; pub use { crate::vk::EXT_TRANSFORM_FEEDBACK_NAME as NAME, crate::vk::EXT_TRANSFORM_FEEDBACK_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_transform_feedback device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, 
} impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_transform_feedback device-level function pointers"] pub struct DeviceFn { pub cmd_bind_transform_feedback_buffers_ext: PFN_vkCmdBindTransformFeedbackBuffersEXT, pub cmd_begin_transform_feedback_ext: PFN_vkCmdBeginTransformFeedbackEXT, pub cmd_end_transform_feedback_ext: PFN_vkCmdEndTransformFeedbackEXT, pub cmd_begin_query_indexed_ext: PFN_vkCmdBeginQueryIndexedEXT, pub cmd_end_query_indexed_ext: PFN_vkCmdEndQueryIndexedEXT, pub cmd_draw_indirect_byte_count_ext: PFN_vkCmdDrawIndirectByteCountEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_bind_transform_feedback_buffers_ext: unsafe { unsafe extern "system" fn cmd_bind_transform_feedback_buffers_ext( _command_buffer: CommandBuffer, _first_binding: u32, _binding_count: u32, _p_buffers: *const Buffer, _p_offsets: *const DeviceSize, _p_sizes: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_transform_feedback_buffers_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBindTransformFeedbackBuffersEXT\0", ); let val = _f(cname); if val.is_null() { cmd_bind_transform_feedback_buffers_ext } else { ::core::mem::transmute(val) } }, cmd_begin_transform_feedback_ext: unsafe { unsafe extern "system" fn cmd_begin_transform_feedback_ext( _command_buffer: CommandBuffer, _first_counter_buffer: u32, _counter_buffer_count: u32, _p_counter_buffers: *const Buffer, _p_counter_buffer_offsets: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_transform_feedback_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBeginTransformFeedbackEXT\0", ); let val = _f(cname); if val.is_null() { cmd_begin_transform_feedback_ext } else { ::core::mem::transmute(val) } }, cmd_end_transform_feedback_ext: unsafe { unsafe extern "system" fn cmd_end_transform_feedback_ext( _command_buffer: CommandBuffer, _first_counter_buffer: u32, _counter_buffer_count: u32, _p_counter_buffers: *const Buffer, _p_counter_buffer_offsets: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_end_transform_feedback_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndTransformFeedbackEXT\0"); let val = _f(cname); if val.is_null() { cmd_end_transform_feedback_ext } else { ::core::mem::transmute(val) } }, cmd_begin_query_indexed_ext: unsafe { unsafe extern "system" fn cmd_begin_query_indexed_ext( _command_buffer: CommandBuffer, _query_pool: QueryPool, _query: u32, _flags: QueryControlFlags, _index: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_query_indexed_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginQueryIndexedEXT\0"); let val = _f(cname); if val.is_null() { cmd_begin_query_indexed_ext } else { ::core::mem::transmute(val) } }, cmd_end_query_indexed_ext: unsafe { unsafe extern "system" fn cmd_end_query_indexed_ext( _command_buffer: CommandBuffer, _query_pool: QueryPool, _query: u32, _index: u32, ) { panic!(concat!( "Unable to load ", 
stringify!(cmd_end_query_indexed_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndQueryIndexedEXT\0"); let val = _f(cname); if val.is_null() { cmd_end_query_indexed_ext } else { ::core::mem::transmute(val) } }, cmd_draw_indirect_byte_count_ext: unsafe { unsafe extern "system" fn cmd_draw_indirect_byte_count_ext( _command_buffer: CommandBuffer, _instance_count: u32, _first_instance: u32, _counter_buffer: Buffer, _counter_buffer_offset: DeviceSize, _counter_offset: u32, _vertex_stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indirect_byte_count_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectByteCountEXT\0"); let val = _f(cname); if val.is_null() { cmd_draw_indirect_byte_count_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_validation_flags"] pub mod validation_flags { use super::super::*; pub use { crate::vk::EXT_VALIDATION_FLAGS_NAME as NAME, crate::vk::EXT_VALIDATION_FLAGS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_subgroup_ballot"] pub mod shader_subgroup_ballot { use super::super::*; pub use { crate::vk::EXT_SHADER_SUBGROUP_BALLOT_NAME as NAME, crate::vk::EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_subgroup_vote"] pub mod shader_subgroup_vote { use super::super::*; pub use { crate::vk::EXT_SHADER_SUBGROUP_VOTE_NAME as NAME, crate::vk::EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_texture_compression_astc_hdr"] pub mod texture_compression_astc_hdr { use super::super::*; pub use { crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_HDR_NAME as NAME, crate::vk::EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_astc_decode_mode"] pub mod astc_decode_mode { use super::super::*; pub use { crate::vk::EXT_ASTC_DECODE_MODE_NAME as NAME, crate::vk::EXT_ASTC_DECODE_MODE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pipeline_robustness"] pub mod pipeline_robustness { use super::super::*; pub use { crate::vk::EXT_PIPELINE_ROBUSTNESS_NAME as NAME, crate::vk::EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_conditional_rendering"] pub mod conditional_rendering { use super::super::*; pub use { crate::vk::EXT_CONDITIONAL_RENDERING_NAME as NAME, crate::vk::EXT_CONDITIONAL_RENDERING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_conditional_rendering device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_conditional_rendering device-level function pointers"] pub struct DeviceFn { pub cmd_begin_conditional_rendering_ext: PFN_vkCmdBeginConditionalRenderingEXT, pub cmd_end_conditional_rendering_ext: PFN_vkCmdEndConditionalRenderingEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_begin_conditional_rendering_ext: unsafe { unsafe extern "system" fn cmd_begin_conditional_rendering_ext( _command_buffer: 
CommandBuffer, _p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT< '_, >, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_conditional_rendering_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBeginConditionalRenderingEXT\0", ); let val = _f(cname); if val.is_null() { cmd_begin_conditional_rendering_ext } else { ::core::mem::transmute(val) } }, cmd_end_conditional_rendering_ext: unsafe { unsafe extern "system" fn cmd_end_conditional_rendering_ext( _command_buffer: CommandBuffer, ) { panic!(concat!( "Unable to load ", stringify!(cmd_end_conditional_rendering_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdEndConditionalRenderingEXT\0", ); let val = _f(cname); if val.is_null() { cmd_end_conditional_rendering_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_direct_mode_display"] pub mod direct_mode_display { use super::super::*; pub use { crate::vk::EXT_DIRECT_MODE_DISPLAY_NAME as NAME, crate::vk::EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_direct_mode_display instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_direct_mode_display instance-level function pointers"] pub struct InstanceFn { pub release_display_ext: PFN_vkReleaseDisplayEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { release_display_ext: unsafe { unsafe extern "system" fn release_display_ext( _physical_device: PhysicalDevice, _display: DisplayKHR, ) -> Result { panic!(concat!("Unable to load ", stringify!(release_display_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkReleaseDisplayEXT\0"); let val = _f(cname); if val.is_null() { release_display_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_acquire_xlib_display"] pub mod acquire_xlib_display { use super::super::*; pub use { crate::vk::EXT_ACQUIRE_XLIB_DISPLAY_NAME as NAME, crate::vk::EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_acquire_xlib_display instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_acquire_xlib_display instance-level function pointers"] pub struct InstanceFn { pub acquire_xlib_display_ext: PFN_vkAcquireXlibDisplayEXT, pub get_rand_r_output_display_ext: PFN_vkGetRandROutputDisplayEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load 
*const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { acquire_xlib_display_ext: unsafe { unsafe extern "system" fn acquire_xlib_display_ext( _physical_device: PhysicalDevice, _dpy: *mut Display, _display: DisplayKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_xlib_display_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireXlibDisplayEXT\0"); let val = _f(cname); if val.is_null() { acquire_xlib_display_ext } else { ::core::mem::transmute(val) } }, get_rand_r_output_display_ext: unsafe { unsafe extern "system" fn get_rand_r_output_display_ext( _physical_device: PhysicalDevice, _dpy: *mut Display, _rr_output: RROutput, _p_display: *mut DisplayKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_rand_r_output_display_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetRandROutputDisplayEXT\0"); let val = _f(cname); if val.is_null() { get_rand_r_output_display_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_display_surface_counter"] pub mod display_surface_counter { use super::super::*; pub use { crate::vk::EXT_DISPLAY_SURFACE_COUNTER_NAME as NAME, crate::vk::EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_display_surface_counter instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_display_surface_counter instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_surface_capabilities2_ext: PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_surface_capabilities2_ext: unsafe { unsafe extern "system" fn get_physical_device_surface_capabilities2_ext( _physical_device: PhysicalDevice, _surface: SurfaceKHR, _p_surface_capabilities: *mut SurfaceCapabilities2EXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_capabilities2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfaceCapabilities2EXT\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_capabilities2_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_display_control"] pub mod display_control { use super::super::*; pub use { crate::vk::EXT_DISPLAY_CONTROL_NAME as NAME, crate::vk::EXT_DISPLAY_CONTROL_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_display_control device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } 
#[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_display_control device-level function pointers"] pub struct DeviceFn { pub display_power_control_ext: PFN_vkDisplayPowerControlEXT, pub register_device_event_ext: PFN_vkRegisterDeviceEventEXT, pub register_display_event_ext: PFN_vkRegisterDisplayEventEXT, pub get_swapchain_counter_ext: PFN_vkGetSwapchainCounterEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { display_power_control_ext: unsafe { unsafe extern "system" fn display_power_control_ext( _device: crate::vk::Device, _display: DisplayKHR, _p_display_power_info: *const DisplayPowerInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(display_power_control_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDisplayPowerControlEXT\0"); let val = _f(cname); if val.is_null() { display_power_control_ext } else { ::core::mem::transmute(val) } }, register_device_event_ext: unsafe { unsafe extern "system" fn register_device_event_ext( _device: crate::vk::Device, _p_device_event_info: *const DeviceEventInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_fence: *mut Fence, ) -> Result { panic!(concat!( "Unable to load ", stringify!(register_device_event_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkRegisterDeviceEventEXT\0"); let val = _f(cname); if val.is_null() { register_device_event_ext } else { ::core::mem::transmute(val) } }, register_display_event_ext: unsafe { unsafe extern "system" fn register_display_event_ext( _device: crate::vk::Device, _display: DisplayKHR, _p_display_event_info: *const DisplayEventInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_fence: *mut Fence, ) -> Result { panic!(concat!( "Unable to load ", stringify!(register_display_event_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkRegisterDisplayEventEXT\0"); let val = _f(cname); if val.is_null() { register_display_event_ext } else { ::core::mem::transmute(val) } }, get_swapchain_counter_ext: unsafe { unsafe extern "system" fn get_swapchain_counter_ext( _device: crate::vk::Device, _swapchain: SwapchainKHR, _counter: SurfaceCounterFlagsEXT, _p_counter_value: *mut u64, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_swapchain_counter_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSwapchainCounterEXT\0"); let val = _f(cname); if val.is_null() { get_swapchain_counter_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_discard_rectangles"] pub mod discard_rectangles { use super::super::*; pub use { crate::vk::EXT_DISCARD_RECTANGLES_NAME as NAME, crate::vk::EXT_DISCARD_RECTANGLES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_discard_rectangles device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_discard_rectangles 
device-level function pointers"] pub struct DeviceFn { pub cmd_set_discard_rectangle_ext: PFN_vkCmdSetDiscardRectangleEXT, pub cmd_set_discard_rectangle_enable_ext: PFN_vkCmdSetDiscardRectangleEnableEXT, pub cmd_set_discard_rectangle_mode_ext: PFN_vkCmdSetDiscardRectangleModeEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_discard_rectangle_ext: unsafe { unsafe extern "system" fn cmd_set_discard_rectangle_ext( _command_buffer: CommandBuffer, _first_discard_rectangle: u32, _discard_rectangle_count: u32, _p_discard_rectangles: *const Rect2D, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_discard_rectangle_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDiscardRectangleEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_discard_rectangle_ext } else { ::core::mem::transmute(val) } }, cmd_set_discard_rectangle_enable_ext: unsafe { unsafe extern "system" fn cmd_set_discard_rectangle_enable_ext( _command_buffer: CommandBuffer, _discard_rectangle_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_discard_rectangle_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDiscardRectangleEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_discard_rectangle_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_discard_rectangle_mode_ext: unsafe { unsafe extern "system" fn cmd_set_discard_rectangle_mode_ext( _command_buffer: CommandBuffer, _discard_rectangle_mode: DiscardRectangleModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_discard_rectangle_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDiscardRectangleModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_discard_rectangle_mode_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_conservative_rasterization"] pub mod conservative_rasterization { use super::super::*; pub use { crate::vk::EXT_CONSERVATIVE_RASTERIZATION_NAME as NAME, crate::vk::EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_depth_clip_enable"] pub mod depth_clip_enable { use super::super::*; pub use { crate::vk::EXT_DEPTH_CLIP_ENABLE_NAME as NAME, crate::vk::EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_swapchain_colorspace"] pub mod swapchain_colorspace { use super::super::*; pub use { crate::vk::EXT_SWAPCHAIN_COLORSPACE_NAME as NAME, crate::vk::EXT_SWAPCHAIN_COLORSPACE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_hdr_metadata"] pub mod hdr_metadata { use super::super::*; pub use { crate::vk::EXT_HDR_METADATA_NAME as NAME, crate::vk::EXT_HDR_METADATA_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_hdr_metadata device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_hdr_metadata device-level function pointers"] pub struct DeviceFn { pub set_hdr_metadata_ext: 
PFN_vkSetHdrMetadataEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { set_hdr_metadata_ext: unsafe { unsafe extern "system" fn set_hdr_metadata_ext( _device: crate::vk::Device, _swapchain_count: u32, _p_swapchains: *const SwapchainKHR, _p_metadata: *const HdrMetadataEXT<'_>, ) { panic!(concat!("Unable to load ", stringify!(set_hdr_metadata_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetHdrMetadataEXT\0"); let val = _f(cname); if val.is_null() { set_hdr_metadata_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_external_memory_dma_buf"] pub mod external_memory_dma_buf { use super::super::*; pub use { crate::vk::EXT_EXTERNAL_MEMORY_DMA_BUF_NAME as NAME, crate::vk::EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_queue_family_foreign"] pub mod queue_family_foreign { use super::super::*; pub use { crate::vk::EXT_QUEUE_FAMILY_FOREIGN_NAME as NAME, crate::vk::EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_debug_utils"] pub mod debug_utils { use super::super::*; pub use { crate::vk::EXT_DEBUG_UTILS_NAME as NAME, crate::vk::EXT_DEBUG_UTILS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_debug_utils instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_debug_utils instance-level function pointers"] pub struct InstanceFn { pub create_debug_utils_messenger_ext: PFN_vkCreateDebugUtilsMessengerEXT, pub destroy_debug_utils_messenger_ext: PFN_vkDestroyDebugUtilsMessengerEXT, pub submit_debug_utils_message_ext: PFN_vkSubmitDebugUtilsMessageEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_debug_utils_messenger_ext: unsafe { unsafe extern "system" fn create_debug_utils_messenger_ext( _instance: crate::vk::Instance, _p_create_info: *const DebugUtilsMessengerCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_messenger: *mut DebugUtilsMessengerEXT, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_debug_utils_messenger_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateDebugUtilsMessengerEXT\0", ); let val = _f(cname); if val.is_null() { create_debug_utils_messenger_ext } else { ::core::mem::transmute(val) } }, destroy_debug_utils_messenger_ext: unsafe { unsafe extern "system" fn destroy_debug_utils_messenger_ext( _instance: crate::vk::Instance, _messenger: DebugUtilsMessengerEXT, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_debug_utils_messenger_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyDebugUtilsMessengerEXT\0", ); let val = _f(cname); if val.is_null() { destroy_debug_utils_messenger_ext } 
else { ::core::mem::transmute(val) } }, submit_debug_utils_message_ext: unsafe { unsafe extern "system" fn submit_debug_utils_message_ext( _instance: crate::vk::Instance, _message_severity: DebugUtilsMessageSeverityFlagsEXT, _message_types: DebugUtilsMessageTypeFlagsEXT, _p_callback_data: *const DebugUtilsMessengerCallbackDataEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(submit_debug_utils_message_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSubmitDebugUtilsMessageEXT\0"); let val = _f(cname); if val.is_null() { submit_debug_utils_message_ext } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_EXT_debug_utils device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_debug_utils device-level function pointers"] pub struct DeviceFn { pub set_debug_utils_object_name_ext: PFN_vkSetDebugUtilsObjectNameEXT, pub set_debug_utils_object_tag_ext: PFN_vkSetDebugUtilsObjectTagEXT, pub queue_begin_debug_utils_label_ext: PFN_vkQueueBeginDebugUtilsLabelEXT, pub queue_end_debug_utils_label_ext: PFN_vkQueueEndDebugUtilsLabelEXT, pub queue_insert_debug_utils_label_ext: PFN_vkQueueInsertDebugUtilsLabelEXT, pub cmd_begin_debug_utils_label_ext: PFN_vkCmdBeginDebugUtilsLabelEXT, pub cmd_end_debug_utils_label_ext: PFN_vkCmdEndDebugUtilsLabelEXT, pub cmd_insert_debug_utils_label_ext: PFN_vkCmdInsertDebugUtilsLabelEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { set_debug_utils_object_name_ext: unsafe { unsafe extern "system" fn set_debug_utils_object_name_ext( _device: crate::vk::Device, _p_name_info: *const DebugUtilsObjectNameInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(set_debug_utils_object_name_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetDebugUtilsObjectNameEXT\0"); let val = _f(cname); if val.is_null() { set_debug_utils_object_name_ext } else { ::core::mem::transmute(val) } }, set_debug_utils_object_tag_ext: unsafe { unsafe extern "system" fn set_debug_utils_object_tag_ext( _device: crate::vk::Device, _p_tag_info: *const DebugUtilsObjectTagInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(set_debug_utils_object_tag_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetDebugUtilsObjectTagEXT\0"); let val = _f(cname); if val.is_null() { set_debug_utils_object_tag_ext } else { ::core::mem::transmute(val) } }, queue_begin_debug_utils_label_ext: unsafe { unsafe extern "system" fn queue_begin_debug_utils_label_ext( _queue: Queue, _p_label_info: *const DebugUtilsLabelEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(queue_begin_debug_utils_label_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkQueueBeginDebugUtilsLabelEXT\0", ); let val = _f(cname); if val.is_null() { queue_begin_debug_utils_label_ext } else { ::core::mem::transmute(val) } }, queue_end_debug_utils_label_ext: unsafe { unsafe extern "system" 
fn queue_end_debug_utils_label_ext(_queue: Queue) { panic!(concat!( "Unable to load ", stringify!(queue_end_debug_utils_label_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueEndDebugUtilsLabelEXT\0"); let val = _f(cname); if val.is_null() { queue_end_debug_utils_label_ext } else { ::core::mem::transmute(val) } }, queue_insert_debug_utils_label_ext: unsafe { unsafe extern "system" fn queue_insert_debug_utils_label_ext( _queue: Queue, _p_label_info: *const DebugUtilsLabelEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(queue_insert_debug_utils_label_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkQueueInsertDebugUtilsLabelEXT\0", ); let val = _f(cname); if val.is_null() { queue_insert_debug_utils_label_ext } else { ::core::mem::transmute(val) } }, cmd_begin_debug_utils_label_ext: unsafe { unsafe extern "system" fn cmd_begin_debug_utils_label_ext( _command_buffer: CommandBuffer, _p_label_info: *const DebugUtilsLabelEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_debug_utils_label_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginDebugUtilsLabelEXT\0"); let val = _f(cname); if val.is_null() { cmd_begin_debug_utils_label_ext } else { ::core::mem::transmute(val) } }, cmd_end_debug_utils_label_ext: unsafe { unsafe extern "system" fn cmd_end_debug_utils_label_ext( _command_buffer: CommandBuffer, ) { panic!(concat!( "Unable to load ", stringify!(cmd_end_debug_utils_label_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndDebugUtilsLabelEXT\0"); let val = _f(cname); if val.is_null() { cmd_end_debug_utils_label_ext } else { ::core::mem::transmute(val) } }, cmd_insert_debug_utils_label_ext: unsafe { unsafe extern "system" fn cmd_insert_debug_utils_label_ext( _command_buffer: CommandBuffer, _p_label_info: *const DebugUtilsLabelEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_insert_debug_utils_label_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdInsertDebugUtilsLabelEXT\0"); let val = _f(cname); if val.is_null() { cmd_insert_debug_utils_label_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_sampler_filter_minmax"] pub mod sampler_filter_minmax { use super::super::*; pub use { crate::vk::EXT_SAMPLER_FILTER_MINMAX_NAME as NAME, crate::vk::EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_inline_uniform_block"] pub mod inline_uniform_block { use super::super::*; pub use { crate::vk::EXT_INLINE_UNIFORM_BLOCK_NAME as NAME, crate::vk::EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_stencil_export"] pub mod shader_stencil_export { use super::super::*; pub use { crate::vk::EXT_SHADER_STENCIL_EXPORT_NAME as NAME, crate::vk::EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_sample_locations"] pub mod sample_locations { use super::super::*; pub use { crate::vk::EXT_SAMPLE_LOCATIONS_NAME as NAME, crate::vk::EXT_SAMPLE_LOCATIONS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_sample_locations instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> 
crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_sample_locations instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_multisample_properties_ext: PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_multisample_properties_ext: unsafe { unsafe extern "system" fn get_physical_device_multisample_properties_ext( _physical_device: PhysicalDevice, _samples: SampleCountFlags, _p_multisample_properties: *mut MultisamplePropertiesEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_multisample_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceMultisamplePropertiesEXT\0", ); let val = _f(cname); if val.is_null() { get_physical_device_multisample_properties_ext } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_EXT_sample_locations device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_sample_locations device-level function pointers"] pub struct DeviceFn { pub cmd_set_sample_locations_ext: PFN_vkCmdSetSampleLocationsEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_sample_locations_ext: unsafe { unsafe extern "system" fn cmd_set_sample_locations_ext( _command_buffer: CommandBuffer, _p_sample_locations_info: *const SampleLocationsInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_sample_locations_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetSampleLocationsEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_sample_locations_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_blend_operation_advanced"] pub mod blend_operation_advanced { use super::super::*; pub use { crate::vk::EXT_BLEND_OPERATION_ADVANCED_NAME as NAME, crate::vk::EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_post_depth_coverage"] pub mod post_depth_coverage { use super::super::*; pub use { crate::vk::EXT_POST_DEPTH_COVERAGE_NAME as NAME, crate::vk::EXT_POST_DEPTH_COVERAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_image_drm_format_modifier"] pub mod image_drm_format_modifier { use super::super::*; pub use { crate::vk::EXT_IMAGE_DRM_FORMAT_MODIFIER_NAME as NAME, crate::vk::EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_image_drm_format_modifier device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { 
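// `get_device_proc_addr` returns a `PFN_vkVoidFunction` (an `Option` of an `extern "system" fn` pointer); the transmute below reinterprets it as the `*const c_void` expected by the loader closure, so an entry point the driver does not expose arrives as a null pointer.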
core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_image_drm_format_modifier device-level function pointers"] pub struct DeviceFn { pub get_image_drm_format_modifier_properties_ext: PFN_vkGetImageDrmFormatModifierPropertiesEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_image_drm_format_modifier_properties_ext: unsafe { unsafe extern "system" fn get_image_drm_format_modifier_properties_ext( _device: crate::vk::Device, _image: Image, _p_properties: *mut ImageDrmFormatModifierPropertiesEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_image_drm_format_modifier_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageDrmFormatModifierPropertiesEXT\0", ); let val = _f(cname); if val.is_null() { get_image_drm_format_modifier_properties_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_validation_cache"] pub mod validation_cache { use super::super::*; pub use { crate::vk::EXT_VALIDATION_CACHE_NAME as NAME, crate::vk::EXT_VALIDATION_CACHE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_validation_cache device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_validation_cache device-level function pointers"] pub struct DeviceFn { pub create_validation_cache_ext: PFN_vkCreateValidationCacheEXT, pub destroy_validation_cache_ext: PFN_vkDestroyValidationCacheEXT, pub merge_validation_caches_ext: PFN_vkMergeValidationCachesEXT, pub get_validation_cache_data_ext: PFN_vkGetValidationCacheDataEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_validation_cache_ext: unsafe { unsafe extern "system" fn create_validation_cache_ext( _device: crate::vk::Device, _p_create_info: *const ValidationCacheCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_validation_cache: *mut ValidationCacheEXT, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_validation_cache_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateValidationCacheEXT\0"); let val = _f(cname); if val.is_null() { create_validation_cache_ext } else { ::core::mem::transmute(val) } }, destroy_validation_cache_ext: unsafe { unsafe extern "system" fn destroy_validation_cache_ext( _device: crate::vk::Device, _validation_cache: ValidationCacheEXT, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_validation_cache_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyValidationCacheEXT\0"); let val = 
_f(cname); if val.is_null() { destroy_validation_cache_ext } else { ::core::mem::transmute(val) } }, merge_validation_caches_ext: unsafe { unsafe extern "system" fn merge_validation_caches_ext( _device: crate::vk::Device, _dst_cache: ValidationCacheEXT, _src_cache_count: u32, _p_src_caches: *const ValidationCacheEXT, ) -> Result { panic!(concat!( "Unable to load ", stringify!(merge_validation_caches_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkMergeValidationCachesEXT\0"); let val = _f(cname); if val.is_null() { merge_validation_caches_ext } else { ::core::mem::transmute(val) } }, get_validation_cache_data_ext: unsafe { unsafe extern "system" fn get_validation_cache_data_ext( _device: crate::vk::Device, _validation_cache: ValidationCacheEXT, _p_data_size: *mut usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_validation_cache_data_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetValidationCacheDataEXT\0"); let val = _f(cname); if val.is_null() { get_validation_cache_data_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_descriptor_indexing"] pub mod descriptor_indexing { use super::super::*; pub use { crate::vk::EXT_DESCRIPTOR_INDEXING_NAME as NAME, crate::vk::EXT_DESCRIPTOR_INDEXING_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_viewport_index_layer"] pub mod shader_viewport_index_layer { use super::super::*; pub use { crate::vk::EXT_SHADER_VIEWPORT_INDEX_LAYER_NAME as NAME, crate::vk::EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_filter_cubic"] pub mod filter_cubic { use super::super::*; pub use { crate::vk::EXT_FILTER_CUBIC_NAME as NAME, crate::vk::EXT_FILTER_CUBIC_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_global_priority"] pub mod global_priority { use super::super::*; pub use { crate::vk::EXT_GLOBAL_PRIORITY_NAME as NAME, crate::vk::EXT_GLOBAL_PRIORITY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_external_memory_host"] pub mod external_memory_host { use super::super::*; pub use { crate::vk::EXT_EXTERNAL_MEMORY_HOST_NAME as NAME, crate::vk::EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_external_memory_host device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_external_memory_host device-level function pointers"] pub struct DeviceFn { pub get_memory_host_pointer_properties_ext: PFN_vkGetMemoryHostPointerPropertiesEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_memory_host_pointer_properties_ext: unsafe { unsafe extern "system" fn get_memory_host_pointer_properties_ext( _device: crate::vk::Device, _handle_type: ExternalMemoryHandleTypeFlags, _p_host_pointer: *const c_void, _p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT< '_, >, ) -> Result { panic!(concat!( "Unable to load ", 
stringify!(get_memory_host_pointer_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetMemoryHostPointerPropertiesEXT\0", ); let val = _f(cname); if val.is_null() { get_memory_host_pointer_properties_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_calibrated_timestamps"] pub mod calibrated_timestamps { use super::super::*; pub use { crate::vk::EXT_CALIBRATED_TIMESTAMPS_NAME as NAME, crate::vk::EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_calibrated_timestamps instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_calibrated_timestamps instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_calibrateable_time_domains_ext: PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_calibrateable_time_domains_ext: unsafe { unsafe extern "system" fn get_physical_device_calibrateable_time_domains_ext( _physical_device: PhysicalDevice, _p_time_domain_count: *mut u32, _p_time_domains: *mut TimeDomainKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_calibrateable_time_domains_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceCalibrateableTimeDomainsEXT\0", ); let val = _f(cname); if val.is_null() { get_physical_device_calibrateable_time_domains_ext } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_EXT_calibrated_timestamps device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_calibrated_timestamps device-level function pointers"] pub struct DeviceFn { pub get_calibrated_timestamps_ext: PFN_vkGetCalibratedTimestampsKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_calibrated_timestamps_ext: unsafe { unsafe extern "system" fn get_calibrated_timestamps_ext( _device: crate::vk::Device, _timestamp_count: u32, _p_timestamp_infos: *const CalibratedTimestampInfoKHR<'_>, _p_timestamps: *mut u64, _p_max_deviation: *mut u64, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_calibrated_timestamps_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetCalibratedTimestampsEXT\0"); let val = 
_f(cname); if val.is_null() { get_calibrated_timestamps_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_vertex_attribute_divisor"] pub mod vertex_attribute_divisor { use super::super::*; pub use { crate::vk::EXT_VERTEX_ATTRIBUTE_DIVISOR_NAME as NAME, crate::vk::EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pipeline_creation_feedback"] pub mod pipeline_creation_feedback { use super::super::*; pub use { crate::vk::EXT_PIPELINE_CREATION_FEEDBACK_NAME as NAME, crate::vk::EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pci_bus_info"] pub mod pci_bus_info { use super::super::*; pub use { crate::vk::EXT_PCI_BUS_INFO_NAME as NAME, crate::vk::EXT_PCI_BUS_INFO_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_metal_surface"] pub mod metal_surface { use super::super::*; pub use { crate::vk::EXT_METAL_SURFACE_NAME as NAME, crate::vk::EXT_METAL_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_metal_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_metal_surface instance-level function pointers"] pub struct InstanceFn { pub create_metal_surface_ext: PFN_vkCreateMetalSurfaceEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_metal_surface_ext: unsafe { unsafe extern "system" fn create_metal_surface_ext( _instance: crate::vk::Instance, _p_create_info: *const MetalSurfaceCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_metal_surface_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateMetalSurfaceEXT\0"); let val = _f(cname); if val.is_null() { create_metal_surface_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_fragment_density_map"] pub mod fragment_density_map { use super::super::*; pub use { crate::vk::EXT_FRAGMENT_DENSITY_MAP_NAME as NAME, crate::vk::EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_scalar_block_layout"] pub mod scalar_block_layout { use super::super::*; pub use { crate::vk::EXT_SCALAR_BLOCK_LAYOUT_NAME as NAME, crate::vk::EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_subgroup_size_control"] pub mod subgroup_size_control { use super::super::*; pub use { crate::vk::EXT_SUBGROUP_SIZE_CONTROL_NAME as NAME, crate::vk::EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_image_atomic_int64"] pub mod shader_image_atomic_int64 { use super::super::*; pub use { crate::vk::EXT_SHADER_IMAGE_ATOMIC_INT64_NAME as NAME, crate::vk::EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_memory_budget"] pub mod memory_budget { use super::super::*; pub use { crate::vk::EXT_MEMORY_BUDGET_NAME as NAME, crate::vk::EXT_MEMORY_BUDGET_SPEC_VERSION 
as SPEC_VERSION, }; } #[doc = "VK_EXT_memory_priority"] pub mod memory_priority { use super::super::*; pub use { crate::vk::EXT_MEMORY_PRIORITY_NAME as NAME, crate::vk::EXT_MEMORY_PRIORITY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_buffer_device_address"] pub mod buffer_device_address { use super::super::*; pub use { crate::vk::EXT_BUFFER_DEVICE_ADDRESS_NAME as NAME, crate::vk::EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_buffer_device_address device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_buffer_device_address device-level function pointers"] pub struct DeviceFn { pub get_buffer_device_address_ext: PFN_vkGetBufferDeviceAddress, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_buffer_device_address_ext: unsafe { unsafe extern "system" fn get_buffer_device_address_ext( _device: crate::vk::Device, _p_info: *const BufferDeviceAddressInfo<'_>, ) -> DeviceAddress { panic!(concat!( "Unable to load ", stringify!(get_buffer_device_address_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetBufferDeviceAddressEXT\0"); let val = _f(cname); if val.is_null() { get_buffer_device_address_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_tooling_info"] pub mod tooling_info { use super::super::*; pub use { crate::vk::EXT_TOOLING_INFO_NAME as NAME, crate::vk::EXT_TOOLING_INFO_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_tooling_info instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_tooling_info instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_tool_properties_ext: PFN_vkGetPhysicalDeviceToolProperties, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_tool_properties_ext: unsafe { unsafe extern "system" fn get_physical_device_tool_properties_ext( _physical_device: PhysicalDevice, _p_tool_count: *mut u32, _p_tool_properties: *mut PhysicalDeviceToolProperties<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_tool_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceToolPropertiesEXT\0", ); let val = _f(cname); if val.is_null() { 
get_physical_device_tool_properties_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_separate_stencil_usage"] pub mod separate_stencil_usage { use super::super::*; pub use { crate::vk::EXT_SEPARATE_STENCIL_USAGE_NAME as NAME, crate::vk::EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_validation_features"] pub mod validation_features { use super::super::*; pub use { crate::vk::EXT_VALIDATION_FEATURES_NAME as NAME, crate::vk::EXT_VALIDATION_FEATURES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_fragment_shader_interlock"] pub mod fragment_shader_interlock { use super::super::*; pub use { crate::vk::EXT_FRAGMENT_SHADER_INTERLOCK_NAME as NAME, crate::vk::EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_ycbcr_image_arrays"] pub mod ycbcr_image_arrays { use super::super::*; pub use { crate::vk::EXT_YCBCR_IMAGE_ARRAYS_NAME as NAME, crate::vk::EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_provoking_vertex"] pub mod provoking_vertex { use super::super::*; pub use { crate::vk::EXT_PROVOKING_VERTEX_NAME as NAME, crate::vk::EXT_PROVOKING_VERTEX_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_full_screen_exclusive"] pub mod full_screen_exclusive { use super::super::*; pub use { crate::vk::EXT_FULL_SCREEN_EXCLUSIVE_NAME as NAME, crate::vk::EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_full_screen_exclusive instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_full_screen_exclusive instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_surface_present_modes2_ext: PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_surface_present_modes2_ext: unsafe { unsafe extern "system" fn get_physical_device_surface_present_modes2_ext( _physical_device: PhysicalDevice, _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, _p_present_mode_count: *mut u32, _p_present_modes: *mut PresentModeKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_present_modes2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfacePresentModes2EXT\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_present_modes2_ext } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_EXT_full_screen_exclusive device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn 
{ &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_full_screen_exclusive device-level function pointers"] pub struct DeviceFn { pub acquire_full_screen_exclusive_mode_ext: PFN_vkAcquireFullScreenExclusiveModeEXT, pub release_full_screen_exclusive_mode_ext: PFN_vkReleaseFullScreenExclusiveModeEXT, pub get_device_group_surface_present_modes2_ext: PFN_vkGetDeviceGroupSurfacePresentModes2EXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { acquire_full_screen_exclusive_mode_ext: unsafe { unsafe extern "system" fn acquire_full_screen_exclusive_mode_ext( _device: crate::vk::Device, _swapchain: SwapchainKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_full_screen_exclusive_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkAcquireFullScreenExclusiveModeEXT\0", ); let val = _f(cname); if val.is_null() { acquire_full_screen_exclusive_mode_ext } else { ::core::mem::transmute(val) } }, release_full_screen_exclusive_mode_ext: unsafe { unsafe extern "system" fn release_full_screen_exclusive_mode_ext( _device: crate::vk::Device, _swapchain: SwapchainKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(release_full_screen_exclusive_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkReleaseFullScreenExclusiveModeEXT\0", ); let val = _f(cname); if val.is_null() { release_full_screen_exclusive_mode_ext } else { ::core::mem::transmute(val) } }, get_device_group_surface_present_modes2_ext: unsafe { unsafe extern "system" fn get_device_group_surface_present_modes2_ext( _device: crate::vk::Device, _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, _p_modes: *mut DeviceGroupPresentModeFlagsKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_group_surface_present_modes2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceGroupSurfacePresentModes2EXT\0", ); let val = _f(cname); if val.is_null() { get_device_group_surface_present_modes2_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_headless_surface"] pub mod headless_surface { use super::super::*; pub use { crate::vk::EXT_HEADLESS_SURFACE_NAME as NAME, crate::vk::EXT_HEADLESS_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_headless_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_headless_surface instance-level function pointers"] pub struct InstanceFn { pub create_headless_surface_ext: PFN_vkCreateHeadlessSurfaceEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_headless_surface_ext: unsafe { unsafe extern "system" fn create_headless_surface_ext( 
_instance: crate::vk::Instance, _p_create_info: *const HeadlessSurfaceCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_headless_surface_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateHeadlessSurfaceEXT\0"); let val = _f(cname); if val.is_null() { create_headless_surface_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_line_rasterization"] pub mod line_rasterization { use super::super::*; pub use { crate::vk::EXT_LINE_RASTERIZATION_NAME as NAME, crate::vk::EXT_LINE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_line_rasterization device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_line_rasterization device-level function pointers"] pub struct DeviceFn { pub cmd_set_line_stipple_ext: PFN_vkCmdSetLineStippleKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_line_stipple_ext: unsafe { unsafe extern "system" fn cmd_set_line_stipple_ext( _command_buffer: CommandBuffer, _line_stipple_factor: u32, _line_stipple_pattern: u16, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_line_stipple_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineStippleEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_line_stipple_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_shader_atomic_float"] pub mod shader_atomic_float { use super::super::*; pub use { crate::vk::EXT_SHADER_ATOMIC_FLOAT_NAME as NAME, crate::vk::EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_host_query_reset"] pub mod host_query_reset { use super::super::*; pub use { crate::vk::EXT_HOST_QUERY_RESET_NAME as NAME, crate::vk::EXT_HOST_QUERY_RESET_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_host_query_reset device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_host_query_reset device-level function pointers"] pub struct DeviceFn { pub reset_query_pool_ext: PFN_vkResetQueryPool, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { reset_query_pool_ext: unsafe { unsafe extern "system" fn reset_query_pool_ext( _device: crate::vk::Device, _query_pool: QueryPool, 
_first_query: u32, _query_count: u32, ) { panic!(concat!("Unable to load ", stringify!(reset_query_pool_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetQueryPoolEXT\0"); let val = _f(cname); if val.is_null() { reset_query_pool_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_index_type_uint8"] pub mod index_type_uint8 { use super::super::*; pub use { crate::vk::EXT_INDEX_TYPE_UINT8_NAME as NAME, crate::vk::EXT_INDEX_TYPE_UINT8_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_extended_dynamic_state"] pub mod extended_dynamic_state { use super::super::*; pub use { crate::vk::EXT_EXTENDED_DYNAMIC_STATE_NAME as NAME, crate::vk::EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_extended_dynamic_state device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_extended_dynamic_state device-level function pointers"] pub struct DeviceFn { pub cmd_set_cull_mode_ext: PFN_vkCmdSetCullMode, pub cmd_set_front_face_ext: PFN_vkCmdSetFrontFace, pub cmd_set_primitive_topology_ext: PFN_vkCmdSetPrimitiveTopology, pub cmd_set_viewport_with_count_ext: PFN_vkCmdSetViewportWithCount, pub cmd_set_scissor_with_count_ext: PFN_vkCmdSetScissorWithCount, pub cmd_bind_vertex_buffers2_ext: PFN_vkCmdBindVertexBuffers2, pub cmd_set_depth_test_enable_ext: PFN_vkCmdSetDepthTestEnable, pub cmd_set_depth_write_enable_ext: PFN_vkCmdSetDepthWriteEnable, pub cmd_set_depth_compare_op_ext: PFN_vkCmdSetDepthCompareOp, pub cmd_set_depth_bounds_test_enable_ext: PFN_vkCmdSetDepthBoundsTestEnable, pub cmd_set_stencil_test_enable_ext: PFN_vkCmdSetStencilTestEnable, pub cmd_set_stencil_op_ext: PFN_vkCmdSetStencilOp, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_cull_mode_ext: unsafe { unsafe extern "system" fn cmd_set_cull_mode_ext( _command_buffer: CommandBuffer, _cull_mode: CullModeFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_cull_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCullModeEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_cull_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_front_face_ext: unsafe { unsafe extern "system" fn cmd_set_front_face_ext( _command_buffer: CommandBuffer, _front_face: FrontFace, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_front_face_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetFrontFaceEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_front_face_ext } else { ::core::mem::transmute(val) } }, cmd_set_primitive_topology_ext: unsafe { unsafe extern "system" fn cmd_set_primitive_topology_ext( _command_buffer: CommandBuffer, _primitive_topology: PrimitiveTopology, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_primitive_topology_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPrimitiveTopologyEXT\0"); let val = _f(cname); if 
val.is_null() { cmd_set_primitive_topology_ext } else { ::core::mem::transmute(val) } }, cmd_set_viewport_with_count_ext: unsafe { unsafe extern "system" fn cmd_set_viewport_with_count_ext( _command_buffer: CommandBuffer, _viewport_count: u32, _p_viewports: *const Viewport, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_with_count_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportWithCountEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_viewport_with_count_ext } else { ::core::mem::transmute(val) } }, cmd_set_scissor_with_count_ext: unsafe { unsafe extern "system" fn cmd_set_scissor_with_count_ext( _command_buffer: CommandBuffer, _scissor_count: u32, _p_scissors: *const Rect2D, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_scissor_with_count_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetScissorWithCountEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_scissor_with_count_ext } else { ::core::mem::transmute(val) } }, cmd_bind_vertex_buffers2_ext: unsafe { unsafe extern "system" fn cmd_bind_vertex_buffers2_ext( _command_buffer: CommandBuffer, _first_binding: u32, _binding_count: u32, _p_buffers: *const Buffer, _p_offsets: *const DeviceSize, _p_sizes: *const DeviceSize, _p_strides: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_vertex_buffers2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindVertexBuffers2EXT\0"); let val = _f(cname); if val.is_null() { cmd_bind_vertex_buffers2_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_test_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_test_enable_ext( _command_buffer: CommandBuffer, _depth_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_test_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthTestEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_test_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_write_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_write_enable_ext( _command_buffer: CommandBuffer, _depth_write_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_write_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthWriteEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_write_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_compare_op_ext: unsafe { unsafe extern "system" fn cmd_set_depth_compare_op_ext( _command_buffer: CommandBuffer, _depth_compare_op: CompareOp, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_compare_op_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthCompareOpEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_compare_op_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_bounds_test_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_bounds_test_enable_ext( _command_buffer: CommandBuffer, _depth_bounds_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bounds_test_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDepthBoundsTestEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_depth_bounds_test_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_stencil_test_enable_ext: unsafe { unsafe extern "system" fn cmd_set_stencil_test_enable_ext( _command_buffer: CommandBuffer, _stencil_test_enable: Bool32, ) { 
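// Placeholder body: this stub stays in the function-pointer table when the loader cannot resolve vkCmdSetStencilTestEnableEXT, and panics if it is ever called.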
panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_test_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilTestEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_test_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_stencil_op_ext: unsafe { unsafe extern "system" fn cmd_set_stencil_op_ext( _command_buffer: CommandBuffer, _face_mask: StencilFaceFlags, _fail_op: StencilOp, _pass_op: StencilOp, _depth_fail_op: StencilOp, _compare_op: CompareOp, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_op_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilOpEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_op_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_host_image_copy"] pub mod host_image_copy { use super::super::*; pub use { crate::vk::EXT_HOST_IMAGE_COPY_NAME as NAME, crate::vk::EXT_HOST_IMAGE_COPY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_host_image_copy device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_host_image_copy device-level function pointers"] pub struct DeviceFn { pub copy_memory_to_image_ext: PFN_vkCopyMemoryToImageEXT, pub copy_image_to_memory_ext: PFN_vkCopyImageToMemoryEXT, pub copy_image_to_image_ext: PFN_vkCopyImageToImageEXT, pub transition_image_layout_ext: PFN_vkTransitionImageLayoutEXT, pub get_image_subresource_layout2_ext: PFN_vkGetImageSubresourceLayout2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { copy_memory_to_image_ext: unsafe { unsafe extern "system" fn copy_memory_to_image_ext( _device: crate::vk::Device, _p_copy_memory_to_image_info: *const CopyMemoryToImageInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_memory_to_image_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCopyMemoryToImageEXT\0"); let val = _f(cname); if val.is_null() { copy_memory_to_image_ext } else { ::core::mem::transmute(val) } }, copy_image_to_memory_ext: unsafe { unsafe extern "system" fn copy_image_to_memory_ext( _device: crate::vk::Device, _p_copy_image_to_memory_info: *const CopyImageToMemoryInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_image_to_memory_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCopyImageToMemoryEXT\0"); let val = _f(cname); if val.is_null() { copy_image_to_memory_ext } else { ::core::mem::transmute(val) } }, copy_image_to_image_ext: unsafe { unsafe extern "system" fn copy_image_to_image_ext( _device: crate::vk::Device, _p_copy_image_to_image_info: *const CopyImageToImageInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_image_to_image_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCopyImageToImageEXT\0"); let val = _f(cname); if val.is_null() { copy_image_to_image_ext } else { ::core::mem::transmute(val) } }, 
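// The remaining fields are resolved the same way as those above: look up the entry point by name, keep the panicking stub if the loader returns null, otherwise transmute the raw pointer into the typed PFN.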
transition_image_layout_ext: unsafe { unsafe extern "system" fn transition_image_layout_ext( _device: crate::vk::Device, _transition_count: u32, _p_transitions: *const HostImageLayoutTransitionInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(transition_image_layout_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkTransitionImageLayoutEXT\0"); let val = _f(cname); if val.is_null() { transition_image_layout_ext } else { ::core::mem::transmute(val) } }, get_image_subresource_layout2_ext: unsafe { unsafe extern "system" fn get_image_subresource_layout2_ext( _device: crate::vk::Device, _image: Image, _p_subresource: *const ImageSubresource2KHR<'_>, _p_layout: *mut SubresourceLayout2KHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_subresource_layout2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageSubresourceLayout2EXT\0", ); let val = _f(cname); if val.is_null() { get_image_subresource_layout2_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_map_memory_placed"] pub mod map_memory_placed { use super::super::*; pub use { crate::vk::EXT_MAP_MEMORY_PLACED_NAME as NAME, crate::vk::EXT_MAP_MEMORY_PLACED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_atomic_float2"] pub mod shader_atomic_float2 { use super::super::*; pub use { crate::vk::EXT_SHADER_ATOMIC_FLOAT2_NAME as NAME, crate::vk::EXT_SHADER_ATOMIC_FLOAT2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_surface_maintenance1"] pub mod surface_maintenance1 { use super::super::*; pub use { crate::vk::EXT_SURFACE_MAINTENANCE1_NAME as NAME, crate::vk::EXT_SURFACE_MAINTENANCE1_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_swapchain_maintenance1"] pub mod swapchain_maintenance1 { use super::super::*; pub use { crate::vk::EXT_SWAPCHAIN_MAINTENANCE1_NAME as NAME, crate::vk::EXT_SWAPCHAIN_MAINTENANCE1_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_swapchain_maintenance1 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_swapchain_maintenance1 device-level function pointers"] pub struct DeviceFn { pub release_swapchain_images_ext: PFN_vkReleaseSwapchainImagesEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { release_swapchain_images_ext: unsafe { unsafe extern "system" fn release_swapchain_images_ext( _device: crate::vk::Device, _p_release_info: *const ReleaseSwapchainImagesInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(release_swapchain_images_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkReleaseSwapchainImagesEXT\0"); let val = _f(cname); if val.is_null() { release_swapchain_images_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_shader_demote_to_helper_invocation"] pub mod shader_demote_to_helper_invocation { use super::super::*; pub use { crate::vk::EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_NAME as NAME, 
crate::vk::EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_texel_buffer_alignment"] pub mod texel_buffer_alignment { use super::super::*; pub use { crate::vk::EXT_TEXEL_BUFFER_ALIGNMENT_NAME as NAME, crate::vk::EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_depth_bias_control"] pub mod depth_bias_control { use super::super::*; pub use { crate::vk::EXT_DEPTH_BIAS_CONTROL_NAME as NAME, crate::vk::EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_depth_bias_control device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_depth_bias_control device-level function pointers"] pub struct DeviceFn { pub cmd_set_depth_bias2_ext: PFN_vkCmdSetDepthBias2EXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_depth_bias2_ext: unsafe { unsafe extern "system" fn cmd_set_depth_bias2_ext( _command_buffer: CommandBuffer, _p_depth_bias_info: *const DepthBiasInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bias2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBias2EXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bias2_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_device_memory_report"] pub mod device_memory_report { use super::super::*; pub use { crate::vk::EXT_DEVICE_MEMORY_REPORT_NAME as NAME, crate::vk::EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_acquire_drm_display"] pub mod acquire_drm_display { use super::super::*; pub use { crate::vk::EXT_ACQUIRE_DRM_DISPLAY_NAME as NAME, crate::vk::EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_acquire_drm_display instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_acquire_drm_display instance-level function pointers"] pub struct InstanceFn { pub acquire_drm_display_ext: PFN_vkAcquireDrmDisplayEXT, pub get_drm_display_ext: PFN_vkGetDrmDisplayEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { acquire_drm_display_ext: unsafe { unsafe extern "system" fn acquire_drm_display_ext( _physical_device: PhysicalDevice, _drm_fd: i32, _display: DisplayKHR, ) -> Result { 
panic!(concat!( "Unable to load ", stringify!(acquire_drm_display_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireDrmDisplayEXT\0"); let val = _f(cname); if val.is_null() { acquire_drm_display_ext } else { ::core::mem::transmute(val) } }, get_drm_display_ext: unsafe { unsafe extern "system" fn get_drm_display_ext( _physical_device: PhysicalDevice, _drm_fd: i32, _connector_id: u32, _display: *mut DisplayKHR, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_drm_display_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDrmDisplayEXT\0"); let val = _f(cname); if val.is_null() { get_drm_display_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_robustness2"] pub mod robustness2 { use super::super::*; pub use { crate::vk::EXT_ROBUSTNESS2_NAME as NAME, crate::vk::EXT_ROBUSTNESS2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_custom_border_color"] pub mod custom_border_color { use super::super::*; pub use { crate::vk::EXT_CUSTOM_BORDER_COLOR_NAME as NAME, crate::vk::EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_private_data"] pub mod private_data { use super::super::*; pub use { crate::vk::EXT_PRIVATE_DATA_NAME as NAME, crate::vk::EXT_PRIVATE_DATA_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_private_data device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_private_data device-level function pointers"] pub struct DeviceFn { pub create_private_data_slot_ext: PFN_vkCreatePrivateDataSlot, pub destroy_private_data_slot_ext: PFN_vkDestroyPrivateDataSlot, pub set_private_data_ext: PFN_vkSetPrivateData, pub get_private_data_ext: PFN_vkGetPrivateData, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_private_data_slot_ext: unsafe { unsafe extern "system" fn create_private_data_slot_ext( _device: crate::vk::Device, _p_create_info: *const PrivateDataSlotCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_private_data_slot: *mut PrivateDataSlot, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_private_data_slot_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreatePrivateDataSlotEXT\0"); let val = _f(cname); if val.is_null() { create_private_data_slot_ext } else { ::core::mem::transmute(val) } }, destroy_private_data_slot_ext: unsafe { unsafe extern "system" fn destroy_private_data_slot_ext( _device: crate::vk::Device, _private_data_slot: PrivateDataSlot, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_private_data_slot_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyPrivateDataSlotEXT\0"); let val = _f(cname); if val.is_null() { destroy_private_data_slot_ext } else { ::core::mem::transmute(val) } }, set_private_data_ext: unsafe { unsafe extern "system" fn set_private_data_ext( _device: crate::vk::Device, 
_object_type: ObjectType, _object_handle: u64, _private_data_slot: PrivateDataSlot, _data: u64, ) -> Result { panic!(concat!("Unable to load ", stringify!(set_private_data_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetPrivateDataEXT\0"); let val = _f(cname); if val.is_null() { set_private_data_ext } else { ::core::mem::transmute(val) } }, get_private_data_ext: unsafe { unsafe extern "system" fn get_private_data_ext( _device: crate::vk::Device, _object_type: ObjectType, _object_handle: u64, _private_data_slot: PrivateDataSlot, _p_data: *mut u64, ) { panic!(concat!("Unable to load ", stringify!(get_private_data_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPrivateDataEXT\0"); let val = _f(cname); if val.is_null() { get_private_data_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_pipeline_creation_cache_control"] pub mod pipeline_creation_cache_control { use super::super::*; pub use { crate::vk::EXT_PIPELINE_CREATION_CACHE_CONTROL_NAME as NAME, crate::vk::EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_metal_objects"] pub mod metal_objects { use super::super::*; pub use { crate::vk::EXT_METAL_OBJECTS_NAME as NAME, crate::vk::EXT_METAL_OBJECTS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_metal_objects device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_metal_objects device-level function pointers"] pub struct DeviceFn { pub export_metal_objects_ext: PFN_vkExportMetalObjectsEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { export_metal_objects_ext: unsafe { unsafe extern "system" fn export_metal_objects_ext( _device: crate::vk::Device, _p_metal_objects_info: *mut ExportMetalObjectsInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(export_metal_objects_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkExportMetalObjectsEXT\0"); let val = _f(cname); if val.is_null() { export_metal_objects_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_descriptor_buffer"] pub mod descriptor_buffer { use super::super::*; pub use { crate::vk::EXT_DESCRIPTOR_BUFFER_NAME as NAME, crate::vk::EXT_DESCRIPTOR_BUFFER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_descriptor_buffer device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_descriptor_buffer device-level function pointers"] pub struct DeviceFn { pub 
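// Raw function-pointer table for VK_EXT_descriptor_buffer. Every field below
// starts out as a panicking stub and is overwritten in `load`/`load_erased`
// with whatever address the loader closure returns (vkGetDeviceProcAddr when
// the table is built through `Device::new`).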
get_descriptor_set_layout_size_ext: PFN_vkGetDescriptorSetLayoutSizeEXT, pub get_descriptor_set_layout_binding_offset_ext: PFN_vkGetDescriptorSetLayoutBindingOffsetEXT, pub get_descriptor_ext: PFN_vkGetDescriptorEXT, pub cmd_bind_descriptor_buffers_ext: PFN_vkCmdBindDescriptorBuffersEXT, pub cmd_set_descriptor_buffer_offsets_ext: PFN_vkCmdSetDescriptorBufferOffsetsEXT, pub cmd_bind_descriptor_buffer_embedded_samplers_ext: PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT, pub get_buffer_opaque_capture_descriptor_data_ext: PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT, pub get_image_opaque_capture_descriptor_data_ext: PFN_vkGetImageOpaqueCaptureDescriptorDataEXT, pub get_image_view_opaque_capture_descriptor_data_ext: PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT, pub get_sampler_opaque_capture_descriptor_data_ext: PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT, pub get_acceleration_structure_opaque_capture_descriptor_data_ext: PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_descriptor_set_layout_size_ext: unsafe { unsafe extern "system" fn get_descriptor_set_layout_size_ext( _device: crate::vk::Device, _layout: DescriptorSetLayout, _p_layout_size_in_bytes: *mut DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(get_descriptor_set_layout_size_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDescriptorSetLayoutSizeEXT\0", ); let val = _f(cname); if val.is_null() { get_descriptor_set_layout_size_ext } else { ::core::mem::transmute(val) } }, get_descriptor_set_layout_binding_offset_ext: unsafe { unsafe extern "system" fn get_descriptor_set_layout_binding_offset_ext( _device: crate::vk::Device, _layout: DescriptorSetLayout, _binding: u32, _p_offset: *mut DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(get_descriptor_set_layout_binding_offset_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDescriptorSetLayoutBindingOffsetEXT\0", ); let val = _f(cname); if val.is_null() { get_descriptor_set_layout_binding_offset_ext } else { ::core::mem::transmute(val) } }, get_descriptor_ext: unsafe { unsafe extern "system" fn get_descriptor_ext( _device: crate::vk::Device, _p_descriptor_info: *const DescriptorGetInfoEXT<'_>, _data_size: usize, _p_descriptor: *mut c_void, ) { panic!(concat!("Unable to load ", stringify!(get_descriptor_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDescriptorEXT\0"); let val = _f(cname); if val.is_null() { get_descriptor_ext } else { ::core::mem::transmute(val) } }, cmd_bind_descriptor_buffers_ext: unsafe { unsafe extern "system" fn cmd_bind_descriptor_buffers_ext( _command_buffer: CommandBuffer, _buffer_count: u32, _p_binding_infos: *const DescriptorBufferBindingInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_descriptor_buffers_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindDescriptorBuffersEXT\0"); let val = _f(cname); if val.is_null() { cmd_bind_descriptor_buffers_ext } else { ::core::mem::transmute(val) } }, cmd_set_descriptor_buffer_offsets_ext: unsafe { unsafe extern "system" fn cmd_set_descriptor_buffer_offsets_ext( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _layout: PipelineLayout, _first_set: u32, _set_count: u32, _p_buffer_indices: *const u32, _p_offsets: *const 
DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_descriptor_buffer_offsets_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDescriptorBufferOffsetsEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_descriptor_buffer_offsets_ext } else { ::core::mem::transmute(val) } }, cmd_bind_descriptor_buffer_embedded_samplers_ext: unsafe { unsafe extern "system" fn cmd_bind_descriptor_buffer_embedded_samplers_ext( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _layout: PipelineLayout, _set: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_descriptor_buffer_embedded_samplers_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBindDescriptorBufferEmbeddedSamplersEXT\0", ); let val = _f(cname); if val.is_null() { cmd_bind_descriptor_buffer_embedded_samplers_ext } else { ::core::mem::transmute(val) } }, get_buffer_opaque_capture_descriptor_data_ext: unsafe { unsafe extern "system" fn get_buffer_opaque_capture_descriptor_data_ext( _device: crate::vk::Device, _p_info: *const BufferCaptureDescriptorDataInfoEXT<'_>, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_buffer_opaque_capture_descriptor_data_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetBufferOpaqueCaptureDescriptorDataEXT\0", ); let val = _f(cname); if val.is_null() { get_buffer_opaque_capture_descriptor_data_ext } else { ::core::mem::transmute(val) } }, get_image_opaque_capture_descriptor_data_ext: unsafe { unsafe extern "system" fn get_image_opaque_capture_descriptor_data_ext( _device: crate::vk::Device, _p_info: *const ImageCaptureDescriptorDataInfoEXT<'_>, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_image_opaque_capture_descriptor_data_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageOpaqueCaptureDescriptorDataEXT\0", ); let val = _f(cname); if val.is_null() { get_image_opaque_capture_descriptor_data_ext } else { ::core::mem::transmute(val) } }, get_image_view_opaque_capture_descriptor_data_ext: unsafe { unsafe extern "system" fn get_image_view_opaque_capture_descriptor_data_ext( _device: crate::vk::Device, _p_info: *const ImageViewCaptureDescriptorDataInfoEXT<'_>, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_image_view_opaque_capture_descriptor_data_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageViewOpaqueCaptureDescriptorDataEXT\0", ); let val = _f(cname); if val.is_null() { get_image_view_opaque_capture_descriptor_data_ext } else { ::core::mem::transmute(val) } }, get_sampler_opaque_capture_descriptor_data_ext: unsafe { unsafe extern "system" fn get_sampler_opaque_capture_descriptor_data_ext( _device: crate::vk::Device, _p_info: *const SamplerCaptureDescriptorDataInfoEXT<'_>, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_sampler_opaque_capture_descriptor_data_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetSamplerOpaqueCaptureDescriptorDataEXT\0", ); let val = _f(cname); if val.is_null() { get_sampler_opaque_capture_descriptor_data_ext } else { ::core::mem::transmute(val) } }, get_acceleration_structure_opaque_capture_descriptor_data_ext: unsafe { unsafe extern "system" fn get_acceleration_structure_opaque_capture_descriptor_data_ext( _device: crate::vk::Device, _p_info: *const AccelerationStructureCaptureDescriptorDataInfoEXT<'_>, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", 
stringify!( get_acceleration_structure_opaque_capture_descriptor_data_ext ) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT\0", ); let val = _f(cname); if val.is_null() { get_acceleration_structure_opaque_capture_descriptor_data_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_graphics_pipeline_library"] pub mod graphics_pipeline_library { use super::super::*; pub use { crate::vk::EXT_GRAPHICS_PIPELINE_LIBRARY_NAME as NAME, crate::vk::EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_mesh_shader"] pub mod mesh_shader { use super::super::*; pub use { crate::vk::EXT_MESH_SHADER_NAME as NAME, crate::vk::EXT_MESH_SHADER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_mesh_shader device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_mesh_shader device-level function pointers"] pub struct DeviceFn { pub cmd_draw_mesh_tasks_ext: PFN_vkCmdDrawMeshTasksEXT, pub cmd_draw_mesh_tasks_indirect_ext: PFN_vkCmdDrawMeshTasksIndirectEXT, pub cmd_draw_mesh_tasks_indirect_count_ext: PFN_vkCmdDrawMeshTasksIndirectCountEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_mesh_tasks_ext: unsafe { unsafe extern "system" fn cmd_draw_mesh_tasks_ext( _command_buffer: CommandBuffer, _group_count_x: u32, _group_count_y: u32, _group_count_z: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_mesh_tasks_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMeshTasksEXT\0"); let val = _f(cname); if val.is_null() { cmd_draw_mesh_tasks_ext } else { ::core::mem::transmute(val) } }, cmd_draw_mesh_tasks_indirect_ext: unsafe { unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_ext( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_mesh_tasks_indirect_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMeshTasksIndirectEXT\0"); let val = _f(cname); if val.is_null() { cmd_draw_mesh_tasks_indirect_ext } else { ::core::mem::transmute(val) } }, cmd_draw_mesh_tasks_indirect_count_ext: unsafe { unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_count_ext( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_mesh_tasks_indirect_count_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDrawMeshTasksIndirectCountEXT\0", ); let val = _f(cname); if val.is_null() { cmd_draw_mesh_tasks_indirect_count_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_ycbcr_2plane_444_formats"] pub mod ycbcr_2plane_444_formats { use super::super::*; pub use { crate::vk::EXT_YCBCR_2PLANE_444_FORMATS_NAME as 
NAME, crate::vk::EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_fragment_density_map2"] pub mod fragment_density_map2 { use super::super::*; pub use { crate::vk::EXT_FRAGMENT_DENSITY_MAP2_NAME as NAME, crate::vk::EXT_FRAGMENT_DENSITY_MAP2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_image_robustness"] pub mod image_robustness { use super::super::*; pub use { crate::vk::EXT_IMAGE_ROBUSTNESS_NAME as NAME, crate::vk::EXT_IMAGE_ROBUSTNESS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_image_compression_control"] pub mod image_compression_control { use super::super::*; pub use { crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_NAME as NAME, crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_image_compression_control device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_image_compression_control device-level function pointers"] pub struct DeviceFn { pub get_image_subresource_layout2_ext: PFN_vkGetImageSubresourceLayout2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_image_subresource_layout2_ext: unsafe { unsafe extern "system" fn get_image_subresource_layout2_ext( _device: crate::vk::Device, _image: Image, _p_subresource: *const ImageSubresource2KHR<'_>, _p_layout: *mut SubresourceLayout2KHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_subresource_layout2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageSubresourceLayout2EXT\0", ); let val = _f(cname); if val.is_null() { get_image_subresource_layout2_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_attachment_feedback_loop_layout"] pub mod attachment_feedback_loop_layout { use super::super::*; pub use { crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_NAME as NAME, crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_4444_formats"] pub mod _4444_formats { use super::super::*; pub use { crate::vk::EXT_4444_FORMATS_NAME as NAME, crate::vk::EXT_4444_FORMATS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_device_fault"] pub mod device_fault { use super::super::*; pub use { crate::vk::EXT_DEVICE_FAULT_NAME as NAME, crate::vk::EXT_DEVICE_FAULT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_device_fault device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_device_fault device-level function pointers"] pub struct 
DeviceFn { pub get_device_fault_info_ext: PFN_vkGetDeviceFaultInfoEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_device_fault_info_ext: unsafe { unsafe extern "system" fn get_device_fault_info_ext( _device: crate::vk::Device, _p_fault_counts: *mut DeviceFaultCountsEXT<'_>, _p_fault_info: *mut DeviceFaultInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_fault_info_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceFaultInfoEXT\0"); let val = _f(cname); if val.is_null() { get_device_fault_info_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_rgba10x6_formats"] pub mod rgba10x6_formats { use super::super::*; pub use { crate::vk::EXT_RGBA10X6_FORMATS_NAME as NAME, crate::vk::EXT_RGBA10X6_FORMATS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_directfb_surface"] pub mod directfb_surface { use super::super::*; pub use { crate::vk::EXT_DIRECTFB_SURFACE_NAME as NAME, crate::vk::EXT_DIRECTFB_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_directfb_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_directfb_surface instance-level function pointers"] pub struct InstanceFn { pub create_direct_fb_surface_ext: PFN_vkCreateDirectFBSurfaceEXT, pub get_physical_device_direct_fb_presentation_support_ext: PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_direct_fb_surface_ext: unsafe { unsafe extern "system" fn create_direct_fb_surface_ext( _instance: crate::vk::Instance, _p_create_info: *const DirectFBSurfaceCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_direct_fb_surface_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDirectFBSurfaceEXT\0"); let val = _f(cname); if val.is_null() { create_direct_fb_surface_ext } else { ::core::mem::transmute(val) } }, get_physical_device_direct_fb_presentation_support_ext: unsafe { unsafe extern "system" fn get_physical_device_direct_fb_presentation_support_ext( _physical_device: PhysicalDevice, _queue_family_index: u32, _dfb: *mut IDirectFB, ) -> Bool32 { panic!(concat!( "Unable to load ", stringify!(get_physical_device_direct_fb_presentation_support_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceDirectFBPresentationSupportEXT\0", ); let val = _f(cname); if val.is_null() { get_physical_device_direct_fb_presentation_support_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_vertex_input_dynamic_state"] pub mod vertex_input_dynamic_state { use 
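// The glob import below brings the raw vk types used by this module into
// scope (CommandBuffer, the VertexInput*Description2EXT structs,
// PFN_vkCmdSetVertexInputEXT, ...). A minimal usage sketch, assuming
// `instance` and `device` are ash wrappers created elsewhere and that the
// extension was enabled at device creation (otherwise the pointer stays a
// panicking stub):
//     let vi = vertex_input_dynamic_state::Device::new(&instance, &device);
//     let _pfn: PFN_vkCmdSetVertexInputEXT = vi.fp().cmd_set_vertex_input_ext;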
super::super::*; pub use { crate::vk::EXT_VERTEX_INPUT_DYNAMIC_STATE_NAME as NAME, crate::vk::EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_vertex_input_dynamic_state device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_vertex_input_dynamic_state device-level function pointers"] pub struct DeviceFn { pub cmd_set_vertex_input_ext: PFN_vkCmdSetVertexInputEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_vertex_input_ext: unsafe { unsafe extern "system" fn cmd_set_vertex_input_ext( _command_buffer: CommandBuffer, _vertex_binding_description_count: u32, _p_vertex_binding_descriptions : * const VertexInputBindingDescription2EXT < '_ >, _vertex_attribute_description_count: u32, _p_vertex_attribute_descriptions : * const VertexInputAttributeDescription2EXT < '_ >, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_vertex_input_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetVertexInputEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_vertex_input_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_physical_device_drm"] pub mod physical_device_drm { use super::super::*; pub use { crate::vk::EXT_PHYSICAL_DEVICE_DRM_NAME as NAME, crate::vk::EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_device_address_binding_report"] pub mod device_address_binding_report { use super::super::*; pub use { crate::vk::EXT_DEVICE_ADDRESS_BINDING_REPORT_NAME as NAME, crate::vk::EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_depth_clip_control"] pub mod depth_clip_control { use super::super::*; pub use { crate::vk::EXT_DEPTH_CLIP_CONTROL_NAME as NAME, crate::vk::EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_primitive_topology_list_restart"] pub mod primitive_topology_list_restart { use super::super::*; pub use { crate::vk::EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_NAME as NAME, crate::vk::EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pipeline_properties"] pub mod pipeline_properties { use super::super::*; pub use { crate::vk::EXT_PIPELINE_PROPERTIES_NAME as NAME, crate::vk::EXT_PIPELINE_PROPERTIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_pipeline_properties device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[allow(non_camel_case_types)] #[doc = "Implemented for all types that 
can be passed as argument to `pipeline_properties` in [`PFN_vkGetPipelinePropertiesEXT`]"] pub unsafe trait GetPipelinePropertiesEXTParamPipelineProperties {} unsafe impl GetPipelinePropertiesEXTParamPipelineProperties for PipelinePropertiesIdentifierEXT<'_> { } #[derive(Clone)] #[doc = "Raw VK_EXT_pipeline_properties device-level function pointers"] pub struct DeviceFn { pub get_pipeline_properties_ext: PFN_vkGetPipelinePropertiesEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_pipeline_properties_ext: unsafe { unsafe extern "system" fn get_pipeline_properties_ext( _device: crate::vk::Device, _p_pipeline_info: *const PipelineInfoEXT<'_>, _p_pipeline_properties: *mut BaseOutStructure<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_pipeline_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPipelinePropertiesEXT\0"); let val = _f(cname); if val.is_null() { get_pipeline_properties_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_frame_boundary"] pub mod frame_boundary { use super::super::*; pub use { crate::vk::EXT_FRAME_BOUNDARY_NAME as NAME, crate::vk::EXT_FRAME_BOUNDARY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_multisampled_render_to_single_sampled"] pub mod multisampled_render_to_single_sampled { use super::super::*; pub use { crate::vk::EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_NAME as NAME, crate::vk::EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_extended_dynamic_state2"] pub mod extended_dynamic_state2 { use super::super::*; pub use { crate::vk::EXT_EXTENDED_DYNAMIC_STATE2_NAME as NAME, crate::vk::EXT_EXTENDED_DYNAMIC_STATE2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_extended_dynamic_state2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_extended_dynamic_state2 device-level function pointers"] pub struct DeviceFn { pub cmd_set_patch_control_points_ext: PFN_vkCmdSetPatchControlPointsEXT, pub cmd_set_rasterizer_discard_enable_ext: PFN_vkCmdSetRasterizerDiscardEnable, pub cmd_set_depth_bias_enable_ext: PFN_vkCmdSetDepthBiasEnable, pub cmd_set_logic_op_ext: PFN_vkCmdSetLogicOpEXT, pub cmd_set_primitive_restart_enable_ext: PFN_vkCmdSetPrimitiveRestartEnable, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_patch_control_points_ext: unsafe { unsafe extern "system" fn cmd_set_patch_control_points_ext( _command_buffer: CommandBuffer, _patch_control_points: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_patch_control_points_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPatchControlPointsEXT\0"); let val = _f(cname); if val.is_null() { 
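// Null means the loader closure (vkGetDeviceProcAddr via `Device::new`) could
// not find vkCmdSetPatchControlPointsEXT, so the panicking placeholder defined
// above is kept; otherwise the erased pointer is transmuted to the typed
// PFN_vkCmdSetPatchControlPointsEXT.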
cmd_set_patch_control_points_ext } else { ::core::mem::transmute(val) } }, cmd_set_rasterizer_discard_enable_ext: unsafe { unsafe extern "system" fn cmd_set_rasterizer_discard_enable_ext( _command_buffer: CommandBuffer, _rasterizer_discard_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterizer_discard_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRasterizerDiscardEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_rasterizer_discard_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_bias_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_bias_enable_ext( _command_buffer: CommandBuffer, _depth_bias_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bias_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBiasEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bias_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_logic_op_ext: unsafe { unsafe extern "system" fn cmd_set_logic_op_ext( _command_buffer: CommandBuffer, _logic_op: LogicOp, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_logic_op_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLogicOpEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_logic_op_ext } else { ::core::mem::transmute(val) } }, cmd_set_primitive_restart_enable_ext: unsafe { unsafe extern "system" fn cmd_set_primitive_restart_enable_ext( _command_buffer: CommandBuffer, _primitive_restart_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_primitive_restart_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetPrimitiveRestartEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_primitive_restart_enable_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_color_write_enable"] pub mod color_write_enable { use super::super::*; pub use { crate::vk::EXT_COLOR_WRITE_ENABLE_NAME as NAME, crate::vk::EXT_COLOR_WRITE_ENABLE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_color_write_enable device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_color_write_enable device-level function pointers"] pub struct DeviceFn { pub cmd_set_color_write_enable_ext: PFN_vkCmdSetColorWriteEnableEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_color_write_enable_ext: unsafe { unsafe extern "system" fn cmd_set_color_write_enable_ext( _command_buffer: CommandBuffer, _attachment_count: u32, _p_color_write_enables: *const Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_write_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorWriteEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_write_enable_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = 
"VK_EXT_primitives_generated_query"] pub mod primitives_generated_query { use super::super::*; pub use { crate::vk::EXT_PRIMITIVES_GENERATED_QUERY_NAME as NAME, crate::vk::EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_global_priority_query"] pub mod global_priority_query { use super::super::*; pub use { crate::vk::EXT_GLOBAL_PRIORITY_QUERY_NAME as NAME, crate::vk::EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_image_view_min_lod"] pub mod image_view_min_lod { use super::super::*; pub use { crate::vk::EXT_IMAGE_VIEW_MIN_LOD_NAME as NAME, crate::vk::EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_multi_draw"] pub mod multi_draw { use super::super::*; pub use { crate::vk::EXT_MULTI_DRAW_NAME as NAME, crate::vk::EXT_MULTI_DRAW_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_multi_draw device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_multi_draw device-level function pointers"] pub struct DeviceFn { pub cmd_draw_multi_ext: PFN_vkCmdDrawMultiEXT, pub cmd_draw_multi_indexed_ext: PFN_vkCmdDrawMultiIndexedEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_multi_ext: unsafe { unsafe extern "system" fn cmd_draw_multi_ext( _command_buffer: CommandBuffer, _draw_count: u32, _p_vertex_info: *const MultiDrawInfoEXT, _instance_count: u32, _first_instance: u32, _stride: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_draw_multi_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMultiEXT\0"); let val = _f(cname); if val.is_null() { cmd_draw_multi_ext } else { ::core::mem::transmute(val) } }, cmd_draw_multi_indexed_ext: unsafe { unsafe extern "system" fn cmd_draw_multi_indexed_ext( _command_buffer: CommandBuffer, _draw_count: u32, _p_index_info: *const MultiDrawIndexedInfoEXT, _instance_count: u32, _first_instance: u32, _stride: u32, _p_vertex_offset: *const i32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_multi_indexed_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMultiIndexedEXT\0"); let val = _f(cname); if val.is_null() { cmd_draw_multi_indexed_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_image_2d_view_of_3d"] pub mod image_2d_view_of_3d { use super::super::*; pub use { crate::vk::EXT_IMAGE_2D_VIEW_OF_3D_NAME as NAME, crate::vk::EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_tile_image"] pub mod shader_tile_image { use super::super::*; pub use { crate::vk::EXT_SHADER_TILE_IMAGE_NAME as NAME, crate::vk::EXT_SHADER_TILE_IMAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_opacity_micromap"] pub mod opacity_micromap { use super::super::*; pub use { crate::vk::EXT_OPACITY_MICROMAP_NAME as NAME, crate::vk::EXT_OPACITY_MICROMAP_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_opacity_micromap device-level 
functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_opacity_micromap device-level function pointers"] pub struct DeviceFn { pub create_micromap_ext: PFN_vkCreateMicromapEXT, pub destroy_micromap_ext: PFN_vkDestroyMicromapEXT, pub cmd_build_micromaps_ext: PFN_vkCmdBuildMicromapsEXT, pub build_micromaps_ext: PFN_vkBuildMicromapsEXT, pub copy_micromap_ext: PFN_vkCopyMicromapEXT, pub copy_micromap_to_memory_ext: PFN_vkCopyMicromapToMemoryEXT, pub copy_memory_to_micromap_ext: PFN_vkCopyMemoryToMicromapEXT, pub write_micromaps_properties_ext: PFN_vkWriteMicromapsPropertiesEXT, pub cmd_copy_micromap_ext: PFN_vkCmdCopyMicromapEXT, pub cmd_copy_micromap_to_memory_ext: PFN_vkCmdCopyMicromapToMemoryEXT, pub cmd_copy_memory_to_micromap_ext: PFN_vkCmdCopyMemoryToMicromapEXT, pub cmd_write_micromaps_properties_ext: PFN_vkCmdWriteMicromapsPropertiesEXT, pub get_device_micromap_compatibility_ext: PFN_vkGetDeviceMicromapCompatibilityEXT, pub get_micromap_build_sizes_ext: PFN_vkGetMicromapBuildSizesEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_micromap_ext: unsafe { unsafe extern "system" fn create_micromap_ext( _device: crate::vk::Device, _p_create_info: *const MicromapCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_micromap: *mut MicromapEXT, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_micromap_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateMicromapEXT\0"); let val = _f(cname); if val.is_null() { create_micromap_ext } else { ::core::mem::transmute(val) } }, destroy_micromap_ext: unsafe { unsafe extern "system" fn destroy_micromap_ext( _device: crate::vk::Device, _micromap: MicromapEXT, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_micromap_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyMicromapEXT\0"); let val = _f(cname); if val.is_null() { destroy_micromap_ext } else { ::core::mem::transmute(val) } }, cmd_build_micromaps_ext: unsafe { unsafe extern "system" fn cmd_build_micromaps_ext( _command_buffer: CommandBuffer, _info_count: u32, _p_infos: *const MicromapBuildInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_build_micromaps_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBuildMicromapsEXT\0"); let val = _f(cname); if val.is_null() { cmd_build_micromaps_ext } else { ::core::mem::transmute(val) } }, build_micromaps_ext: unsafe { unsafe extern "system" fn build_micromaps_ext( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _info_count: u32, _p_infos: *const MicromapBuildInfoEXT<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(build_micromaps_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBuildMicromapsEXT\0"); let val = _f(cname); if val.is_null() { build_micromaps_ext } else { ::core::mem::transmute(val) } }, 
copy_micromap_ext: unsafe { unsafe extern "system" fn copy_micromap_ext( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _p_info: *const CopyMicromapInfoEXT<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(copy_micromap_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCopyMicromapEXT\0"); let val = _f(cname); if val.is_null() { copy_micromap_ext } else { ::core::mem::transmute(val) } }, copy_micromap_to_memory_ext: unsafe { unsafe extern "system" fn copy_micromap_to_memory_ext( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _p_info: *const CopyMicromapToMemoryInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_micromap_to_memory_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCopyMicromapToMemoryEXT\0"); let val = _f(cname); if val.is_null() { copy_micromap_to_memory_ext } else { ::core::mem::transmute(val) } }, copy_memory_to_micromap_ext: unsafe { unsafe extern "system" fn copy_memory_to_micromap_ext( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _p_info: *const CopyMemoryToMicromapInfoEXT<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_memory_to_micromap_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCopyMemoryToMicromapEXT\0"); let val = _f(cname); if val.is_null() { copy_memory_to_micromap_ext } else { ::core::mem::transmute(val) } }, write_micromaps_properties_ext: unsafe { unsafe extern "system" fn write_micromaps_properties_ext( _device: crate::vk::Device, _micromap_count: u32, _p_micromaps: *const MicromapEXT, _query_type: QueryType, _data_size: usize, _p_data: *mut c_void, _stride: usize, ) -> Result { panic!(concat!( "Unable to load ", stringify!(write_micromaps_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkWriteMicromapsPropertiesEXT\0"); let val = _f(cname); if val.is_null() { write_micromaps_properties_ext } else { ::core::mem::transmute(val) } }, cmd_copy_micromap_ext: unsafe { unsafe extern "system" fn cmd_copy_micromap_ext( _command_buffer: CommandBuffer, _p_info: *const CopyMicromapInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_micromap_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyMicromapEXT\0"); let val = _f(cname); if val.is_null() { cmd_copy_micromap_ext } else { ::core::mem::transmute(val) } }, cmd_copy_micromap_to_memory_ext: unsafe { unsafe extern "system" fn cmd_copy_micromap_to_memory_ext( _command_buffer: CommandBuffer, _p_info: *const CopyMicromapToMemoryInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_micromap_to_memory_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyMicromapToMemoryEXT\0"); let val = _f(cname); if val.is_null() { cmd_copy_micromap_to_memory_ext } else { ::core::mem::transmute(val) } }, cmd_copy_memory_to_micromap_ext: unsafe { unsafe extern "system" fn cmd_copy_memory_to_micromap_ext( _command_buffer: CommandBuffer, _p_info: *const CopyMemoryToMicromapInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_memory_to_micromap_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyMemoryToMicromapEXT\0"); let val = _f(cname); if val.is_null() { cmd_copy_memory_to_micromap_ext } else { ::core::mem::transmute(val) } }, cmd_write_micromaps_properties_ext: unsafe { unsafe extern "system" fn cmd_write_micromaps_properties_ext( _command_buffer: CommandBuffer, _micromap_count: u32, _p_micromaps: *const MicromapEXT, _query_type: 
QueryType, _query_pool: QueryPool, _first_query: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_write_micromaps_properties_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdWriteMicromapsPropertiesEXT\0", ); let val = _f(cname); if val.is_null() { cmd_write_micromaps_properties_ext } else { ::core::mem::transmute(val) } }, get_device_micromap_compatibility_ext: unsafe { unsafe extern "system" fn get_device_micromap_compatibility_ext( _device: crate::vk::Device, _p_version_info: *const MicromapVersionInfoEXT<'_>, _p_compatibility: *mut AccelerationStructureCompatibilityKHR, ) { panic!(concat!( "Unable to load ", stringify!(get_device_micromap_compatibility_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceMicromapCompatibilityEXT\0", ); let val = _f(cname); if val.is_null() { get_device_micromap_compatibility_ext } else { ::core::mem::transmute(val) } }, get_micromap_build_sizes_ext: unsafe { unsafe extern "system" fn get_micromap_build_sizes_ext( _device: crate::vk::Device, _build_type: AccelerationStructureBuildTypeKHR, _p_build_info: *const MicromapBuildInfoEXT<'_>, _p_size_info: *mut MicromapBuildSizesInfoEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_micromap_build_sizes_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetMicromapBuildSizesEXT\0"); let val = _f(cname); if val.is_null() { get_micromap_build_sizes_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_load_store_op_none"] pub mod load_store_op_none { use super::super::*; pub use { crate::vk::EXT_LOAD_STORE_OP_NONE_NAME as NAME, crate::vk::EXT_LOAD_STORE_OP_NONE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_border_color_swizzle"] pub mod border_color_swizzle { use super::super::*; pub use { crate::vk::EXT_BORDER_COLOR_SWIZZLE_NAME as NAME, crate::vk::EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pageable_device_local_memory"] pub mod pageable_device_local_memory { use super::super::*; pub use { crate::vk::EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_NAME as NAME, crate::vk::EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_pageable_device_local_memory device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_pageable_device_local_memory device-level function pointers"] pub struct DeviceFn { pub set_device_memory_priority_ext: PFN_vkSetDeviceMemoryPriorityEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { set_device_memory_priority_ext: unsafe { unsafe extern "system" fn set_device_memory_priority_ext( _device: crate::vk::Device, _memory: DeviceMemory, _priority: f32, ) { panic!(concat!( "Unable to load ", stringify!(set_device_memory_priority_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetDeviceMemoryPriorityEXT\0"); let val = _f(cname); if val.is_null() { set_device_memory_priority_ext } else { 
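// The loader returned a real vkSetDeviceMemoryPriorityEXT address; reinterpret
// the untyped *const c_void as the typed function pointer.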
::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_image_sliced_view_of_3d"] pub mod image_sliced_view_of_3d { use super::super::*; pub use { crate::vk::EXT_IMAGE_SLICED_VIEW_OF_3D_NAME as NAME, crate::vk::EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_depth_clamp_zero_one"] pub mod depth_clamp_zero_one { use super::super::*; pub use { crate::vk::EXT_DEPTH_CLAMP_ZERO_ONE_NAME as NAME, crate::vk::EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_non_seamless_cube_map"] pub mod non_seamless_cube_map { use super::super::*; pub use { crate::vk::EXT_NON_SEAMLESS_CUBE_MAP_NAME as NAME, crate::vk::EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_image_compression_control_swapchain"] pub mod image_compression_control_swapchain { use super::super::*; pub use { crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_NAME as NAME, crate::vk::EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_nested_command_buffer"] pub mod nested_command_buffer { use super::super::*; pub use { crate::vk::EXT_NESTED_COMMAND_BUFFER_NAME as NAME, crate::vk::EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_external_memory_acquire_unmodified"] pub mod external_memory_acquire_unmodified { use super::super::*; pub use { crate::vk::EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_NAME as NAME, crate::vk::EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_extended_dynamic_state3"] pub mod extended_dynamic_state3 { use super::super::*; pub use { crate::vk::EXT_EXTENDED_DYNAMIC_STATE3_NAME as NAME, crate::vk::EXT_EXTENDED_DYNAMIC_STATE3_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_extended_dynamic_state3 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_extended_dynamic_state3 device-level function pointers"] pub struct DeviceFn { pub cmd_set_depth_clamp_enable_ext: PFN_vkCmdSetDepthClampEnableEXT, pub cmd_set_polygon_mode_ext: PFN_vkCmdSetPolygonModeEXT, pub cmd_set_rasterization_samples_ext: PFN_vkCmdSetRasterizationSamplesEXT, pub cmd_set_sample_mask_ext: PFN_vkCmdSetSampleMaskEXT, pub cmd_set_alpha_to_coverage_enable_ext: PFN_vkCmdSetAlphaToCoverageEnableEXT, pub cmd_set_alpha_to_one_enable_ext: PFN_vkCmdSetAlphaToOneEnableEXT, pub cmd_set_logic_op_enable_ext: PFN_vkCmdSetLogicOpEnableEXT, pub cmd_set_color_blend_enable_ext: PFN_vkCmdSetColorBlendEnableEXT, pub cmd_set_color_blend_equation_ext: PFN_vkCmdSetColorBlendEquationEXT, pub cmd_set_color_write_mask_ext: PFN_vkCmdSetColorWriteMaskEXT, pub cmd_set_tessellation_domain_origin_ext: PFN_vkCmdSetTessellationDomainOriginEXT, pub cmd_set_rasterization_stream_ext: PFN_vkCmdSetRasterizationStreamEXT, pub cmd_set_conservative_rasterization_mode_ext: PFN_vkCmdSetConservativeRasterizationModeEXT, pub cmd_set_extra_primitive_overestimation_size_ext: PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT, pub cmd_set_depth_clip_enable_ext: PFN_vkCmdSetDepthClipEnableEXT, pub cmd_set_sample_locations_enable_ext: 
PFN_vkCmdSetSampleLocationsEnableEXT, pub cmd_set_color_blend_advanced_ext: PFN_vkCmdSetColorBlendAdvancedEXT, pub cmd_set_provoking_vertex_mode_ext: PFN_vkCmdSetProvokingVertexModeEXT, pub cmd_set_line_rasterization_mode_ext: PFN_vkCmdSetLineRasterizationModeEXT, pub cmd_set_line_stipple_enable_ext: PFN_vkCmdSetLineStippleEnableEXT, pub cmd_set_depth_clip_negative_one_to_one_ext: PFN_vkCmdSetDepthClipNegativeOneToOneEXT, pub cmd_set_viewport_w_scaling_enable_nv: PFN_vkCmdSetViewportWScalingEnableNV, pub cmd_set_viewport_swizzle_nv: PFN_vkCmdSetViewportSwizzleNV, pub cmd_set_coverage_to_color_enable_nv: PFN_vkCmdSetCoverageToColorEnableNV, pub cmd_set_coverage_to_color_location_nv: PFN_vkCmdSetCoverageToColorLocationNV, pub cmd_set_coverage_modulation_mode_nv: PFN_vkCmdSetCoverageModulationModeNV, pub cmd_set_coverage_modulation_table_enable_nv: PFN_vkCmdSetCoverageModulationTableEnableNV, pub cmd_set_coverage_modulation_table_nv: PFN_vkCmdSetCoverageModulationTableNV, pub cmd_set_shading_rate_image_enable_nv: PFN_vkCmdSetShadingRateImageEnableNV, pub cmd_set_representative_fragment_test_enable_nv: PFN_vkCmdSetRepresentativeFragmentTestEnableNV, pub cmd_set_coverage_reduction_mode_nv: PFN_vkCmdSetCoverageReductionModeNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_depth_clamp_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_clamp_enable_ext( _command_buffer: CommandBuffer, _depth_clamp_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_clamp_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthClampEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_clamp_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_polygon_mode_ext: unsafe { unsafe extern "system" fn cmd_set_polygon_mode_ext( _command_buffer: CommandBuffer, _polygon_mode: PolygonMode, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_polygon_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPolygonModeEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_polygon_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_rasterization_samples_ext: unsafe { unsafe extern "system" fn cmd_set_rasterization_samples_ext( _command_buffer: CommandBuffer, _rasterization_samples: SampleCountFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterization_samples_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRasterizationSamplesEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_rasterization_samples_ext } else { ::core::mem::transmute(val) } }, cmd_set_sample_mask_ext: unsafe { unsafe extern "system" fn cmd_set_sample_mask_ext( _command_buffer: CommandBuffer, _samples: SampleCountFlags, _p_sample_mask: *const SampleMask, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_sample_mask_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetSampleMaskEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_sample_mask_ext } else { ::core::mem::transmute(val) } }, cmd_set_alpha_to_coverage_enable_ext: unsafe { unsafe extern "system" fn cmd_set_alpha_to_coverage_enable_ext( _command_buffer: CommandBuffer, _alpha_to_coverage_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_alpha_to_coverage_enable_ext) )) } let cname = 
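// Entry-point names are stored as NUL-terminated byte literals, so the CStr
// can be built without validation or allocation; the trailing \0 in the
// literal is what keeps `from_bytes_with_nul_unchecked` sound here.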
CStr::from_bytes_with_nul_unchecked( b"vkCmdSetAlphaToCoverageEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_alpha_to_coverage_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_alpha_to_one_enable_ext: unsafe { unsafe extern "system" fn cmd_set_alpha_to_one_enable_ext( _command_buffer: CommandBuffer, _alpha_to_one_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_alpha_to_one_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetAlphaToOneEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_alpha_to_one_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_logic_op_enable_ext: unsafe { unsafe extern "system" fn cmd_set_logic_op_enable_ext( _command_buffer: CommandBuffer, _logic_op_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_logic_op_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLogicOpEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_logic_op_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_blend_enable_ext: unsafe { unsafe extern "system" fn cmd_set_color_blend_enable_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_blend_enables: *const Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_blend_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorBlendEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_blend_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_blend_equation_ext: unsafe { unsafe extern "system" fn cmd_set_color_blend_equation_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_blend_equations: *const ColorBlendEquationEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_blend_equation_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorBlendEquationEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_blend_equation_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_write_mask_ext: unsafe { unsafe extern "system" fn cmd_set_color_write_mask_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_write_masks: *const ColorComponentFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_write_mask_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorWriteMaskEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_write_mask_ext } else { ::core::mem::transmute(val) } }, cmd_set_tessellation_domain_origin_ext: unsafe { unsafe extern "system" fn cmd_set_tessellation_domain_origin_ext( _command_buffer: CommandBuffer, _domain_origin: TessellationDomainOrigin, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_tessellation_domain_origin_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetTessellationDomainOriginEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_tessellation_domain_origin_ext } else { ::core::mem::transmute(val) } }, cmd_set_rasterization_stream_ext: unsafe { unsafe extern "system" fn cmd_set_rasterization_stream_ext( _command_buffer: CommandBuffer, _rasterization_stream: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterization_stream_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRasterizationStreamEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_rasterization_stream_ext } else { ::core::mem::transmute(val) } 
}, cmd_set_conservative_rasterization_mode_ext: unsafe { unsafe extern "system" fn cmd_set_conservative_rasterization_mode_ext( _command_buffer: CommandBuffer, _conservative_rasterization_mode: ConservativeRasterizationModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_conservative_rasterization_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetConservativeRasterizationModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_conservative_rasterization_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_extra_primitive_overestimation_size_ext: unsafe { unsafe extern "system" fn cmd_set_extra_primitive_overestimation_size_ext( _command_buffer: CommandBuffer, _extra_primitive_overestimation_size: f32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_extra_primitive_overestimation_size_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetExtraPrimitiveOverestimationSizeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_extra_primitive_overestimation_size_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_clip_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_clip_enable_ext( _command_buffer: CommandBuffer, _depth_clip_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_clip_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthClipEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_clip_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_sample_locations_enable_ext: unsafe { unsafe extern "system" fn cmd_set_sample_locations_enable_ext( _command_buffer: CommandBuffer, _sample_locations_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_sample_locations_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetSampleLocationsEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_sample_locations_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_blend_advanced_ext: unsafe { unsafe extern "system" fn cmd_set_color_blend_advanced_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_blend_advanced: *const ColorBlendAdvancedEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_blend_advanced_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorBlendAdvancedEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_blend_advanced_ext } else { ::core::mem::transmute(val) } }, cmd_set_provoking_vertex_mode_ext: unsafe { unsafe extern "system" fn cmd_set_provoking_vertex_mode_ext( _command_buffer: CommandBuffer, _provoking_vertex_mode: ProvokingVertexModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_provoking_vertex_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetProvokingVertexModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_provoking_vertex_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_line_rasterization_mode_ext: unsafe { unsafe extern "system" fn cmd_set_line_rasterization_mode_ext( _command_buffer: CommandBuffer, _line_rasterization_mode: LineRasterizationModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_line_rasterization_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetLineRasterizationModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_line_rasterization_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_line_stipple_enable_ext: unsafe { 
unsafe extern "system" fn cmd_set_line_stipple_enable_ext( _command_buffer: CommandBuffer, _stippled_line_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_line_stipple_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineStippleEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_line_stipple_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_clip_negative_one_to_one_ext: unsafe { unsafe extern "system" fn cmd_set_depth_clip_negative_one_to_one_ext( _command_buffer: CommandBuffer, _negative_one_to_one: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_clip_negative_one_to_one_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDepthClipNegativeOneToOneEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_depth_clip_negative_one_to_one_ext } else { ::core::mem::transmute(val) } }, cmd_set_viewport_w_scaling_enable_nv: unsafe { unsafe extern "system" fn cmd_set_viewport_w_scaling_enable_nv( _command_buffer: CommandBuffer, _viewport_w_scaling_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_w_scaling_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetViewportWScalingEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_viewport_w_scaling_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_viewport_swizzle_nv: unsafe { unsafe extern "system" fn cmd_set_viewport_swizzle_nv( _command_buffer: CommandBuffer, _first_viewport: u32, _viewport_count: u32, _p_viewport_swizzles: *const ViewportSwizzleNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_swizzle_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportSwizzleNV\0"); let val = _f(cname); if val.is_null() { cmd_set_viewport_swizzle_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_to_color_enable_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_to_color_enable_nv( _command_buffer: CommandBuffer, _coverage_to_color_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_to_color_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageToColorEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_to_color_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_to_color_location_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_to_color_location_nv( _command_buffer: CommandBuffer, _coverage_to_color_location: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_to_color_location_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageToColorLocationNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_to_color_location_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_modulation_mode_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_modulation_mode_nv( _command_buffer: CommandBuffer, _coverage_modulation_mode: CoverageModulationModeNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_modulation_mode_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageModulationModeNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_modulation_mode_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_modulation_table_enable_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_modulation_table_enable_nv( _command_buffer: CommandBuffer, _coverage_modulation_table_enable: Bool32, ) { 
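// Fallback stub for vkCmdSetCoverageModulationTableEnableNV: it is only kept
// when the loader cannot resolve the entry point, and it panics instead of
// jumping through a null function pointer.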
panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_modulation_table_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageModulationTableEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_modulation_table_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_modulation_table_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_modulation_table_nv( _command_buffer: CommandBuffer, _coverage_modulation_table_count: u32, _p_coverage_modulation_table: *const f32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_modulation_table_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageModulationTableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_modulation_table_nv } else { ::core::mem::transmute(val) } }, cmd_set_shading_rate_image_enable_nv: unsafe { unsafe extern "system" fn cmd_set_shading_rate_image_enable_nv( _command_buffer: CommandBuffer, _shading_rate_image_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_shading_rate_image_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetShadingRateImageEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_shading_rate_image_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_representative_fragment_test_enable_nv: unsafe { unsafe extern "system" fn cmd_set_representative_fragment_test_enable_nv( _command_buffer: CommandBuffer, _representative_fragment_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_representative_fragment_test_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRepresentativeFragmentTestEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_representative_fragment_test_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_reduction_mode_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_reduction_mode_nv( _command_buffer: CommandBuffer, _coverage_reduction_mode: CoverageReductionModeNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_reduction_mode_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageReductionModeNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_reduction_mode_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_subpass_merge_feedback"] pub mod subpass_merge_feedback { use super::super::*; pub use { crate::vk::EXT_SUBPASS_MERGE_FEEDBACK_NAME as NAME, crate::vk::EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_module_identifier"] pub mod shader_module_identifier { use super::super::*; pub use { crate::vk::EXT_SHADER_MODULE_IDENTIFIER_NAME as NAME, crate::vk::EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_shader_module_identifier device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_shader_module_identifier device-level function pointers"] pub struct DeviceFn { pub get_shader_module_identifier_ext: 
PFN_vkGetShaderModuleIdentifierEXT, pub get_shader_module_create_info_identifier_ext: PFN_vkGetShaderModuleCreateInfoIdentifierEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_shader_module_identifier_ext: unsafe { unsafe extern "system" fn get_shader_module_identifier_ext( _device: crate::vk::Device, _shader_module: ShaderModule, _p_identifier: *mut ShaderModuleIdentifierEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_shader_module_identifier_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetShaderModuleIdentifierEXT\0", ); let val = _f(cname); if val.is_null() { get_shader_module_identifier_ext } else { ::core::mem::transmute(val) } }, get_shader_module_create_info_identifier_ext: unsafe { unsafe extern "system" fn get_shader_module_create_info_identifier_ext( _device: crate::vk::Device, _p_create_info: *const ShaderModuleCreateInfo<'_>, _p_identifier: *mut ShaderModuleIdentifierEXT<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_shader_module_create_info_identifier_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetShaderModuleCreateInfoIdentifierEXT\0", ); let val = _f(cname); if val.is_null() { get_shader_module_create_info_identifier_ext } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_rasterization_order_attachment_access"] pub mod rasterization_order_attachment_access { use super::super::*; pub use { crate::vk::EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_NAME as NAME, crate::vk::EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_legacy_dithering"] pub mod legacy_dithering { use super::super::*; pub use { crate::vk::EXT_LEGACY_DITHERING_NAME as NAME, crate::vk::EXT_LEGACY_DITHERING_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pipeline_protected_access"] pub mod pipeline_protected_access { use super::super::*; pub use { crate::vk::EXT_PIPELINE_PROTECTED_ACCESS_NAME as NAME, crate::vk::EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_shader_object"] pub mod shader_object { use super::super::*; pub use { crate::vk::EXT_SHADER_OBJECT_NAME as NAME, crate::vk::EXT_SHADER_OBJECT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_shader_object device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_shader_object device-level function pointers"] pub struct DeviceFn { pub create_shaders_ext: PFN_vkCreateShadersEXT, pub destroy_shader_ext: PFN_vkDestroyShaderEXT, pub get_shader_binary_data_ext: PFN_vkGetShaderBinaryDataEXT, pub cmd_bind_shaders_ext: PFN_vkCmdBindShadersEXT, pub cmd_set_cull_mode_ext: PFN_vkCmdSetCullMode, pub cmd_set_front_face_ext: PFN_vkCmdSetFrontFace, pub cmd_set_primitive_topology_ext: PFN_vkCmdSetPrimitiveTopology, pub cmd_set_viewport_with_count_ext: PFN_vkCmdSetViewportWithCount, pub cmd_set_scissor_with_count_ext: PFN_vkCmdSetScissorWithCount, pub
cmd_bind_vertex_buffers2_ext: PFN_vkCmdBindVertexBuffers2, pub cmd_set_depth_test_enable_ext: PFN_vkCmdSetDepthTestEnable, pub cmd_set_depth_write_enable_ext: PFN_vkCmdSetDepthWriteEnable, pub cmd_set_depth_compare_op_ext: PFN_vkCmdSetDepthCompareOp, pub cmd_set_depth_bounds_test_enable_ext: PFN_vkCmdSetDepthBoundsTestEnable, pub cmd_set_stencil_test_enable_ext: PFN_vkCmdSetStencilTestEnable, pub cmd_set_stencil_op_ext: PFN_vkCmdSetStencilOp, pub cmd_set_vertex_input_ext: PFN_vkCmdSetVertexInputEXT, pub cmd_set_patch_control_points_ext: PFN_vkCmdSetPatchControlPointsEXT, pub cmd_set_rasterizer_discard_enable_ext: PFN_vkCmdSetRasterizerDiscardEnable, pub cmd_set_depth_bias_enable_ext: PFN_vkCmdSetDepthBiasEnable, pub cmd_set_logic_op_ext: PFN_vkCmdSetLogicOpEXT, pub cmd_set_primitive_restart_enable_ext: PFN_vkCmdSetPrimitiveRestartEnable, pub cmd_set_tessellation_domain_origin_ext: PFN_vkCmdSetTessellationDomainOriginEXT, pub cmd_set_depth_clamp_enable_ext: PFN_vkCmdSetDepthClampEnableEXT, pub cmd_set_polygon_mode_ext: PFN_vkCmdSetPolygonModeEXT, pub cmd_set_rasterization_samples_ext: PFN_vkCmdSetRasterizationSamplesEXT, pub cmd_set_sample_mask_ext: PFN_vkCmdSetSampleMaskEXT, pub cmd_set_alpha_to_coverage_enable_ext: PFN_vkCmdSetAlphaToCoverageEnableEXT, pub cmd_set_alpha_to_one_enable_ext: PFN_vkCmdSetAlphaToOneEnableEXT, pub cmd_set_logic_op_enable_ext: PFN_vkCmdSetLogicOpEnableEXT, pub cmd_set_color_blend_enable_ext: PFN_vkCmdSetColorBlendEnableEXT, pub cmd_set_color_blend_equation_ext: PFN_vkCmdSetColorBlendEquationEXT, pub cmd_set_color_write_mask_ext: PFN_vkCmdSetColorWriteMaskEXT, pub cmd_set_rasterization_stream_ext: PFN_vkCmdSetRasterizationStreamEXT, pub cmd_set_conservative_rasterization_mode_ext: PFN_vkCmdSetConservativeRasterizationModeEXT, pub cmd_set_extra_primitive_overestimation_size_ext: PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT, pub cmd_set_depth_clip_enable_ext: PFN_vkCmdSetDepthClipEnableEXT, pub cmd_set_sample_locations_enable_ext: PFN_vkCmdSetSampleLocationsEnableEXT, pub cmd_set_color_blend_advanced_ext: PFN_vkCmdSetColorBlendAdvancedEXT, pub cmd_set_provoking_vertex_mode_ext: PFN_vkCmdSetProvokingVertexModeEXT, pub cmd_set_line_rasterization_mode_ext: PFN_vkCmdSetLineRasterizationModeEXT, pub cmd_set_line_stipple_enable_ext: PFN_vkCmdSetLineStippleEnableEXT, pub cmd_set_depth_clip_negative_one_to_one_ext: PFN_vkCmdSetDepthClipNegativeOneToOneEXT, pub cmd_set_viewport_w_scaling_enable_nv: PFN_vkCmdSetViewportWScalingEnableNV, pub cmd_set_viewport_swizzle_nv: PFN_vkCmdSetViewportSwizzleNV, pub cmd_set_coverage_to_color_enable_nv: PFN_vkCmdSetCoverageToColorEnableNV, pub cmd_set_coverage_to_color_location_nv: PFN_vkCmdSetCoverageToColorLocationNV, pub cmd_set_coverage_modulation_mode_nv: PFN_vkCmdSetCoverageModulationModeNV, pub cmd_set_coverage_modulation_table_enable_nv: PFN_vkCmdSetCoverageModulationTableEnableNV, pub cmd_set_coverage_modulation_table_nv: PFN_vkCmdSetCoverageModulationTableNV, pub cmd_set_shading_rate_image_enable_nv: PFN_vkCmdSetShadingRateImageEnableNV, pub cmd_set_representative_fragment_test_enable_nv: PFN_vkCmdSetRepresentativeFragmentTestEnableNV, pub cmd_set_coverage_reduction_mode_nv: PFN_vkCmdSetCoverageReductionModeNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_shaders_ext: unsafe { unsafe extern "system" fn create_shaders_ext( _device:
crate::vk::Device, _create_info_count: u32, _p_create_infos: *const ShaderCreateInfoEXT<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_shaders: *mut ShaderEXT, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_shaders_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateShadersEXT\0"); let val = _f(cname); if val.is_null() { create_shaders_ext } else { ::core::mem::transmute(val) } }, destroy_shader_ext: unsafe { unsafe extern "system" fn destroy_shader_ext( _device: crate::vk::Device, _shader: ShaderEXT, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_shader_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyShaderEXT\0"); let val = _f(cname); if val.is_null() { destroy_shader_ext } else { ::core::mem::transmute(val) } }, get_shader_binary_data_ext: unsafe { unsafe extern "system" fn get_shader_binary_data_ext( _device: crate::vk::Device, _shader: ShaderEXT, _p_data_size: *mut usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_shader_binary_data_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetShaderBinaryDataEXT\0"); let val = _f(cname); if val.is_null() { get_shader_binary_data_ext } else { ::core::mem::transmute(val) } }, cmd_bind_shaders_ext: unsafe { unsafe extern "system" fn cmd_bind_shaders_ext( _command_buffer: CommandBuffer, _stage_count: u32, _p_stages: *const ShaderStageFlags, _p_shaders: *const ShaderEXT, ) { panic!(concat!("Unable to load ", stringify!(cmd_bind_shaders_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindShadersEXT\0"); let val = _f(cname); if val.is_null() { cmd_bind_shaders_ext } else { ::core::mem::transmute(val) } }, cmd_set_cull_mode_ext: unsafe { unsafe extern "system" fn cmd_set_cull_mode_ext( _command_buffer: CommandBuffer, _cull_mode: CullModeFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_cull_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCullModeEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_cull_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_front_face_ext: unsafe { unsafe extern "system" fn cmd_set_front_face_ext( _command_buffer: CommandBuffer, _front_face: FrontFace, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_front_face_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetFrontFaceEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_front_face_ext } else { ::core::mem::transmute(val) } }, cmd_set_primitive_topology_ext: unsafe { unsafe extern "system" fn cmd_set_primitive_topology_ext( _command_buffer: CommandBuffer, _primitive_topology: PrimitiveTopology, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_primitive_topology_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPrimitiveTopologyEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_primitive_topology_ext } else { ::core::mem::transmute(val) } }, cmd_set_viewport_with_count_ext: unsafe { unsafe extern "system" fn cmd_set_viewport_with_count_ext( _command_buffer: CommandBuffer, _viewport_count: u32, _p_viewports: *const Viewport, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_with_count_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportWithCountEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_viewport_with_count_ext } else { ::core::mem::transmute(val) } }, cmd_set_scissor_with_count_ext: unsafe { unsafe extern "system" fn 
cmd_set_scissor_with_count_ext( _command_buffer: CommandBuffer, _scissor_count: u32, _p_scissors: *const Rect2D, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_scissor_with_count_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetScissorWithCountEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_scissor_with_count_ext } else { ::core::mem::transmute(val) } }, cmd_bind_vertex_buffers2_ext: unsafe { unsafe extern "system" fn cmd_bind_vertex_buffers2_ext( _command_buffer: CommandBuffer, _first_binding: u32, _binding_count: u32, _p_buffers: *const Buffer, _p_offsets: *const DeviceSize, _p_sizes: *const DeviceSize, _p_strides: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_vertex_buffers2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindVertexBuffers2EXT\0"); let val = _f(cname); if val.is_null() { cmd_bind_vertex_buffers2_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_test_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_test_enable_ext( _command_buffer: CommandBuffer, _depth_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_test_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthTestEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_test_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_write_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_write_enable_ext( _command_buffer: CommandBuffer, _depth_write_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_write_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthWriteEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_write_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_compare_op_ext: unsafe { unsafe extern "system" fn cmd_set_depth_compare_op_ext( _command_buffer: CommandBuffer, _depth_compare_op: CompareOp, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_compare_op_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthCompareOpEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_compare_op_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_bounds_test_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_bounds_test_enable_ext( _command_buffer: CommandBuffer, _depth_bounds_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bounds_test_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDepthBoundsTestEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_depth_bounds_test_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_stencil_test_enable_ext: unsafe { unsafe extern "system" fn cmd_set_stencil_test_enable_ext( _command_buffer: CommandBuffer, _stencil_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_test_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilTestEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_test_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_stencil_op_ext: unsafe { unsafe extern "system" fn cmd_set_stencil_op_ext( _command_buffer: CommandBuffer, _face_mask: StencilFaceFlags, _fail_op: StencilOp, _pass_op: StencilOp, _depth_fail_op: StencilOp, _compare_op: CompareOp, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_op_ext) )) } let cname = 
CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilOpEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_op_ext } else { ::core::mem::transmute(val) } }, cmd_set_vertex_input_ext: unsafe { unsafe extern "system" fn cmd_set_vertex_input_ext( _command_buffer: CommandBuffer, _vertex_binding_description_count: u32, _p_vertex_binding_descriptions : * const VertexInputBindingDescription2EXT < '_ >, _vertex_attribute_description_count: u32, _p_vertex_attribute_descriptions : * const VertexInputAttributeDescription2EXT < '_ >, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_vertex_input_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetVertexInputEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_vertex_input_ext } else { ::core::mem::transmute(val) } }, cmd_set_patch_control_points_ext: unsafe { unsafe extern "system" fn cmd_set_patch_control_points_ext( _command_buffer: CommandBuffer, _patch_control_points: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_patch_control_points_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPatchControlPointsEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_patch_control_points_ext } else { ::core::mem::transmute(val) } }, cmd_set_rasterizer_discard_enable_ext: unsafe { unsafe extern "system" fn cmd_set_rasterizer_discard_enable_ext( _command_buffer: CommandBuffer, _rasterizer_discard_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterizer_discard_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRasterizerDiscardEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_rasterizer_discard_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_bias_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_bias_enable_ext( _command_buffer: CommandBuffer, _depth_bias_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bias_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBiasEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bias_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_logic_op_ext: unsafe { unsafe extern "system" fn cmd_set_logic_op_ext( _command_buffer: CommandBuffer, _logic_op: LogicOp, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_logic_op_ext))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLogicOpEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_logic_op_ext } else { ::core::mem::transmute(val) } }, cmd_set_primitive_restart_enable_ext: unsafe { unsafe extern "system" fn cmd_set_primitive_restart_enable_ext( _command_buffer: CommandBuffer, _primitive_restart_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_primitive_restart_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetPrimitiveRestartEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_primitive_restart_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_tessellation_domain_origin_ext: unsafe { unsafe extern "system" fn cmd_set_tessellation_domain_origin_ext( _command_buffer: CommandBuffer, _domain_origin: TessellationDomainOrigin, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_tessellation_domain_origin_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetTessellationDomainOriginEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_tessellation_domain_origin_ext } else { ::core::mem::transmute(val) } }, 
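// Every VK_EXT_shader_object entry point follows the same pattern: look the
// command up by its Vulkan name via `_f`, and fall back to a local panicking
// stub whenever the loader or driver does not expose it.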
cmd_set_depth_clamp_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_clamp_enable_ext( _command_buffer: CommandBuffer, _depth_clamp_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_clamp_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthClampEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_clamp_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_polygon_mode_ext: unsafe { unsafe extern "system" fn cmd_set_polygon_mode_ext( _command_buffer: CommandBuffer, _polygon_mode: PolygonMode, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_polygon_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPolygonModeEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_polygon_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_rasterization_samples_ext: unsafe { unsafe extern "system" fn cmd_set_rasterization_samples_ext( _command_buffer: CommandBuffer, _rasterization_samples: SampleCountFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterization_samples_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRasterizationSamplesEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_rasterization_samples_ext } else { ::core::mem::transmute(val) } }, cmd_set_sample_mask_ext: unsafe { unsafe extern "system" fn cmd_set_sample_mask_ext( _command_buffer: CommandBuffer, _samples: SampleCountFlags, _p_sample_mask: *const SampleMask, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_sample_mask_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetSampleMaskEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_sample_mask_ext } else { ::core::mem::transmute(val) } }, cmd_set_alpha_to_coverage_enable_ext: unsafe { unsafe extern "system" fn cmd_set_alpha_to_coverage_enable_ext( _command_buffer: CommandBuffer, _alpha_to_coverage_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_alpha_to_coverage_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetAlphaToCoverageEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_alpha_to_coverage_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_alpha_to_one_enable_ext: unsafe { unsafe extern "system" fn cmd_set_alpha_to_one_enable_ext( _command_buffer: CommandBuffer, _alpha_to_one_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_alpha_to_one_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetAlphaToOneEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_alpha_to_one_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_logic_op_enable_ext: unsafe { unsafe extern "system" fn cmd_set_logic_op_enable_ext( _command_buffer: CommandBuffer, _logic_op_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_logic_op_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLogicOpEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_logic_op_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_blend_enable_ext: unsafe { unsafe extern "system" fn cmd_set_color_blend_enable_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_blend_enables: *const Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_blend_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorBlendEnableEXT\0"); let val = _f(cname); if val.is_null() 
{ cmd_set_color_blend_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_blend_equation_ext: unsafe { unsafe extern "system" fn cmd_set_color_blend_equation_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_blend_equations: *const ColorBlendEquationEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_blend_equation_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorBlendEquationEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_blend_equation_ext } else { ::core::mem::transmute(val) } }, cmd_set_color_write_mask_ext: unsafe { unsafe extern "system" fn cmd_set_color_write_mask_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_write_masks: *const ColorComponentFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_write_mask_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorWriteMaskEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_write_mask_ext } else { ::core::mem::transmute(val) } }, cmd_set_rasterization_stream_ext: unsafe { unsafe extern "system" fn cmd_set_rasterization_stream_ext( _command_buffer: CommandBuffer, _rasterization_stream: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterization_stream_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRasterizationStreamEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_rasterization_stream_ext } else { ::core::mem::transmute(val) } }, cmd_set_conservative_rasterization_mode_ext: unsafe { unsafe extern "system" fn cmd_set_conservative_rasterization_mode_ext( _command_buffer: CommandBuffer, _conservative_rasterization_mode: ConservativeRasterizationModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_conservative_rasterization_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetConservativeRasterizationModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_conservative_rasterization_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_extra_primitive_overestimation_size_ext: unsafe { unsafe extern "system" fn cmd_set_extra_primitive_overestimation_size_ext( _command_buffer: CommandBuffer, _extra_primitive_overestimation_size: f32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_extra_primitive_overestimation_size_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetExtraPrimitiveOverestimationSizeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_extra_primitive_overestimation_size_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_clip_enable_ext: unsafe { unsafe extern "system" fn cmd_set_depth_clip_enable_ext( _command_buffer: CommandBuffer, _depth_clip_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_clip_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthClipEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_clip_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_sample_locations_enable_ext: unsafe { unsafe extern "system" fn cmd_set_sample_locations_enable_ext( _command_buffer: CommandBuffer, _sample_locations_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_sample_locations_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetSampleLocationsEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_sample_locations_enable_ext } else { 
::core::mem::transmute(val) } }, cmd_set_color_blend_advanced_ext: unsafe { unsafe extern "system" fn cmd_set_color_blend_advanced_ext( _command_buffer: CommandBuffer, _first_attachment: u32, _attachment_count: u32, _p_color_blend_advanced: *const ColorBlendAdvancedEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_color_blend_advanced_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorBlendAdvancedEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_color_blend_advanced_ext } else { ::core::mem::transmute(val) } }, cmd_set_provoking_vertex_mode_ext: unsafe { unsafe extern "system" fn cmd_set_provoking_vertex_mode_ext( _command_buffer: CommandBuffer, _provoking_vertex_mode: ProvokingVertexModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_provoking_vertex_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetProvokingVertexModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_provoking_vertex_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_line_rasterization_mode_ext: unsafe { unsafe extern "system" fn cmd_set_line_rasterization_mode_ext( _command_buffer: CommandBuffer, _line_rasterization_mode: LineRasterizationModeEXT, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_line_rasterization_mode_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetLineRasterizationModeEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_line_rasterization_mode_ext } else { ::core::mem::transmute(val) } }, cmd_set_line_stipple_enable_ext: unsafe { unsafe extern "system" fn cmd_set_line_stipple_enable_ext( _command_buffer: CommandBuffer, _stippled_line_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_line_stipple_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineStippleEnableEXT\0"); let val = _f(cname); if val.is_null() { cmd_set_line_stipple_enable_ext } else { ::core::mem::transmute(val) } }, cmd_set_depth_clip_negative_one_to_one_ext: unsafe { unsafe extern "system" fn cmd_set_depth_clip_negative_one_to_one_ext( _command_buffer: CommandBuffer, _negative_one_to_one: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_clip_negative_one_to_one_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDepthClipNegativeOneToOneEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_depth_clip_negative_one_to_one_ext } else { ::core::mem::transmute(val) } }, cmd_set_viewport_w_scaling_enable_nv: unsafe { unsafe extern "system" fn cmd_set_viewport_w_scaling_enable_nv( _command_buffer: CommandBuffer, _viewport_w_scaling_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_w_scaling_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetViewportWScalingEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_viewport_w_scaling_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_viewport_swizzle_nv: unsafe { unsafe extern "system" fn cmd_set_viewport_swizzle_nv( _command_buffer: CommandBuffer, _first_viewport: u32, _viewport_count: u32, _p_viewport_swizzles: *const ViewportSwizzleNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_swizzle_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportSwizzleNV\0"); let val = _f(cname); if val.is_null() { cmd_set_viewport_swizzle_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_to_color_enable_nv: unsafe { unsafe extern "system" fn 
cmd_set_coverage_to_color_enable_nv( _command_buffer: CommandBuffer, _coverage_to_color_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_to_color_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageToColorEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_to_color_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_to_color_location_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_to_color_location_nv( _command_buffer: CommandBuffer, _coverage_to_color_location: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_to_color_location_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageToColorLocationNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_to_color_location_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_modulation_mode_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_modulation_mode_nv( _command_buffer: CommandBuffer, _coverage_modulation_mode: CoverageModulationModeNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_modulation_mode_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageModulationModeNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_modulation_mode_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_modulation_table_enable_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_modulation_table_enable_nv( _command_buffer: CommandBuffer, _coverage_modulation_table_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_modulation_table_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageModulationTableEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_modulation_table_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_modulation_table_nv: unsafe { unsafe extern "system" fn cmd_set_coverage_modulation_table_nv( _command_buffer: CommandBuffer, _coverage_modulation_table_count: u32, _p_coverage_modulation_table: *const f32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_modulation_table_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageModulationTableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_modulation_table_nv } else { ::core::mem::transmute(val) } }, cmd_set_shading_rate_image_enable_nv: unsafe { unsafe extern "system" fn cmd_set_shading_rate_image_enable_nv( _command_buffer: CommandBuffer, _shading_rate_image_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_shading_rate_image_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetShadingRateImageEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_shading_rate_image_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_representative_fragment_test_enable_nv: unsafe { unsafe extern "system" fn cmd_set_representative_fragment_test_enable_nv( _command_buffer: CommandBuffer, _representative_fragment_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_representative_fragment_test_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRepresentativeFragmentTestEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_representative_fragment_test_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_coverage_reduction_mode_nv: unsafe { unsafe extern "system" fn 
cmd_set_coverage_reduction_mode_nv( _command_buffer: CommandBuffer, _coverage_reduction_mode: CoverageReductionModeNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coverage_reduction_mode_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetCoverageReductionModeNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_coverage_reduction_mode_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_EXT_mutable_descriptor_type"] pub mod mutable_descriptor_type { use super::super::*; pub use { crate::vk::EXT_MUTABLE_DESCRIPTOR_TYPE_NAME as NAME, crate::vk::EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_layer_settings"] pub mod layer_settings { use super::super::*; pub use { crate::vk::EXT_LAYER_SETTINGS_NAME as NAME, crate::vk::EXT_LAYER_SETTINGS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_pipeline_library_group_handles"] pub mod pipeline_library_group_handles { use super::super::*; pub use { crate::vk::EXT_PIPELINE_LIBRARY_GROUP_HANDLES_NAME as NAME, crate::vk::EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_dynamic_rendering_unused_attachments"] pub mod dynamic_rendering_unused_attachments { use super::super::*; pub use { crate::vk::EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_NAME as NAME, crate::vk::EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_EXT_attachment_feedback_loop_dynamic_state"] pub mod attachment_feedback_loop_dynamic_state { use super::super::*; pub use { crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_NAME as NAME, crate::vk::EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_EXT_attachment_feedback_loop_dynamic_state device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_EXT_attachment_feedback_loop_dynamic_state device-level function pointers"] pub struct DeviceFn { pub cmd_set_attachment_feedback_loop_enable_ext: PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_attachment_feedback_loop_enable_ext: unsafe { unsafe extern "system" fn cmd_set_attachment_feedback_loop_enable_ext( _command_buffer: CommandBuffer, _aspect_mask: ImageAspectFlags, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_attachment_feedback_loop_enable_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetAttachmentFeedbackLoopEnableEXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_attachment_feedback_loop_enable_ext } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged FUCHSIA"] pub mod fuchsia { #[doc = "VK_FUCHSIA_imagepipe_surface"] pub mod imagepipe_surface { use super::super::*; pub use { crate::vk::FUCHSIA_IMAGEPIPE_SURFACE_NAME as NAME, crate::vk::FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc =
"VK_FUCHSIA_imagepipe_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_FUCHSIA_imagepipe_surface instance-level function pointers"] pub struct InstanceFn { pub create_image_pipe_surface_fuchsia: PFN_vkCreateImagePipeSurfaceFUCHSIA, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_image_pipe_surface_fuchsia: unsafe { unsafe extern "system" fn create_image_pipe_surface_fuchsia( _instance: crate::vk::Instance, _p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_image_pipe_surface_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateImagePipeSurfaceFUCHSIA\0", ); let val = _f(cname); if val.is_null() { create_image_pipe_surface_fuchsia } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_FUCHSIA_external_memory"] pub mod external_memory { use super::super::*; pub use { crate::vk::FUCHSIA_EXTERNAL_MEMORY_NAME as NAME, crate::vk::FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_FUCHSIA_external_memory device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_FUCHSIA_external_memory device-level function pointers"] pub struct DeviceFn { pub get_memory_zircon_handle_fuchsia: PFN_vkGetMemoryZirconHandleFUCHSIA, pub get_memory_zircon_handle_properties_fuchsia: PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_memory_zircon_handle_fuchsia: unsafe { unsafe extern "system" fn get_memory_zircon_handle_fuchsia( _device: crate::vk::Device, _p_get_zircon_handle_info: *const MemoryGetZirconHandleInfoFUCHSIA<'_>, _p_zircon_handle: *mut zx_handle_t, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_zircon_handle_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetMemoryZirconHandleFUCHSIA\0", ); let val = _f(cname); if val.is_null() { get_memory_zircon_handle_fuchsia } else { ::core::mem::transmute(val) } }, get_memory_zircon_handle_properties_fuchsia: unsafe { unsafe extern "system" fn get_memory_zircon_handle_properties_fuchsia( _device: crate::vk::Device,
_handle_type: ExternalMemoryHandleTypeFlags, _zircon_handle: zx_handle_t, _p_memory_zircon_handle_properties : * mut MemoryZirconHandlePropertiesFUCHSIA < '_ >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_zircon_handle_properties_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetMemoryZirconHandlePropertiesFUCHSIA\0", ); let val = _f(cname); if val.is_null() { get_memory_zircon_handle_properties_fuchsia } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_FUCHSIA_external_semaphore"] pub mod external_semaphore { use super::super::*; pub use { crate::vk::FUCHSIA_EXTERNAL_SEMAPHORE_NAME as NAME, crate::vk::FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_FUCHSIA_external_semaphore device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_FUCHSIA_external_semaphore device-level function pointers"] pub struct DeviceFn { pub import_semaphore_zircon_handle_fuchsia: PFN_vkImportSemaphoreZirconHandleFUCHSIA, pub get_semaphore_zircon_handle_fuchsia: PFN_vkGetSemaphoreZirconHandleFUCHSIA, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { import_semaphore_zircon_handle_fuchsia: unsafe { unsafe extern "system" fn import_semaphore_zircon_handle_fuchsia( _device: crate::vk::Device, _p_import_semaphore_zircon_handle_info : * const ImportSemaphoreZirconHandleInfoFUCHSIA < '_ >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(import_semaphore_zircon_handle_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkImportSemaphoreZirconHandleFUCHSIA\0", ); let val = _f(cname); if val.is_null() { import_semaphore_zircon_handle_fuchsia } else { ::core::mem::transmute(val) } }, get_semaphore_zircon_handle_fuchsia: unsafe { unsafe extern "system" fn get_semaphore_zircon_handle_fuchsia( _device: crate::vk::Device, _p_get_zircon_handle_info: *const SemaphoreGetZirconHandleInfoFUCHSIA< '_, >, _p_zircon_handle: *mut zx_handle_t, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_semaphore_zircon_handle_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetSemaphoreZirconHandleFUCHSIA\0", ); let val = _f(cname); if val.is_null() { get_semaphore_zircon_handle_fuchsia } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_FUCHSIA_buffer_collection"] pub mod buffer_collection { use super::super::*; pub use { crate::vk::FUCHSIA_BUFFER_COLLECTION_NAME as NAME, crate::vk::FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_FUCHSIA_buffer_collection device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self {
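// Device-level VK_FUCHSIA_buffer_collection commands were just resolved through
// vkGetDeviceProcAddr for this specific VkDevice. Minimal usage sketch, assuming
// `instance: ash::Instance` and `device: ash::Device` already exist elsewhere:
//     let bc = buffer_collection::Device::new(&instance, &device);
//     let create: PFN_vkCreateBufferCollectionFUCHSIA = bc.fp().create_buffer_collection_fuchsia;
// `create` is either the driver's entry point or the panicking stub installed
// by DeviceFn::load_erased below when the extension is unavailable.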
handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_FUCHSIA_buffer_collection device-level function pointers"] pub struct DeviceFn { pub create_buffer_collection_fuchsia: PFN_vkCreateBufferCollectionFUCHSIA, pub set_buffer_collection_image_constraints_fuchsia: PFN_vkSetBufferCollectionImageConstraintsFUCHSIA, pub set_buffer_collection_buffer_constraints_fuchsia: PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA, pub destroy_buffer_collection_fuchsia: PFN_vkDestroyBufferCollectionFUCHSIA, pub get_buffer_collection_properties_fuchsia: PFN_vkGetBufferCollectionPropertiesFUCHSIA, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_buffer_collection_fuchsia: unsafe { unsafe extern "system" fn create_buffer_collection_fuchsia( _device: crate::vk::Device, _p_create_info: *const BufferCollectionCreateInfoFUCHSIA<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_collection: *mut BufferCollectionFUCHSIA, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_buffer_collection_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateBufferCollectionFUCHSIA\0", ); let val = _f(cname); if val.is_null() { create_buffer_collection_fuchsia } else { ::core::mem::transmute(val) } }, set_buffer_collection_image_constraints_fuchsia: unsafe { unsafe extern "system" fn set_buffer_collection_image_constraints_fuchsia( _device: crate::vk::Device, _collection: BufferCollectionFUCHSIA, _p_image_constraints_info: *const ImageConstraintsInfoFUCHSIA<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(set_buffer_collection_image_constraints_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkSetBufferCollectionImageConstraintsFUCHSIA\0", ); let val = _f(cname); if val.is_null() { set_buffer_collection_image_constraints_fuchsia } else { ::core::mem::transmute(val) } }, set_buffer_collection_buffer_constraints_fuchsia: unsafe { unsafe extern "system" fn set_buffer_collection_buffer_constraints_fuchsia( _device: crate::vk::Device, _collection: BufferCollectionFUCHSIA, _p_buffer_constraints_info: *const BufferConstraintsInfoFUCHSIA<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(set_buffer_collection_buffer_constraints_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkSetBufferCollectionBufferConstraintsFUCHSIA\0", ); let val = _f(cname); if val.is_null() { set_buffer_collection_buffer_constraints_fuchsia } else { ::core::mem::transmute(val) } }, destroy_buffer_collection_fuchsia: unsafe { unsafe extern "system" fn destroy_buffer_collection_fuchsia( _device: crate::vk::Device, _collection: BufferCollectionFUCHSIA, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_buffer_collection_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyBufferCollectionFUCHSIA\0", ); let val = _f(cname); if val.is_null() { destroy_buffer_collection_fuchsia } else { ::core::mem::transmute(val) } }, get_buffer_collection_properties_fuchsia: unsafe { unsafe extern "system" fn get_buffer_collection_properties_fuchsia( _device: crate::vk::Device, _collection: BufferCollectionFUCHSIA, _p_properties: *mut BufferCollectionPropertiesFUCHSIA<'_>, ) -> Result { panic!(concat!( "Unable
to load ", stringify!(get_buffer_collection_properties_fuchsia) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetBufferCollectionPropertiesFUCHSIA\0", ); let val = _f(cname); if val.is_null() { get_buffer_collection_properties_fuchsia } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged GGP"] pub mod ggp { #[doc = "VK_GGP_stream_descriptor_surface"] pub mod stream_descriptor_surface { use super::super::*; pub use { crate::vk::GGP_STREAM_DESCRIPTOR_SURFACE_NAME as NAME, crate::vk::GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_GGP_stream_descriptor_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_GGP_stream_descriptor_surface instance-level function pointers"] pub struct InstanceFn { pub create_stream_descriptor_surface_ggp: PFN_vkCreateStreamDescriptorSurfaceGGP, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_stream_descriptor_surface_ggp: unsafe { unsafe extern "system" fn create_stream_descriptor_surface_ggp( _instance: crate::vk::Instance, _p_create_info: *const StreamDescriptorSurfaceCreateInfoGGP<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_stream_descriptor_surface_ggp) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateStreamDescriptorSurfaceGGP\0", ); let val = _f(cname); if val.is_null() { create_stream_descriptor_surface_ggp } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_GGP_frame_token"] pub mod frame_token { use super::super::*; pub use { crate::vk::GGP_FRAME_TOKEN_NAME as NAME, crate::vk::GGP_FRAME_TOKEN_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged GOOGLE"] pub mod google { #[doc = "VK_GOOGLE_display_timing"] pub mod display_timing { use super::super::*; pub use { crate::vk::GOOGLE_DISPLAY_TIMING_NAME as NAME, crate::vk::GOOGLE_DISPLAY_TIMING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_GOOGLE_display_timing device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_GOOGLE_display_timing device-level function pointers"] pub struct DeviceFn { pub get_refresh_cycle_duration_google: PFN_vkGetRefreshCycleDurationGOOGLE, pub get_past_presentation_timing_google: PFN_vkGetPastPresentationTimingGOOGLE, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) ->
*const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_refresh_cycle_duration_google: unsafe { unsafe extern "system" fn get_refresh_cycle_duration_google( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_display_timing_properties: *mut RefreshCycleDurationGOOGLE, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_refresh_cycle_duration_google) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetRefreshCycleDurationGOOGLE\0", ); let val = _f(cname); if val.is_null() { get_refresh_cycle_duration_google } else { ::core::mem::transmute(val) } }, get_past_presentation_timing_google: unsafe { unsafe extern "system" fn get_past_presentation_timing_google( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_presentation_timing_count: *mut u32, _p_presentation_timings: *mut PastPresentationTimingGOOGLE, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_past_presentation_timing_google) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPastPresentationTimingGOOGLE\0", ); let val = _f(cname); if val.is_null() { get_past_presentation_timing_google } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_GOOGLE_hlsl_functionality1"] pub mod hlsl_functionality1 { use super::super::*; pub use { crate::vk::GOOGLE_HLSL_FUNCTIONALITY1_NAME as NAME, crate::vk::GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_GOOGLE_decorate_string"] pub mod decorate_string { use super::super::*; pub use { crate::vk::GOOGLE_DECORATE_STRING_NAME as NAME, crate::vk::GOOGLE_DECORATE_STRING_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_GOOGLE_user_type"] pub mod user_type { use super::super::*; pub use { crate::vk::GOOGLE_USER_TYPE_NAME as NAME, crate::vk::GOOGLE_USER_TYPE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_GOOGLE_surfaceless_query"] pub mod surfaceless_query { use super::super::*; pub use { crate::vk::GOOGLE_SURFACELESS_QUERY_NAME as NAME, crate::vk::GOOGLE_SURFACELESS_QUERY_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged HUAWEI"] pub mod huawei { #[doc = "VK_HUAWEI_subpass_shading"] pub mod subpass_shading { use super::super::*; pub use { crate::vk::HUAWEI_SUBPASS_SHADING_NAME as NAME, crate::vk::HUAWEI_SUBPASS_SHADING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_HUAWEI_subpass_shading device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_HUAWEI_subpass_shading device-level function pointers"] pub struct DeviceFn { pub get_device_subpass_shading_max_workgroup_size_huawei: PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, pub cmd_subpass_shading_huawei: PFN_vkCmdSubpassShadingHUAWEI, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_device_subpass_shading_max_workgroup_size_huawei: unsafe { unsafe extern "system" fn 
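// Illustrative sketch (editor's addition, not part of the generated bindings): using the
// VK_GOOGLE_display_timing device wrapper defined above through its raw function-pointer
// table. The helper name, and the assumption that the vendor-tag modules are exposed at the
// crate root (e.g. `ash::google::display_timing`), are illustrative only; the caller is
// assumed to already own a loaded `ash::Instance`/`ash::Device` and a swapchain.
#[allow(dead_code)]
fn query_refresh_cycle_example(
    instance: &ash::Instance,
    device: &ash::Device,
    swapchain: ash::vk::SwapchainKHR,
) -> Result<ash::vk::RefreshCycleDurationGOOGLE, ash::vk::Result> {
    // Resolves vkGetRefreshCycleDurationGOOGLE (and friends) once via vkGetDeviceProcAddr.
    let display_timing = ash::google::display_timing::Device::new(instance, device);
    let mut duration = ash::vk::RefreshCycleDurationGOOGLE::default();
    let result = unsafe {
        (display_timing.fp().get_refresh_cycle_duration_google)(
            display_timing.device(),
            swapchain,
            &mut duration,
        )
    };
    match result {
        ash::vk::Result::SUCCESS => Ok(duration),
        err => Err(err),
    }
}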
get_device_subpass_shading_max_workgroup_size_huawei( _device: crate::vk::Device, _renderpass: RenderPass, _p_max_workgroup_size: *mut Extent2D, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_subpass_shading_max_workgroup_size_huawei) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI\0", ); let val = _f(cname); if val.is_null() { get_device_subpass_shading_max_workgroup_size_huawei } else { ::core::mem::transmute(val) } }, cmd_subpass_shading_huawei: unsafe { unsafe extern "system" fn cmd_subpass_shading_huawei( _command_buffer: CommandBuffer, ) { panic!(concat!( "Unable to load ", stringify!(cmd_subpass_shading_huawei) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSubpassShadingHUAWEI\0"); let val = _f(cname); if val.is_null() { cmd_subpass_shading_huawei } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_HUAWEI_invocation_mask"] pub mod invocation_mask { use super::super::*; pub use { crate::vk::HUAWEI_INVOCATION_MASK_NAME as NAME, crate::vk::HUAWEI_INVOCATION_MASK_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_HUAWEI_invocation_mask device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_HUAWEI_invocation_mask device-level function pointers"] pub struct DeviceFn { pub cmd_bind_invocation_mask_huawei: PFN_vkCmdBindInvocationMaskHUAWEI, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_bind_invocation_mask_huawei: unsafe { unsafe extern "system" fn cmd_bind_invocation_mask_huawei( _command_buffer: CommandBuffer, _image_view: ImageView, _image_layout: ImageLayout, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_invocation_mask_huawei) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindInvocationMaskHUAWEI\0"); let val = _f(cname); if val.is_null() { cmd_bind_invocation_mask_huawei } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_HUAWEI_cluster_culling_shader"] pub mod cluster_culling_shader { use super::super::*; pub use { crate::vk::HUAWEI_CLUSTER_CULLING_SHADER_NAME as NAME, crate::vk::HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_HUAWEI_cluster_culling_shader device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_HUAWEI_cluster_culling_shader device-level function pointers"] pub struct DeviceFn { pub cmd_draw_cluster_huawei: PFN_vkCmdDrawClusterHUAWEI, pub 
cmd_draw_cluster_indirect_huawei: PFN_vkCmdDrawClusterIndirectHUAWEI, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_cluster_huawei: unsafe { unsafe extern "system" fn cmd_draw_cluster_huawei( _command_buffer: CommandBuffer, _group_count_x: u32, _group_count_y: u32, _group_count_z: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_cluster_huawei) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawClusterHUAWEI\0"); let val = _f(cname); if val.is_null() { cmd_draw_cluster_huawei } else { ::core::mem::transmute(val) } }, cmd_draw_cluster_indirect_huawei: unsafe { unsafe extern "system" fn cmd_draw_cluster_indirect_huawei( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_cluster_indirect_huawei) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDrawClusterIndirectHUAWEI\0", ); let val = _f(cname); if val.is_null() { cmd_draw_cluster_indirect_huawei } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged IMG"] pub mod img { #[doc = "VK_IMG_filter_cubic"] pub mod filter_cubic { use super::super::*; pub use { crate::vk::IMG_FILTER_CUBIC_NAME as NAME, crate::vk::IMG_FILTER_CUBIC_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_IMG_format_pvrtc"] pub mod format_pvrtc { use super::super::*; pub use { crate::vk::IMG_FORMAT_PVRTC_NAME as NAME, crate::vk::IMG_FORMAT_PVRTC_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_IMG_relaxed_line_rasterization"] pub mod relaxed_line_rasterization { use super::super::*; pub use { crate::vk::IMG_RELAXED_LINE_RASTERIZATION_NAME as NAME, crate::vk::IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged INTEL"] pub mod intel { #[doc = "VK_INTEL_shader_integer_functions2"] pub mod shader_integer_functions2 { use super::super::*; pub use { crate::vk::INTEL_SHADER_INTEGER_FUNCTIONS2_NAME as NAME, crate::vk::INTEL_SHADER_INTEGER_FUNCTIONS2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_INTEL_performance_query"] pub mod performance_query { use super::super::*; pub use { crate::vk::INTEL_PERFORMANCE_QUERY_NAME as NAME, crate::vk::INTEL_PERFORMANCE_QUERY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_INTEL_performance_query device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_INTEL_performance_query device-level function pointers"] pub struct DeviceFn { pub initialize_performance_api_intel: PFN_vkInitializePerformanceApiINTEL, pub uninitialize_performance_api_intel: PFN_vkUninitializePerformanceApiINTEL, pub cmd_set_performance_marker_intel: PFN_vkCmdSetPerformanceMarkerINTEL, pub cmd_set_performance_stream_marker_intel: PFN_vkCmdSetPerformanceStreamMarkerINTEL, pub cmd_set_performance_override_intel: PFN_vkCmdSetPerformanceOverrideINTEL, pub acquire_performance_configuration_intel: PFN_vkAcquirePerformanceConfigurationINTEL, pub 
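// Illustrative sketch (editor's addition, not part of the generated bindings): recording the
// VK_HUAWEI_cluster_culling_shader draw command defined above. The wrapper is assumed to have
// been built with `Device::new`, the command buffer to be in the recording state, and the
// crate-root path `ash::huawei::...` is an assumption for the example.
#[allow(dead_code)]
unsafe fn record_cluster_draw_example(
    cluster_culling: &ash::huawei::cluster_culling_shader::Device,
    command_buffer: ash::vk::CommandBuffer,
) {
    // Dispatch a single 1x1x1 group of cluster-culling work through the raw pointer table.
    (cluster_culling.fp().cmd_draw_cluster_huawei)(command_buffer, 1, 1, 1);
}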
release_performance_configuration_intel: PFN_vkReleasePerformanceConfigurationINTEL, pub queue_set_performance_configuration_intel: PFN_vkQueueSetPerformanceConfigurationINTEL, pub get_performance_parameter_intel: PFN_vkGetPerformanceParameterINTEL, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { initialize_performance_api_intel: unsafe { unsafe extern "system" fn initialize_performance_api_intel( _device: crate::vk::Device, _p_initialize_info: *const InitializePerformanceApiInfoINTEL<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(initialize_performance_api_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkInitializePerformanceApiINTEL\0", ); let val = _f(cname); if val.is_null() { initialize_performance_api_intel } else { ::core::mem::transmute(val) } }, uninitialize_performance_api_intel: unsafe { unsafe extern "system" fn uninitialize_performance_api_intel( _device: crate::vk::Device, ) { panic!(concat!( "Unable to load ", stringify!(uninitialize_performance_api_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkUninitializePerformanceApiINTEL\0", ); let val = _f(cname); if val.is_null() { uninitialize_performance_api_intel } else { ::core::mem::transmute(val) } }, cmd_set_performance_marker_intel: unsafe { unsafe extern "system" fn cmd_set_performance_marker_intel( _command_buffer: CommandBuffer, _p_marker_info: *const PerformanceMarkerInfoINTEL<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(cmd_set_performance_marker_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetPerformanceMarkerINTEL\0", ); let val = _f(cname); if val.is_null() { cmd_set_performance_marker_intel } else { ::core::mem::transmute(val) } }, cmd_set_performance_stream_marker_intel: unsafe { unsafe extern "system" fn cmd_set_performance_stream_marker_intel( _command_buffer: CommandBuffer, _p_marker_info: *const PerformanceStreamMarkerInfoINTEL<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(cmd_set_performance_stream_marker_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetPerformanceStreamMarkerINTEL\0", ); let val = _f(cname); if val.is_null() { cmd_set_performance_stream_marker_intel } else { ::core::mem::transmute(val) } }, cmd_set_performance_override_intel: unsafe { unsafe extern "system" fn cmd_set_performance_override_intel( _command_buffer: CommandBuffer, _p_override_info: *const PerformanceOverrideInfoINTEL<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(cmd_set_performance_override_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetPerformanceOverrideINTEL\0", ); let val = _f(cname); if val.is_null() { cmd_set_performance_override_intel } else { ::core::mem::transmute(val) } }, acquire_performance_configuration_intel: unsafe { unsafe extern "system" fn acquire_performance_configuration_intel( _device: crate::vk::Device, _p_acquire_info: *const PerformanceConfigurationAcquireInfoINTEL<'_>, _p_configuration: *mut PerformanceConfigurationINTEL, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_performance_configuration_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkAcquirePerformanceConfigurationINTEL\0", ); let val = _f(cname); if val.is_null() { acquire_performance_configuration_intel } else { ::core::mem::transmute(val) } }, 
release_performance_configuration_intel: unsafe { unsafe extern "system" fn release_performance_configuration_intel( _device: crate::vk::Device, _configuration: PerformanceConfigurationINTEL, ) -> Result { panic!(concat!( "Unable to load ", stringify!(release_performance_configuration_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkReleasePerformanceConfigurationINTEL\0", ); let val = _f(cname); if val.is_null() { release_performance_configuration_intel } else { ::core::mem::transmute(val) } }, queue_set_performance_configuration_intel: unsafe { unsafe extern "system" fn queue_set_performance_configuration_intel( _queue: Queue, _configuration: PerformanceConfigurationINTEL, ) -> Result { panic!(concat!( "Unable to load ", stringify!(queue_set_performance_configuration_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkQueueSetPerformanceConfigurationINTEL\0", ); let val = _f(cname); if val.is_null() { queue_set_performance_configuration_intel } else { ::core::mem::transmute(val) } }, get_performance_parameter_intel: unsafe { unsafe extern "system" fn get_performance_parameter_intel( _device: crate::vk::Device, _parameter: PerformanceParameterTypeINTEL, _p_value: *mut PerformanceValueINTEL, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_performance_parameter_intel) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPerformanceParameterINTEL\0", ); let val = _f(cname); if val.is_null() { get_performance_parameter_intel } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged KHR"] pub mod khr { #[doc = "VK_KHR_surface"] pub mod surface { use super::super::*; pub use { crate::vk::KHR_SURFACE_NAME as NAME, crate::vk::KHR_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_surface instance-level function pointers"] pub struct InstanceFn { pub destroy_surface_khr: PFN_vkDestroySurfaceKHR, pub get_physical_device_surface_support_khr: PFN_vkGetPhysicalDeviceSurfaceSupportKHR, pub get_physical_device_surface_capabilities_khr: PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR, pub get_physical_device_surface_formats_khr: PFN_vkGetPhysicalDeviceSurfaceFormatsKHR, pub get_physical_device_surface_present_modes_khr: PFN_vkGetPhysicalDeviceSurfacePresentModesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { destroy_surface_khr: unsafe { unsafe extern "system" fn destroy_surface_khr( _instance: crate::vk::Instance, _surface: SurfaceKHR, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_surface_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroySurfaceKHR\0"); let val = _f(cname); if val.is_null() { destroy_surface_khr } else { ::core::mem::transmute(val) } }, get_physical_device_surface_support_khr: unsafe 
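// Illustrative sketch (editor's addition, not part of the generated bindings): bracketing a
// workload with the VK_INTEL_performance_query initialize/uninitialize entry points shown
// above. This hypothetical helper assumes a wrapper already built with
// `intel::performance_query::Device::new` and the crate-root `ash::intel::...` path.
#[allow(dead_code)]
unsafe fn intel_perf_session_example(
    perf: &ash::intel::performance_query::Device,
) -> Result<(), ash::vk::Result> {
    // Bring up the Intel performance API before recording any performance markers.
    let info = ash::vk::InitializePerformanceApiInfoINTEL::default();
    match (perf.fp().initialize_performance_api_intel)(perf.device(), &info) {
        ash::vk::Result::SUCCESS => {}
        err => return Err(err),
    }
    // ... record markers / acquire a performance configuration here ...
    (perf.fp().uninitialize_performance_api_intel)(perf.device());
    Ok(())
}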
{ unsafe extern "system" fn get_physical_device_surface_support_khr( _physical_device: PhysicalDevice, _queue_family_index: u32, _surface: SurfaceKHR, _p_supported: *mut Bool32, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_support_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfaceSupportKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_support_khr } else { ::core::mem::transmute(val) } }, get_physical_device_surface_capabilities_khr: unsafe { unsafe extern "system" fn get_physical_device_surface_capabilities_khr( _physical_device: PhysicalDevice, _surface: SurfaceKHR, _p_surface_capabilities: *mut SurfaceCapabilitiesKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_capabilities_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfaceCapabilitiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_capabilities_khr } else { ::core::mem::transmute(val) } }, get_physical_device_surface_formats_khr: unsafe { unsafe extern "system" fn get_physical_device_surface_formats_khr( _physical_device: PhysicalDevice, _surface: SurfaceKHR, _p_surface_format_count: *mut u32, _p_surface_formats: *mut SurfaceFormatKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_formats_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfaceFormatsKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_formats_khr } else { ::core::mem::transmute(val) } }, get_physical_device_surface_present_modes_khr: unsafe { unsafe extern "system" fn get_physical_device_surface_present_modes_khr( _physical_device: PhysicalDevice, _surface: SurfaceKHR, _p_present_mode_count: *mut u32, _p_present_modes: *mut PresentModeKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_present_modes_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfacePresentModesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_present_modes_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_swapchain"] pub mod swapchain { use super::super::*; pub use { crate::vk::KHR_SWAPCHAIN_NAME as NAME, crate::vk::KHR_SWAPCHAIN_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_swapchain instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_swapchain instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_present_rectangles_khr: PFN_vkGetPhysicalDevicePresentRectanglesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_present_rectangles_khr: unsafe { unsafe extern "system" fn get_physical_device_present_rectangles_khr( 
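// Illustrative sketch (editor's addition, not part of the generated bindings): checking queue
// family presentation support through the VK_KHR_surface instance wrapper defined above. The
// wrapper is assumed to have been built with `khr::surface::Instance::new(&entry, &instance)`;
// the helper name is hypothetical.
#[allow(dead_code)]
fn queue_family_supports_surface_example(
    surface_instance: &ash::khr::surface::Instance,
    physical_device: ash::vk::PhysicalDevice,
    queue_family_index: u32,
    surface: ash::vk::SurfaceKHR,
) -> Result<bool, ash::vk::Result> {
    let mut supported: ash::vk::Bool32 = ash::vk::FALSE;
    let result = unsafe {
        (surface_instance.fp().get_physical_device_surface_support_khr)(
            physical_device,
            queue_family_index,
            surface,
            &mut supported,
        )
    };
    match result {
        ash::vk::Result::SUCCESS => Ok(supported == ash::vk::TRUE),
        err => Err(err),
    }
}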
_physical_device: PhysicalDevice, _surface: SurfaceKHR, _p_rect_count: *mut u32, _p_rects: *mut Rect2D, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_present_rectangles_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDevicePresentRectanglesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_present_rectangles_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_swapchain device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_swapchain device-level function pointers"] pub struct DeviceFn { pub create_swapchain_khr: PFN_vkCreateSwapchainKHR, pub destroy_swapchain_khr: PFN_vkDestroySwapchainKHR, pub get_swapchain_images_khr: PFN_vkGetSwapchainImagesKHR, pub acquire_next_image_khr: PFN_vkAcquireNextImageKHR, pub queue_present_khr: PFN_vkQueuePresentKHR, pub get_device_group_present_capabilities_khr: PFN_vkGetDeviceGroupPresentCapabilitiesKHR, pub get_device_group_surface_present_modes_khr: PFN_vkGetDeviceGroupSurfacePresentModesKHR, pub acquire_next_image2_khr: PFN_vkAcquireNextImage2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_swapchain_khr: unsafe { unsafe extern "system" fn create_swapchain_khr( _device: crate::vk::Device, _p_create_info: *const SwapchainCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_swapchain: *mut SwapchainKHR, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_swapchain_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateSwapchainKHR\0"); let val = _f(cname); if val.is_null() { create_swapchain_khr } else { ::core::mem::transmute(val) } }, destroy_swapchain_khr: unsafe { unsafe extern "system" fn destroy_swapchain_khr( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_swapchain_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroySwapchainKHR\0"); let val = _f(cname); if val.is_null() { destroy_swapchain_khr } else { ::core::mem::transmute(val) } }, get_swapchain_images_khr: unsafe { unsafe extern "system" fn get_swapchain_images_khr( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_swapchain_image_count: *mut u32, _p_swapchain_images: *mut Image, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_swapchain_images_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSwapchainImagesKHR\0"); let val = _f(cname); if val.is_null() { get_swapchain_images_khr } else { ::core::mem::transmute(val) } }, acquire_next_image_khr: unsafe { unsafe extern "system" fn acquire_next_image_khr( _device: crate::vk::Device, _swapchain: SwapchainKHR, _timeout: u64, _semaphore: Semaphore, _fence: Fence, _p_image_index: *mut u32, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_next_image_khr) )) } 
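// Illustrative sketch (editor's addition, not part of the generated bindings): the standard
// Vulkan count-then-fill idiom applied to vkGetSwapchainImagesKHR from the device-level
// wrapper above. For brevity, VK_INCOMPLETE on the second call is treated as an error; the
// helper name and the crate-root `ash::khr::swapchain` path are assumptions.
#[allow(dead_code)]
fn get_swapchain_images_example(
    swapchain_device: &ash::khr::swapchain::Device,
    swapchain: ash::vk::SwapchainKHR,
) -> Result<Vec<ash::vk::Image>, ash::vk::Result> {
    let fp = swapchain_device.fp();
    let device = swapchain_device.device();
    unsafe {
        // First call: ask only for the number of images.
        let mut count = 0u32;
        match (fp.get_swapchain_images_khr)(device, swapchain, &mut count, core::ptr::null_mut()) {
            ash::vk::Result::SUCCESS => {}
            err => return Err(err),
        }
        // Second call: fill a buffer of exactly that size.
        let mut images = vec![ash::vk::Image::null(); count as usize];
        match (fp.get_swapchain_images_khr)(device, swapchain, &mut count, images.as_mut_ptr()) {
            ash::vk::Result::SUCCESS => {
                images.truncate(count as usize);
                Ok(images)
            }
            err => Err(err),
        }
    }
}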
let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireNextImageKHR\0"); let val = _f(cname); if val.is_null() { acquire_next_image_khr } else { ::core::mem::transmute(val) } }, queue_present_khr: unsafe { unsafe extern "system" fn queue_present_khr( _queue: Queue, _p_present_info: *const PresentInfoKHR<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(queue_present_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueuePresentKHR\0"); let val = _f(cname); if val.is_null() { queue_present_khr } else { ::core::mem::transmute(val) } }, get_device_group_present_capabilities_khr: unsafe { unsafe extern "system" fn get_device_group_present_capabilities_khr( _device: crate::vk::Device, _p_device_group_present_capabilities : * mut DeviceGroupPresentCapabilitiesKHR < '_ >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_group_present_capabilities_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceGroupPresentCapabilitiesKHR\0", ); let val = _f(cname); if val.is_null() { get_device_group_present_capabilities_khr } else { ::core::mem::transmute(val) } }, get_device_group_surface_present_modes_khr: unsafe { unsafe extern "system" fn get_device_group_surface_present_modes_khr( _device: crate::vk::Device, _surface: SurfaceKHR, _p_modes: *mut DeviceGroupPresentModeFlagsKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_group_surface_present_modes_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceGroupSurfacePresentModesKHR\0", ); let val = _f(cname); if val.is_null() { get_device_group_surface_present_modes_khr } else { ::core::mem::transmute(val) } }, acquire_next_image2_khr: unsafe { unsafe extern "system" fn acquire_next_image2_khr( _device: crate::vk::Device, _p_acquire_info: *const AcquireNextImageInfoKHR<'_>, _p_image_index: *mut u32, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_next_image2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireNextImage2KHR\0"); let val = _f(cname); if val.is_null() { acquire_next_image2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_display"] pub mod display { use super::super::*; pub use { crate::vk::KHR_DISPLAY_NAME as NAME, crate::vk::KHR_DISPLAY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_display instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_display instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_display_properties_khr: PFN_vkGetPhysicalDeviceDisplayPropertiesKHR, pub get_physical_device_display_plane_properties_khr: PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR, pub get_display_plane_supported_displays_khr: PFN_vkGetDisplayPlaneSupportedDisplaysKHR, pub get_display_mode_properties_khr: PFN_vkGetDisplayModePropertiesKHR, pub create_display_mode_khr: PFN_vkCreateDisplayModeKHR, pub get_display_plane_capabilities_khr: PFN_vkGetDisplayPlaneCapabilitiesKHR, pub create_display_plane_surface_khr: PFN_vkCreateDisplayPlaneSurfaceKHR, } 
unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_display_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_display_properties_khr( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut DisplayPropertiesKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_display_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceDisplayPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_display_properties_khr } else { ::core::mem::transmute(val) } }, get_physical_device_display_plane_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_display_plane_properties_khr( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut DisplayPlanePropertiesKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_display_plane_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceDisplayPlanePropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_display_plane_properties_khr } else { ::core::mem::transmute(val) } }, get_display_plane_supported_displays_khr: unsafe { unsafe extern "system" fn get_display_plane_supported_displays_khr( _physical_device: PhysicalDevice, _plane_index: u32, _p_display_count: *mut u32, _p_displays: *mut DisplayKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_display_plane_supported_displays_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDisplayPlaneSupportedDisplaysKHR\0", ); let val = _f(cname); if val.is_null() { get_display_plane_supported_displays_khr } else { ::core::mem::transmute(val) } }, get_display_mode_properties_khr: unsafe { unsafe extern "system" fn get_display_mode_properties_khr( _physical_device: PhysicalDevice, _display: DisplayKHR, _p_property_count: *mut u32, _p_properties: *mut DisplayModePropertiesKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_display_mode_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDisplayModePropertiesKHR\0"); let val = _f(cname); if val.is_null() { get_display_mode_properties_khr } else { ::core::mem::transmute(val) } }, create_display_mode_khr: unsafe { unsafe extern "system" fn create_display_mode_khr( _physical_device: PhysicalDevice, _display: DisplayKHR, _p_create_info: *const DisplayModeCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_mode: *mut DisplayModeKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_display_mode_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDisplayModeKHR\0"); let val = _f(cname); if val.is_null() { create_display_mode_khr } else { ::core::mem::transmute(val) } }, get_display_plane_capabilities_khr: unsafe { unsafe extern "system" fn get_display_plane_capabilities_khr( _physical_device: PhysicalDevice, _mode: DisplayModeKHR, _plane_index: u32, _p_capabilities: *mut DisplayPlaneCapabilitiesKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_display_plane_capabilities_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDisplayPlaneCapabilitiesKHR\0", ); let val = _f(cname); if val.is_null() { get_display_plane_capabilities_khr } else { 
::core::mem::transmute(val) } }, create_display_plane_surface_khr: unsafe { unsafe extern "system" fn create_display_plane_surface_khr( _instance: crate::vk::Instance, _p_create_info: *const DisplaySurfaceCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_display_plane_surface_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateDisplayPlaneSurfaceKHR\0", ); let val = _f(cname); if val.is_null() { create_display_plane_surface_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_display_swapchain"] pub mod display_swapchain { use super::super::*; pub use { crate::vk::KHR_DISPLAY_SWAPCHAIN_NAME as NAME, crate::vk::KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_display_swapchain device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_display_swapchain device-level function pointers"] pub struct DeviceFn { pub create_shared_swapchains_khr: PFN_vkCreateSharedSwapchainsKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_shared_swapchains_khr: unsafe { unsafe extern "system" fn create_shared_swapchains_khr( _device: crate::vk::Device, _swapchain_count: u32, _p_create_infos: *const SwapchainCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_swapchains: *mut SwapchainKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_shared_swapchains_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateSharedSwapchainsKHR\0"); let val = _f(cname); if val.is_null() { create_shared_swapchains_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_xlib_surface"] pub mod xlib_surface { use super::super::*; pub use { crate::vk::KHR_XLIB_SURFACE_NAME as NAME, crate::vk::KHR_XLIB_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_xlib_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_xlib_surface instance-level function pointers"] pub struct InstanceFn { pub create_xlib_surface_khr: PFN_vkCreateXlibSurfaceKHR, pub get_physical_device_xlib_presentation_support_khr: PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn 
FnMut(&CStr) -> *const c_void) -> Self { Self { create_xlib_surface_khr: unsafe { unsafe extern "system" fn create_xlib_surface_khr( _instance: crate::vk::Instance, _p_create_info: *const XlibSurfaceCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_xlib_surface_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateXlibSurfaceKHR\0"); let val = _f(cname); if val.is_null() { create_xlib_surface_khr } else { ::core::mem::transmute(val) } }, get_physical_device_xlib_presentation_support_khr: unsafe { unsafe extern "system" fn get_physical_device_xlib_presentation_support_khr( _physical_device: PhysicalDevice, _queue_family_index: u32, _dpy: *mut Display, _visual_id: VisualID, ) -> Bool32 { panic!(concat!( "Unable to load ", stringify!(get_physical_device_xlib_presentation_support_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceXlibPresentationSupportKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_xlib_presentation_support_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_xcb_surface"] pub mod xcb_surface { use super::super::*; pub use { crate::vk::KHR_XCB_SURFACE_NAME as NAME, crate::vk::KHR_XCB_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_xcb_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_xcb_surface instance-level function pointers"] pub struct InstanceFn { pub create_xcb_surface_khr: PFN_vkCreateXcbSurfaceKHR, pub get_physical_device_xcb_presentation_support_khr: PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_xcb_surface_khr: unsafe { unsafe extern "system" fn create_xcb_surface_khr( _instance: crate::vk::Instance, _p_create_info: *const XcbSurfaceCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_xcb_surface_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateXcbSurfaceKHR\0"); let val = _f(cname); if val.is_null() { create_xcb_surface_khr } else { ::core::mem::transmute(val) } }, get_physical_device_xcb_presentation_support_khr: unsafe { unsafe extern "system" fn get_physical_device_xcb_presentation_support_khr( _physical_device: PhysicalDevice, _queue_family_index: u32, _connection: *mut xcb_connection_t, _visual_id: xcb_visualid_t, ) -> Bool32 { panic!(concat!( "Unable to load ", stringify!(get_physical_device_xcb_presentation_support_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceXcbPresentationSupportKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_xcb_presentation_support_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = 
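// Illustrative sketch (editor's addition, not part of the generated bindings): unlike most
// surface queries, vkGetPhysicalDeviceXcbPresentationSupportKHR above returns a VkBool32
// directly rather than a VkResult. The raw xcb connection/visual values and the crate-root
// `ash::khr::xcb_surface` path are assumed to come from the caller's windowing setup.
#[allow(dead_code)]
unsafe fn xcb_presentation_support_example(
    xcb_surface: &ash::khr::xcb_surface::Instance,
    physical_device: ash::vk::PhysicalDevice,
    queue_family_index: u32,
    connection: *mut ash::vk::xcb_connection_t,
    visual_id: ash::vk::xcb_visualid_t,
) -> bool {
    (xcb_surface.fp().get_physical_device_xcb_presentation_support_khr)(
        physical_device,
        queue_family_index,
        connection,
        visual_id,
    ) == ash::vk::TRUE
}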
"VK_KHR_wayland_surface"] pub mod wayland_surface { use super::super::*; pub use { crate::vk::KHR_WAYLAND_SURFACE_NAME as NAME, crate::vk::KHR_WAYLAND_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_wayland_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_wayland_surface instance-level function pointers"] pub struct InstanceFn { pub create_wayland_surface_khr: PFN_vkCreateWaylandSurfaceKHR, pub get_physical_device_wayland_presentation_support_khr: PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_wayland_surface_khr: unsafe { unsafe extern "system" fn create_wayland_surface_khr( _instance: crate::vk::Instance, _p_create_info: *const WaylandSurfaceCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_wayland_surface_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateWaylandSurfaceKHR\0"); let val = _f(cname); if val.is_null() { create_wayland_surface_khr } else { ::core::mem::transmute(val) } }, get_physical_device_wayland_presentation_support_khr: unsafe { unsafe extern "system" fn get_physical_device_wayland_presentation_support_khr( _physical_device: PhysicalDevice, _queue_family_index: u32, _display: *mut wl_display, ) -> Bool32 { panic!(concat!( "Unable to load ", stringify!(get_physical_device_wayland_presentation_support_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceWaylandPresentationSupportKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_wayland_presentation_support_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_android_surface"] pub mod android_surface { use super::super::*; pub use { crate::vk::KHR_ANDROID_SURFACE_NAME as NAME, crate::vk::KHR_ANDROID_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_android_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_android_surface instance-level function pointers"] pub struct InstanceFn { pub create_android_surface_khr: PFN_vkCreateAndroidSurfaceKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> 
*const c_void) -> Self { Self { create_android_surface_khr: unsafe { unsafe extern "system" fn create_android_surface_khr( _instance: crate::vk::Instance, _p_create_info: *const AndroidSurfaceCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_android_surface_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateAndroidSurfaceKHR\0"); let val = _f(cname); if val.is_null() { create_android_surface_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_win32_surface"] pub mod win32_surface { use super::super::*; pub use { crate::vk::KHR_WIN32_SURFACE_NAME as NAME, crate::vk::KHR_WIN32_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_win32_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_win32_surface instance-level function pointers"] pub struct InstanceFn { pub create_win32_surface_khr: PFN_vkCreateWin32SurfaceKHR, pub get_physical_device_win32_presentation_support_khr: PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_win32_surface_khr: unsafe { unsafe extern "system" fn create_win32_surface_khr( _instance: crate::vk::Instance, _p_create_info: *const Win32SurfaceCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_win32_surface_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateWin32SurfaceKHR\0"); let val = _f(cname); if val.is_null() { create_win32_surface_khr } else { ::core::mem::transmute(val) } }, get_physical_device_win32_presentation_support_khr: unsafe { unsafe extern "system" fn get_physical_device_win32_presentation_support_khr( _physical_device: PhysicalDevice, _queue_family_index: u32, ) -> Bool32 { panic!(concat!( "Unable to load ", stringify!(get_physical_device_win32_presentation_support_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceWin32PresentationSupportKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_win32_presentation_support_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_sampler_mirror_clamp_to_edge"] pub mod sampler_mirror_clamp_to_edge { use super::super::*; pub use { crate::vk::KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_NAME as NAME, crate::vk::KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_video_queue"] pub mod video_queue { use super::super::*; pub use { crate::vk::KHR_VIDEO_QUEUE_NAME as NAME, crate::vk::KHR_VIDEO_QUEUE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_video_queue instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn 
new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_video_queue instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_video_capabilities_khr: PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR, pub get_physical_device_video_format_properties_khr: PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_video_capabilities_khr: unsafe { unsafe extern "system" fn get_physical_device_video_capabilities_khr( _physical_device: PhysicalDevice, _p_video_profile: *const VideoProfileInfoKHR<'_>, _p_capabilities: *mut VideoCapabilitiesKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_video_capabilities_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceVideoCapabilitiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_video_capabilities_khr } else { ::core::mem::transmute(val) } }, get_physical_device_video_format_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_video_format_properties_khr( _physical_device: PhysicalDevice, _p_video_format_info: *const PhysicalDeviceVideoFormatInfoKHR<'_>, _p_video_format_property_count: *mut u32, _p_video_format_properties: *mut VideoFormatPropertiesKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_video_format_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceVideoFormatPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_video_format_properties_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_video_queue device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_video_queue device-level function pointers"] pub struct DeviceFn { pub create_video_session_khr: PFN_vkCreateVideoSessionKHR, pub destroy_video_session_khr: PFN_vkDestroyVideoSessionKHR, pub get_video_session_memory_requirements_khr: PFN_vkGetVideoSessionMemoryRequirementsKHR, pub bind_video_session_memory_khr: PFN_vkBindVideoSessionMemoryKHR, pub create_video_session_parameters_khr: PFN_vkCreateVideoSessionParametersKHR, pub update_video_session_parameters_khr: PFN_vkUpdateVideoSessionParametersKHR, pub destroy_video_session_parameters_khr: PFN_vkDestroyVideoSessionParametersKHR, pub cmd_begin_video_coding_khr: PFN_vkCmdBeginVideoCodingKHR, pub cmd_end_video_coding_khr: PFN_vkCmdEndVideoCodingKHR, pub cmd_control_video_coding_khr: PFN_vkCmdControlVideoCodingKHR, } unsafe impl Send 
for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_video_session_khr: unsafe { unsafe extern "system" fn create_video_session_khr( _device: crate::vk::Device, _p_create_info: *const VideoSessionCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_video_session: *mut VideoSessionKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_video_session_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateVideoSessionKHR\0"); let val = _f(cname); if val.is_null() { create_video_session_khr } else { ::core::mem::transmute(val) } }, destroy_video_session_khr: unsafe { unsafe extern "system" fn destroy_video_session_khr( _device: crate::vk::Device, _video_session: VideoSessionKHR, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_video_session_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyVideoSessionKHR\0"); let val = _f(cname); if val.is_null() { destroy_video_session_khr } else { ::core::mem::transmute(val) } }, get_video_session_memory_requirements_khr: unsafe { unsafe extern "system" fn get_video_session_memory_requirements_khr( _device: crate::vk::Device, _video_session: VideoSessionKHR, _p_memory_requirements_count: *mut u32, _p_memory_requirements: *mut VideoSessionMemoryRequirementsKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_video_session_memory_requirements_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetVideoSessionMemoryRequirementsKHR\0", ); let val = _f(cname); if val.is_null() { get_video_session_memory_requirements_khr } else { ::core::mem::transmute(val) } }, bind_video_session_memory_khr: unsafe { unsafe extern "system" fn bind_video_session_memory_khr( _device: crate::vk::Device, _video_session: VideoSessionKHR, _bind_session_memory_info_count: u32, _p_bind_session_memory_infos: *const BindVideoSessionMemoryInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(bind_video_session_memory_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBindVideoSessionMemoryKHR\0"); let val = _f(cname); if val.is_null() { bind_video_session_memory_khr } else { ::core::mem::transmute(val) } }, create_video_session_parameters_khr: unsafe { unsafe extern "system" fn create_video_session_parameters_khr( _device: crate::vk::Device, _p_create_info: *const VideoSessionParametersCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_video_session_parameters: *mut VideoSessionParametersKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_video_session_parameters_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateVideoSessionParametersKHR\0", ); let val = _f(cname); if val.is_null() { create_video_session_parameters_khr } else { ::core::mem::transmute(val) } }, update_video_session_parameters_khr: unsafe { unsafe extern "system" fn update_video_session_parameters_khr( _device: crate::vk::Device, _video_session_parameters: VideoSessionParametersKHR, _p_update_info: *const VideoSessionParametersUpdateInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(update_video_session_parameters_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkUpdateVideoSessionParametersKHR\0", ); let val = _f(cname); if val.is_null() { update_video_session_parameters_khr } else { 
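// Illustrative sketch (editor's addition, not part of the generated bindings): issuing a
// VK_KHR_video_queue session-parameters update through the device wrapper above. The update
// sequence count must increase monotonically per the Vulkan spec; the helper name and the
// crate-root `ash::khr::video_queue` path are assumptions.
#[allow(dead_code)]
unsafe fn update_video_session_parameters_example(
    video_queue: &ash::khr::video_queue::Device,
    parameters: ash::vk::VideoSessionParametersKHR,
    update_sequence_count: u32,
) -> Result<(), ash::vk::Result> {
    let mut update_info = ash::vk::VideoSessionParametersUpdateInfoKHR::default();
    update_info.update_sequence_count = update_sequence_count;
    match (video_queue.fp().update_video_session_parameters_khr)(
        video_queue.device(),
        parameters,
        &update_info,
    ) {
        ash::vk::Result::SUCCESS => Ok(()),
        err => Err(err),
    }
}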
::core::mem::transmute(val) } }, destroy_video_session_parameters_khr: unsafe { unsafe extern "system" fn destroy_video_session_parameters_khr( _device: crate::vk::Device, _video_session_parameters: VideoSessionParametersKHR, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_video_session_parameters_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyVideoSessionParametersKHR\0", ); let val = _f(cname); if val.is_null() { destroy_video_session_parameters_khr } else { ::core::mem::transmute(val) } }, cmd_begin_video_coding_khr: unsafe { unsafe extern "system" fn cmd_begin_video_coding_khr( _command_buffer: CommandBuffer, _p_begin_info: *const VideoBeginCodingInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_video_coding_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginVideoCodingKHR\0"); let val = _f(cname); if val.is_null() { cmd_begin_video_coding_khr } else { ::core::mem::transmute(val) } }, cmd_end_video_coding_khr: unsafe { unsafe extern "system" fn cmd_end_video_coding_khr( _command_buffer: CommandBuffer, _p_end_coding_info: *const VideoEndCodingInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_end_video_coding_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndVideoCodingKHR\0"); let val = _f(cname); if val.is_null() { cmd_end_video_coding_khr } else { ::core::mem::transmute(val) } }, cmd_control_video_coding_khr: unsafe { unsafe extern "system" fn cmd_control_video_coding_khr( _command_buffer: CommandBuffer, _p_coding_control_info: *const VideoCodingControlInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_control_video_coding_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdControlVideoCodingKHR\0"); let val = _f(cname); if val.is_null() { cmd_control_video_coding_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_video_decode_queue"] pub mod video_decode_queue { use super::super::*; pub use { crate::vk::KHR_VIDEO_DECODE_QUEUE_NAME as NAME, crate::vk::KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_video_decode_queue device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_video_decode_queue device-level function pointers"] pub struct DeviceFn { pub cmd_decode_video_khr: PFN_vkCmdDecodeVideoKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_decode_video_khr: unsafe { unsafe extern "system" fn cmd_decode_video_khr( _command_buffer: CommandBuffer, _p_decode_info: *const VideoDecodeInfoKHR<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_decode_video_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDecodeVideoKHR\0"); let val = _f(cname); if val.is_null() { cmd_decode_video_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_video_encode_h264"] pub mod 
video_encode_h264 { use super::super::*; pub use { crate::vk::KHR_VIDEO_ENCODE_H264_NAME as NAME, crate::vk::KHR_VIDEO_ENCODE_H264_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_video_encode_h265"] pub mod video_encode_h265 { use super::super::*; pub use { crate::vk::KHR_VIDEO_ENCODE_H265_NAME as NAME, crate::vk::KHR_VIDEO_ENCODE_H265_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_video_decode_h264"] pub mod video_decode_h264 { use super::super::*; pub use { crate::vk::KHR_VIDEO_DECODE_H264_NAME as NAME, crate::vk::KHR_VIDEO_DECODE_H264_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_dynamic_rendering"] pub mod dynamic_rendering { use super::super::*; pub use { crate::vk::KHR_DYNAMIC_RENDERING_NAME as NAME, crate::vk::KHR_DYNAMIC_RENDERING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_dynamic_rendering device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_dynamic_rendering device-level function pointers"] pub struct DeviceFn { pub cmd_begin_rendering_khr: PFN_vkCmdBeginRendering, pub cmd_end_rendering_khr: PFN_vkCmdEndRendering, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_begin_rendering_khr: unsafe { unsafe extern "system" fn cmd_begin_rendering_khr( _command_buffer: CommandBuffer, _p_rendering_info: *const RenderingInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_rendering_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderingKHR\0"); let val = _f(cname); if val.is_null() { cmd_begin_rendering_khr } else { ::core::mem::transmute(val) } }, cmd_end_rendering_khr: unsafe { unsafe extern "system" fn cmd_end_rendering_khr( _command_buffer: CommandBuffer, ) { panic!(concat!( "Unable to load ", stringify!(cmd_end_rendering_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderingKHR\0"); let val = _f(cname); if val.is_null() { cmd_end_rendering_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_multiview"] pub mod multiview { use super::super::*; pub use { crate::vk::KHR_MULTIVIEW_NAME as NAME, crate::vk::KHR_MULTIVIEW_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_get_physical_device_properties2"] pub mod get_physical_device_properties2 { use super::super::*; pub use { crate::vk::KHR_GET_PHYSICAL_DEVICE_PROPERTIES2_NAME as NAME, crate::vk::KHR_GET_PHYSICAL_DEVICE_PROPERTIES2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_get_physical_device_properties2 instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn 
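// Illustrative sketch (editor's addition, not part of the generated bindings): wrapping draw
// recording in the VK_KHR_dynamic_rendering begin/end commands defined above. The command
// buffer is assumed to be in the recording state and the RenderingInfo to be filled out by
// the caller; the crate-root `ash::khr::dynamic_rendering` path is an assumption.
#[allow(dead_code)]
unsafe fn record_dynamic_rendering_example(
    dynamic_rendering: &ash::khr::dynamic_rendering::Device,
    command_buffer: ash::vk::CommandBuffer,
    rendering_info: &ash::vk::RenderingInfo<'_>,
) {
    (dynamic_rendering.fp().cmd_begin_rendering_khr)(command_buffer, rendering_info);
    // ... vkCmdDraw* calls would be recorded here ...
    (dynamic_rendering.fp().cmd_end_rendering_khr)(command_buffer);
}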
instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_get_physical_device_properties2 instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_features2_khr: PFN_vkGetPhysicalDeviceFeatures2, pub get_physical_device_properties2_khr: PFN_vkGetPhysicalDeviceProperties2, pub get_physical_device_format_properties2_khr: PFN_vkGetPhysicalDeviceFormatProperties2, pub get_physical_device_image_format_properties2_khr: PFN_vkGetPhysicalDeviceImageFormatProperties2, pub get_physical_device_queue_family_properties2_khr: PFN_vkGetPhysicalDeviceQueueFamilyProperties2, pub get_physical_device_memory_properties2_khr: PFN_vkGetPhysicalDeviceMemoryProperties2, pub get_physical_device_sparse_image_format_properties2_khr: PFN_vkGetPhysicalDeviceSparseImageFormatProperties2, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_features2_khr: unsafe { unsafe extern "system" fn get_physical_device_features2_khr( _physical_device: PhysicalDevice, _p_features: *mut PhysicalDeviceFeatures2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_features2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceFeatures2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_features2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_properties2_khr( _physical_device: PhysicalDevice, _p_properties: *mut PhysicalDeviceProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_properties2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_format_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_format_properties2_khr( _physical_device: PhysicalDevice, _format: Format, _p_format_properties: *mut FormatProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_format_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceFormatProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_format_properties2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_image_format_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_image_format_properties2_khr( _physical_device: PhysicalDevice, _p_image_format_info: *const PhysicalDeviceImageFormatInfo2<'_>, _p_image_format_properties: *mut ImageFormatProperties2<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_image_format_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceImageFormatProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_image_format_properties2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_queue_family_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_queue_family_properties2_khr( _physical_device: PhysicalDevice, _p_queue_family_property_count: *mut u32, _p_queue_family_properties: *mut QueueFamilyProperties2<'_>, ) { panic!(concat!( "Unable to load ", 
stringify!(get_physical_device_queue_family_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceQueueFamilyProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_queue_family_properties2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_memory_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_memory_properties2_khr( _physical_device: PhysicalDevice, _p_memory_properties: *mut PhysicalDeviceMemoryProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_memory_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceMemoryProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_memory_properties2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_sparse_image_format_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_sparse_image_format_properties2_khr( _physical_device: PhysicalDevice, _p_format_info: *const PhysicalDeviceSparseImageFormatInfo2<'_>, _p_property_count: *mut u32, _p_properties: *mut SparseImageFormatProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_sparse_image_format_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSparseImageFormatProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_sparse_image_format_properties2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_device_group"] pub mod device_group { use super::super::*; pub use { crate::vk::KHR_DEVICE_GROUP_NAME as NAME, crate::vk::KHR_DEVICE_GROUP_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_device_group instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_device_group instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_present_rectangles_khr: PFN_vkGetPhysicalDevicePresentRectanglesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_present_rectangles_khr: unsafe { unsafe extern "system" fn get_physical_device_present_rectangles_khr( _physical_device: PhysicalDevice, _surface: SurfaceKHR, _p_rect_count: *mut u32, _p_rects: *mut Rect2D, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_present_rectangles_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDevicePresentRectanglesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_present_rectangles_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_device_group device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = 
device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_device_group device-level function pointers"] pub struct DeviceFn { pub get_device_group_peer_memory_features_khr: PFN_vkGetDeviceGroupPeerMemoryFeatures, pub cmd_set_device_mask_khr: PFN_vkCmdSetDeviceMask, pub cmd_dispatch_base_khr: PFN_vkCmdDispatchBase, pub get_device_group_present_capabilities_khr: PFN_vkGetDeviceGroupPresentCapabilitiesKHR, pub get_device_group_surface_present_modes_khr: PFN_vkGetDeviceGroupSurfacePresentModesKHR, pub acquire_next_image2_khr: PFN_vkAcquireNextImage2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_device_group_peer_memory_features_khr: unsafe { unsafe extern "system" fn get_device_group_peer_memory_features_khr( _device: crate::vk::Device, _heap_index: u32, _local_device_index: u32, _remote_device_index: u32, _p_peer_memory_features: *mut PeerMemoryFeatureFlags, ) { panic!(concat!( "Unable to load ", stringify!(get_device_group_peer_memory_features_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceGroupPeerMemoryFeaturesKHR\0", ); let val = _f(cname); if val.is_null() { get_device_group_peer_memory_features_khr } else { ::core::mem::transmute(val) } }, cmd_set_device_mask_khr: unsafe { unsafe extern "system" fn cmd_set_device_mask_khr( _command_buffer: CommandBuffer, _device_mask: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_device_mask_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDeviceMaskKHR\0"); let val = _f(cname); if val.is_null() { cmd_set_device_mask_khr } else { ::core::mem::transmute(val) } }, cmd_dispatch_base_khr: unsafe { unsafe extern "system" fn cmd_dispatch_base_khr( _command_buffer: CommandBuffer, _base_group_x: u32, _base_group_y: u32, _base_group_z: u32, _group_count_x: u32, _group_count_y: u32, _group_count_z: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_dispatch_base_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchBaseKHR\0"); let val = _f(cname); if val.is_null() { cmd_dispatch_base_khr } else { ::core::mem::transmute(val) } }, get_device_group_present_capabilities_khr: unsafe { unsafe extern "system" fn get_device_group_present_capabilities_khr( _device: crate::vk::Device, _p_device_group_present_capabilities : * mut DeviceGroupPresentCapabilitiesKHR < '_ >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_group_present_capabilities_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceGroupPresentCapabilitiesKHR\0", ); let val = _f(cname); if val.is_null() { get_device_group_present_capabilities_khr } else { ::core::mem::transmute(val) } }, get_device_group_surface_present_modes_khr: unsafe { unsafe extern "system" fn get_device_group_surface_present_modes_khr( _device: crate::vk::Device, _surface: SurfaceKHR, _p_modes: *mut DeviceGroupPresentModeFlagsKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_device_group_surface_present_modes_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceGroupSurfacePresentModesKHR\0", ); let val = _f(cname); if 
val.is_null() { get_device_group_surface_present_modes_khr } else { ::core::mem::transmute(val) } }, acquire_next_image2_khr: unsafe { unsafe extern "system" fn acquire_next_image2_khr( _device: crate::vk::Device, _p_acquire_info: *const AcquireNextImageInfoKHR<'_>, _p_image_index: *mut u32, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_next_image2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireNextImage2KHR\0"); let val = _f(cname); if val.is_null() { acquire_next_image2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_draw_parameters"] pub mod shader_draw_parameters { use super::super::*; pub use { crate::vk::KHR_SHADER_DRAW_PARAMETERS_NAME as NAME, crate::vk::KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_maintenance1"] pub mod maintenance1 { use super::super::*; pub use { crate::vk::KHR_MAINTENANCE1_NAME as NAME, crate::vk::KHR_MAINTENANCE1_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_maintenance1 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_maintenance1 device-level function pointers"] pub struct DeviceFn { pub trim_command_pool_khr: PFN_vkTrimCommandPool, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { trim_command_pool_khr: unsafe { unsafe extern "system" fn trim_command_pool_khr( _device: crate::vk::Device, _command_pool: CommandPool, _flags: CommandPoolTrimFlags, ) { panic!(concat!( "Unable to load ", stringify!(trim_command_pool_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkTrimCommandPoolKHR\0"); let val = _f(cname); if val.is_null() { trim_command_pool_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_device_group_creation"] pub mod device_group_creation { use super::super::*; pub use { crate::vk::KHR_DEVICE_GROUP_CREATION_NAME as NAME, crate::vk::KHR_DEVICE_GROUP_CREATION_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_device_group_creation instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_device_group_creation instance-level function pointers"] pub struct InstanceFn { pub enumerate_physical_device_groups_khr: PFN_vkEnumeratePhysicalDeviceGroups, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const 
c_void) -> Self { Self { enumerate_physical_device_groups_khr: unsafe { unsafe extern "system" fn enumerate_physical_device_groups_khr( _instance: crate::vk::Instance, _p_physical_device_group_count: *mut u32, _p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties< '_, >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_physical_device_groups_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkEnumeratePhysicalDeviceGroupsKHR\0", ); let val = _f(cname); if val.is_null() { enumerate_physical_device_groups_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_memory_capabilities"] pub mod external_memory_capabilities { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_MEMORY_CAPABILITIES_NAME as NAME, crate::vk::KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_memory_capabilities instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_memory_capabilities instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_external_buffer_properties_khr: PFN_vkGetPhysicalDeviceExternalBufferProperties, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_external_buffer_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_external_buffer_properties_khr( _physical_device: PhysicalDevice, _p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo<'_>, _p_external_buffer_properties: *mut ExternalBufferProperties<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_buffer_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceExternalBufferPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_buffer_properties_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_memory"] pub mod external_memory { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_MEMORY_NAME as NAME, crate::vk::KHR_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_external_memory_win32"] pub mod external_memory_win32 { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_MEMORY_WIN32_NAME as NAME, crate::vk::KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_memory_win32 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } 
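// Orientation note (plain comment, not part of the generated bindings): every
// `DeviceFn`/`InstanceFn` table in this file is populated by the same loader
// pattern seen above. A local `extern "system"` stub that panics with
// "Unable to load <function name>" is used as the default value, the
// name-to-pointer loader handed to `load` is queried with the command's
// null-terminated Vulkan name, and a non-null result is transmuted into the
// typed function pointer, while a null result leaves the panicking stub in
// place.
//
// A minimal usage sketch for the VK_KHR_external_memory_win32 wrapper defined
// in this module; `instance`, `device` and `get_info` are illustrative names
// for an existing `ash::Instance`, `ash::Device` and a filled-in
// `MemoryGetWin32HandleInfoKHR`, none of which come from this file:
//
//     let ext = Device::new(&instance, &device);
//     let mut handle: HANDLE = core::ptr::null_mut();
//     let result = unsafe {
//         (ext.fp().get_memory_win32_handle_khr)(ext.device(), &get_info, &mut handle)
//     };
//
// If vkGetMemoryWin32HandleKHR could not be resolved at load time, the call
// above reaches the panicking stub instead of jumping through a null pointer.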
#[derive(Clone)] #[doc = "Raw VK_KHR_external_memory_win32 device-level function pointers"] pub struct DeviceFn { pub get_memory_win32_handle_khr: PFN_vkGetMemoryWin32HandleKHR, pub get_memory_win32_handle_properties_khr: PFN_vkGetMemoryWin32HandlePropertiesKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_memory_win32_handle_khr: unsafe { unsafe extern "system" fn get_memory_win32_handle_khr( _device: crate::vk::Device, _p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR<'_>, _p_handle: *mut HANDLE, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_win32_handle_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryWin32HandleKHR\0"); let val = _f(cname); if val.is_null() { get_memory_win32_handle_khr } else { ::core::mem::transmute(val) } }, get_memory_win32_handle_properties_khr: unsafe { unsafe extern "system" fn get_memory_win32_handle_properties_khr( _device: crate::vk::Device, _handle_type: ExternalMemoryHandleTypeFlags, _handle: HANDLE, _p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR< '_, >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_win32_handle_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetMemoryWin32HandlePropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_memory_win32_handle_properties_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_memory_fd"] pub mod external_memory_fd { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_MEMORY_FD_NAME as NAME, crate::vk::KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_memory_fd device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_memory_fd device-level function pointers"] pub struct DeviceFn { pub get_memory_fd_khr: PFN_vkGetMemoryFdKHR, pub get_memory_fd_properties_khr: PFN_vkGetMemoryFdPropertiesKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_memory_fd_khr: unsafe { unsafe extern "system" fn get_memory_fd_khr( _device: crate::vk::Device, _p_get_fd_info: *const MemoryGetFdInfoKHR<'_>, _p_fd: *mut c_int, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_memory_fd_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryFdKHR\0"); let val = _f(cname); if val.is_null() { get_memory_fd_khr } else { ::core::mem::transmute(val) } }, get_memory_fd_properties_khr: unsafe { unsafe extern "system" fn get_memory_fd_properties_khr( _device: crate::vk::Device, _handle_type: ExternalMemoryHandleTypeFlags, _fd: c_int, _p_memory_fd_properties: *mut MemoryFdPropertiesKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", 
stringify!(get_memory_fd_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryFdPropertiesKHR\0"); let val = _f(cname); if val.is_null() { get_memory_fd_properties_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_win32_keyed_mutex"] pub mod win32_keyed_mutex { use super::super::*; pub use { crate::vk::KHR_WIN32_KEYED_MUTEX_NAME as NAME, crate::vk::KHR_WIN32_KEYED_MUTEX_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_external_semaphore_capabilities"] pub mod external_semaphore_capabilities { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_NAME as NAME, crate::vk::KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_semaphore_capabilities instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_semaphore_capabilities instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_external_semaphore_properties_khr: PFN_vkGetPhysicalDeviceExternalSemaphoreProperties, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_external_semaphore_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_external_semaphore_properties_khr( _physical_device: PhysicalDevice, _p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo< '_, >, _p_external_semaphore_properties: *mut ExternalSemaphoreProperties<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_semaphore_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceExternalSemaphorePropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_semaphore_properties_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_semaphore"] pub mod external_semaphore { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_SEMAPHORE_NAME as NAME, crate::vk::KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_external_semaphore_win32"] pub mod external_semaphore_win32 { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_SEMAPHORE_WIN32_NAME as NAME, crate::vk::KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_semaphore_win32 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_semaphore_win32 device-level function 
pointers"] pub struct DeviceFn { pub import_semaphore_win32_handle_khr: PFN_vkImportSemaphoreWin32HandleKHR, pub get_semaphore_win32_handle_khr: PFN_vkGetSemaphoreWin32HandleKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { import_semaphore_win32_handle_khr: unsafe { unsafe extern "system" fn import_semaphore_win32_handle_khr( _device: crate::vk::Device, _p_import_semaphore_win32_handle_info : * const ImportSemaphoreWin32HandleInfoKHR < '_ >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(import_semaphore_win32_handle_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkImportSemaphoreWin32HandleKHR\0", ); let val = _f(cname); if val.is_null() { import_semaphore_win32_handle_khr } else { ::core::mem::transmute(val) } }, get_semaphore_win32_handle_khr: unsafe { unsafe extern "system" fn get_semaphore_win32_handle_khr( _device: crate::vk::Device, _p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR<'_>, _p_handle: *mut HANDLE, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_semaphore_win32_handle_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSemaphoreWin32HandleKHR\0"); let val = _f(cname); if val.is_null() { get_semaphore_win32_handle_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_semaphore_fd"] pub mod external_semaphore_fd { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_SEMAPHORE_FD_NAME as NAME, crate::vk::KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_semaphore_fd device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_semaphore_fd device-level function pointers"] pub struct DeviceFn { pub import_semaphore_fd_khr: PFN_vkImportSemaphoreFdKHR, pub get_semaphore_fd_khr: PFN_vkGetSemaphoreFdKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { import_semaphore_fd_khr: unsafe { unsafe extern "system" fn import_semaphore_fd_khr( _device: crate::vk::Device, _p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(import_semaphore_fd_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkImportSemaphoreFdKHR\0"); let val = _f(cname); if val.is_null() { import_semaphore_fd_khr } else { ::core::mem::transmute(val) } }, get_semaphore_fd_khr: unsafe { unsafe extern "system" fn get_semaphore_fd_khr( _device: crate::vk::Device, _p_get_fd_info: *const SemaphoreGetFdInfoKHR<'_>, _p_fd: *mut c_int, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_semaphore_fd_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSemaphoreFdKHR\0"); let val = _f(cname); if val.is_null() { get_semaphore_fd_khr } else { 
::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_push_descriptor"] pub mod push_descriptor { use super::super::*; pub use { crate::vk::KHR_PUSH_DESCRIPTOR_NAME as NAME, crate::vk::KHR_PUSH_DESCRIPTOR_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_push_descriptor device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_push_descriptor device-level function pointers"] pub struct DeviceFn { pub cmd_push_descriptor_set_khr: PFN_vkCmdPushDescriptorSetKHR, pub cmd_push_descriptor_set_with_template_khr: PFN_vkCmdPushDescriptorSetWithTemplateKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_push_descriptor_set_khr: unsafe { unsafe extern "system" fn cmd_push_descriptor_set_khr( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _layout: PipelineLayout, _set: u32, _descriptor_write_count: u32, _p_descriptor_writes: *const WriteDescriptorSet<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_push_descriptor_set_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPushDescriptorSetKHR\0"); let val = _f(cname); if val.is_null() { cmd_push_descriptor_set_khr } else { ::core::mem::transmute(val) } }, cmd_push_descriptor_set_with_template_khr: unsafe { unsafe extern "system" fn cmd_push_descriptor_set_with_template_khr( _command_buffer: CommandBuffer, _descriptor_update_template: DescriptorUpdateTemplate, _layout: PipelineLayout, _set: u32, _p_data: *const c_void, ) { panic!(concat!( "Unable to load ", stringify!(cmd_push_descriptor_set_with_template_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdPushDescriptorSetWithTemplateKHR\0", ); let val = _f(cname); if val.is_null() { cmd_push_descriptor_set_with_template_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_float16_int8"] pub mod shader_float16_int8 { use super::super::*; pub use { crate::vk::KHR_SHADER_FLOAT16_INT8_NAME as NAME, crate::vk::KHR_SHADER_FLOAT16_INT8_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_16bit_storage"] pub mod _16bit_storage { use super::super::*; pub use { crate::vk::KHR_16BIT_STORAGE_NAME as NAME, crate::vk::KHR_16BIT_STORAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_incremental_present"] pub mod incremental_present { use super::super::*; pub use { crate::vk::KHR_INCREMENTAL_PRESENT_NAME as NAME, crate::vk::KHR_INCREMENTAL_PRESENT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_descriptor_update_template"] pub mod descriptor_update_template { use super::super::*; pub use { crate::vk::KHR_DESCRIPTOR_UPDATE_TEMPLATE_NAME as NAME, crate::vk::KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_descriptor_update_template device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) 
-> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_descriptor_update_template device-level function pointers"] pub struct DeviceFn { pub create_descriptor_update_template_khr: PFN_vkCreateDescriptorUpdateTemplate, pub destroy_descriptor_update_template_khr: PFN_vkDestroyDescriptorUpdateTemplate, pub update_descriptor_set_with_template_khr: PFN_vkUpdateDescriptorSetWithTemplate, pub cmd_push_descriptor_set_with_template_khr: PFN_vkCmdPushDescriptorSetWithTemplateKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_descriptor_update_template_khr: unsafe { unsafe extern "system" fn create_descriptor_update_template_khr( _device: crate::vk::Device, _p_create_info: *const DescriptorUpdateTemplateCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_descriptor_update_template: *mut DescriptorUpdateTemplate, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_descriptor_update_template_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateDescriptorUpdateTemplateKHR\0", ); let val = _f(cname); if val.is_null() { create_descriptor_update_template_khr } else { ::core::mem::transmute(val) } }, destroy_descriptor_update_template_khr: unsafe { unsafe extern "system" fn destroy_descriptor_update_template_khr( _device: crate::vk::Device, _descriptor_update_template: DescriptorUpdateTemplate, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_descriptor_update_template_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyDescriptorUpdateTemplateKHR\0", ); let val = _f(cname); if val.is_null() { destroy_descriptor_update_template_khr } else { ::core::mem::transmute(val) } }, update_descriptor_set_with_template_khr: unsafe { unsafe extern "system" fn update_descriptor_set_with_template_khr( _device: crate::vk::Device, _descriptor_set: DescriptorSet, _descriptor_update_template: DescriptorUpdateTemplate, _p_data: *const c_void, ) { panic!(concat!( "Unable to load ", stringify!(update_descriptor_set_with_template_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkUpdateDescriptorSetWithTemplateKHR\0", ); let val = _f(cname); if val.is_null() { update_descriptor_set_with_template_khr } else { ::core::mem::transmute(val) } }, cmd_push_descriptor_set_with_template_khr: unsafe { unsafe extern "system" fn cmd_push_descriptor_set_with_template_khr( _command_buffer: CommandBuffer, _descriptor_update_template: DescriptorUpdateTemplate, _layout: PipelineLayout, _set: u32, _p_data: *const c_void, ) { panic!(concat!( "Unable to load ", stringify!(cmd_push_descriptor_set_with_template_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdPushDescriptorSetWithTemplateKHR\0", ); let val = _f(cname); if val.is_null() { cmd_push_descriptor_set_with_template_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_imageless_framebuffer"] pub mod imageless_framebuffer { use super::super::*; pub use { crate::vk::KHR_IMAGELESS_FRAMEBUFFER_NAME as NAME, crate::vk::KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION as 
SPEC_VERSION, }; } #[doc = "VK_KHR_create_renderpass2"] pub mod create_renderpass2 { use super::super::*; pub use { crate::vk::KHR_CREATE_RENDERPASS2_NAME as NAME, crate::vk::KHR_CREATE_RENDERPASS2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_create_renderpass2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_create_renderpass2 device-level function pointers"] pub struct DeviceFn { pub create_render_pass2_khr: PFN_vkCreateRenderPass2, pub cmd_begin_render_pass2_khr: PFN_vkCmdBeginRenderPass2, pub cmd_next_subpass2_khr: PFN_vkCmdNextSubpass2, pub cmd_end_render_pass2_khr: PFN_vkCmdEndRenderPass2, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_render_pass2_khr: unsafe { unsafe extern "system" fn create_render_pass2_khr( _device: crate::vk::Device, _p_create_info: *const RenderPassCreateInfo2<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_render_pass: *mut RenderPass, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_render_pass2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateRenderPass2KHR\0"); let val = _f(cname); if val.is_null() { create_render_pass2_khr } else { ::core::mem::transmute(val) } }, cmd_begin_render_pass2_khr: unsafe { unsafe extern "system" fn cmd_begin_render_pass2_khr( _command_buffer: CommandBuffer, _p_render_pass_begin: *const RenderPassBeginInfo<'_>, _p_subpass_begin_info: *const SubpassBeginInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_render_pass2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderPass2KHR\0"); let val = _f(cname); if val.is_null() { cmd_begin_render_pass2_khr } else { ::core::mem::transmute(val) } }, cmd_next_subpass2_khr: unsafe { unsafe extern "system" fn cmd_next_subpass2_khr( _command_buffer: CommandBuffer, _p_subpass_begin_info: *const SubpassBeginInfo<'_>, _p_subpass_end_info: *const SubpassEndInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_next_subpass2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdNextSubpass2KHR\0"); let val = _f(cname); if val.is_null() { cmd_next_subpass2_khr } else { ::core::mem::transmute(val) } }, cmd_end_render_pass2_khr: unsafe { unsafe extern "system" fn cmd_end_render_pass2_khr( _command_buffer: CommandBuffer, _p_subpass_end_info: *const SubpassEndInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_end_render_pass2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderPass2KHR\0"); let val = _f(cname); if val.is_null() { cmd_end_render_pass2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shared_presentable_image"] pub mod shared_presentable_image { use super::super::*; pub use { crate::vk::KHR_SHARED_PRESENTABLE_IMAGE_NAME as NAME, crate::vk::KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_shared_presentable_image 
device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_shared_presentable_image device-level function pointers"] pub struct DeviceFn { pub get_swapchain_status_khr: PFN_vkGetSwapchainStatusKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_swapchain_status_khr: unsafe { unsafe extern "system" fn get_swapchain_status_khr( _device: crate::vk::Device, _swapchain: SwapchainKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_swapchain_status_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSwapchainStatusKHR\0"); let val = _f(cname); if val.is_null() { get_swapchain_status_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_fence_capabilities"] pub mod external_fence_capabilities { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_FENCE_CAPABILITIES_NAME as NAME, crate::vk::KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_fence_capabilities instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_fence_capabilities instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_external_fence_properties_khr: PFN_vkGetPhysicalDeviceExternalFenceProperties, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_external_fence_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_external_fence_properties_khr( _physical_device: PhysicalDevice, _p_external_fence_info: *const PhysicalDeviceExternalFenceInfo<'_>, _p_external_fence_properties: *mut ExternalFenceProperties<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_fence_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceExternalFencePropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_fence_properties_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_fence"] pub mod external_fence { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_FENCE_NAME as NAME, crate::vk::KHR_EXTERNAL_FENCE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_external_fence_win32"] pub mod external_fence_win32 { use 
super::super::*; pub use { crate::vk::KHR_EXTERNAL_FENCE_WIN32_NAME as NAME, crate::vk::KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_fence_win32 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_fence_win32 device-level function pointers"] pub struct DeviceFn { pub import_fence_win32_handle_khr: PFN_vkImportFenceWin32HandleKHR, pub get_fence_win32_handle_khr: PFN_vkGetFenceWin32HandleKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { import_fence_win32_handle_khr: unsafe { unsafe extern "system" fn import_fence_win32_handle_khr( _device: crate::vk::Device, _p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR< '_, >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(import_fence_win32_handle_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkImportFenceWin32HandleKHR\0"); let val = _f(cname); if val.is_null() { import_fence_win32_handle_khr } else { ::core::mem::transmute(val) } }, get_fence_win32_handle_khr: unsafe { unsafe extern "system" fn get_fence_win32_handle_khr( _device: crate::vk::Device, _p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR<'_>, _p_handle: *mut HANDLE, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_fence_win32_handle_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetFenceWin32HandleKHR\0"); let val = _f(cname); if val.is_null() { get_fence_win32_handle_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_external_fence_fd"] pub mod external_fence_fd { use super::super::*; pub use { crate::vk::KHR_EXTERNAL_FENCE_FD_NAME as NAME, crate::vk::KHR_EXTERNAL_FENCE_FD_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_external_fence_fd device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_external_fence_fd device-level function pointers"] pub struct DeviceFn { pub import_fence_fd_khr: PFN_vkImportFenceFdKHR, pub get_fence_fd_khr: PFN_vkGetFenceFdKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { import_fence_fd_khr: unsafe { unsafe extern "system" fn import_fence_fd_khr( _device: crate::vk::Device, _p_import_fence_fd_info: *const ImportFenceFdInfoKHR<'_>, ) -> Result { panic!(concat!("Unable to 
load ", stringify!(import_fence_fd_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkImportFenceFdKHR\0"); let val = _f(cname); if val.is_null() { import_fence_fd_khr } else { ::core::mem::transmute(val) } }, get_fence_fd_khr: unsafe { unsafe extern "system" fn get_fence_fd_khr( _device: crate::vk::Device, _p_get_fd_info: *const FenceGetFdInfoKHR<'_>, _p_fd: *mut c_int, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_fence_fd_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetFenceFdKHR\0"); let val = _f(cname); if val.is_null() { get_fence_fd_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_performance_query"] pub mod performance_query { use super::super::*; pub use { crate::vk::KHR_PERFORMANCE_QUERY_NAME as NAME, crate::vk::KHR_PERFORMANCE_QUERY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_performance_query instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_performance_query instance-level function pointers"] pub struct InstanceFn { pub enumerate_physical_device_queue_family_performance_query_counters_khr: PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR, pub get_physical_device_queue_family_performance_query_passes_khr: PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { enumerate_physical_device_queue_family_performance_query_counters_khr: unsafe { unsafe extern "system" fn enumerate_physical_device_queue_family_performance_query_counters_khr( _physical_device: PhysicalDevice, _queue_family_index: u32, _p_counter_count: *mut u32, _p_counters: *mut PerformanceCounterKHR<'_>, _p_counter_descriptions: *mut PerformanceCounterDescriptionKHR<'_>, ) -> Result { panic ! (concat ! ("Unable to load " , stringify ! 
(enumerate_physical_device_queue_family_performance_query_counters_khr))) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR\0", ); let val = _f(cname); if val.is_null() { enumerate_physical_device_queue_family_performance_query_counters_khr } else { ::core::mem::transmute(val) } }, get_physical_device_queue_family_performance_query_passes_khr: unsafe { unsafe extern "system" fn get_physical_device_queue_family_performance_query_passes_khr( _physical_device: PhysicalDevice, _p_performance_query_create_info : * const QueryPoolPerformanceCreateInfoKHR < '_ >, _p_num_passes: *mut u32, ) { panic!(concat!( "Unable to load ", stringify!( get_physical_device_queue_family_performance_query_passes_khr ) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_queue_family_performance_query_passes_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_performance_query device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_performance_query device-level function pointers"] pub struct DeviceFn { pub acquire_profiling_lock_khr: PFN_vkAcquireProfilingLockKHR, pub release_profiling_lock_khr: PFN_vkReleaseProfilingLockKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { acquire_profiling_lock_khr: unsafe { unsafe extern "system" fn acquire_profiling_lock_khr( _device: crate::vk::Device, _p_info: *const AcquireProfilingLockInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_profiling_lock_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireProfilingLockKHR\0"); let val = _f(cname); if val.is_null() { acquire_profiling_lock_khr } else { ::core::mem::transmute(val) } }, release_profiling_lock_khr: unsafe { unsafe extern "system" fn release_profiling_lock_khr( _device: crate::vk::Device, ) { panic!(concat!( "Unable to load ", stringify!(release_profiling_lock_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkReleaseProfilingLockKHR\0"); let val = _f(cname); if val.is_null() { release_profiling_lock_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_maintenance2"] pub mod maintenance2 { use super::super::*; pub use { crate::vk::KHR_MAINTENANCE2_NAME as NAME, crate::vk::KHR_MAINTENANCE2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_get_surface_capabilities2"] pub mod get_surface_capabilities2 { use super::super::*; pub use { crate::vk::KHR_GET_SURFACE_CAPABILITIES2_NAME as NAME, crate::vk::KHR_GET_SURFACE_CAPABILITIES2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_get_surface_capabilities2 instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: 
&crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_get_surface_capabilities2 instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_surface_capabilities2_khr: PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR, pub get_physical_device_surface_formats2_khr: PFN_vkGetPhysicalDeviceSurfaceFormats2KHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_surface_capabilities2_khr: unsafe { unsafe extern "system" fn get_physical_device_surface_capabilities2_khr( _physical_device: PhysicalDevice, _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, _p_surface_capabilities: *mut SurfaceCapabilities2KHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_capabilities2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfaceCapabilities2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_capabilities2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_surface_formats2_khr: unsafe { unsafe extern "system" fn get_physical_device_surface_formats2_khr( _physical_device: PhysicalDevice, _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, _p_surface_format_count: *mut u32, _p_surface_formats: *mut SurfaceFormat2KHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_surface_formats2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSurfaceFormats2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_surface_formats2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_variable_pointers"] pub mod variable_pointers { use super::super::*; pub use { crate::vk::KHR_VARIABLE_POINTERS_NAME as NAME, crate::vk::KHR_VARIABLE_POINTERS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_get_display_properties2"] pub mod get_display_properties2 { use super::super::*; pub use { crate::vk::KHR_GET_DISPLAY_PROPERTIES2_NAME as NAME, crate::vk::KHR_GET_DISPLAY_PROPERTIES2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_get_display_properties2 instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_get_display_properties2 instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_display_properties2_khr: PFN_vkGetPhysicalDeviceDisplayProperties2KHR, pub get_physical_device_display_plane_properties2_khr: PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR, pub get_display_mode_properties2_khr: 
PFN_vkGetDisplayModeProperties2KHR, pub get_display_plane_capabilities2_khr: PFN_vkGetDisplayPlaneCapabilities2KHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_display_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_display_properties2_khr( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut DisplayProperties2KHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_display_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceDisplayProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_display_properties2_khr } else { ::core::mem::transmute(val) } }, get_physical_device_display_plane_properties2_khr: unsafe { unsafe extern "system" fn get_physical_device_display_plane_properties2_khr( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut DisplayPlaneProperties2KHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_display_plane_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceDisplayPlaneProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_display_plane_properties2_khr } else { ::core::mem::transmute(val) } }, get_display_mode_properties2_khr: unsafe { unsafe extern "system" fn get_display_mode_properties2_khr( _physical_device: PhysicalDevice, _display: DisplayKHR, _p_property_count: *mut u32, _p_properties: *mut DisplayModeProperties2KHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_display_mode_properties2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDisplayModeProperties2KHR\0", ); let val = _f(cname); if val.is_null() { get_display_mode_properties2_khr } else { ::core::mem::transmute(val) } }, get_display_plane_capabilities2_khr: unsafe { unsafe extern "system" fn get_display_plane_capabilities2_khr( _physical_device: PhysicalDevice, _p_display_plane_info: *const DisplayPlaneInfo2KHR<'_>, _p_capabilities: *mut DisplayPlaneCapabilities2KHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_display_plane_capabilities2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDisplayPlaneCapabilities2KHR\0", ); let val = _f(cname); if val.is_null() { get_display_plane_capabilities2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_dedicated_allocation"] pub mod dedicated_allocation { use super::super::*; pub use { crate::vk::KHR_DEDICATED_ALLOCATION_NAME as NAME, crate::vk::KHR_DEDICATED_ALLOCATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_storage_buffer_storage_class"] pub mod storage_buffer_storage_class { use super::super::*; pub use { crate::vk::KHR_STORAGE_BUFFER_STORAGE_CLASS_NAME as NAME, crate::vk::KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_relaxed_block_layout"] pub mod relaxed_block_layout { use super::super::*; pub use { crate::vk::KHR_RELAXED_BLOCK_LAYOUT_NAME as NAME, crate::vk::KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_get_memory_requirements2"] pub mod get_memory_requirements2 { use super::super::*; pub use { crate::vk::KHR_GET_MEMORY_REQUIREMENTS2_NAME as NAME, 
crate::vk::KHR_GET_MEMORY_REQUIREMENTS2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_get_memory_requirements2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_get_memory_requirements2 device-level function pointers"] pub struct DeviceFn { pub get_image_memory_requirements2_khr: PFN_vkGetImageMemoryRequirements2, pub get_buffer_memory_requirements2_khr: PFN_vkGetBufferMemoryRequirements2, pub get_image_sparse_memory_requirements2_khr: PFN_vkGetImageSparseMemoryRequirements2, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_image_memory_requirements2_khr: unsafe { unsafe extern "system" fn get_image_memory_requirements2_khr( _device: crate::vk::Device, _p_info: *const ImageMemoryRequirementsInfo2<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_memory_requirements2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageMemoryRequirements2KHR\0", ); let val = _f(cname); if val.is_null() { get_image_memory_requirements2_khr } else { ::core::mem::transmute(val) } }, get_buffer_memory_requirements2_khr: unsafe { unsafe extern "system" fn get_buffer_memory_requirements2_khr( _device: crate::vk::Device, _p_info: *const BufferMemoryRequirementsInfo2<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_buffer_memory_requirements2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetBufferMemoryRequirements2KHR\0", ); let val = _f(cname); if val.is_null() { get_buffer_memory_requirements2_khr } else { ::core::mem::transmute(val) } }, get_image_sparse_memory_requirements2_khr: unsafe { unsafe extern "system" fn get_image_sparse_memory_requirements2_khr( _device: crate::vk::Device, _p_info: *const ImageSparseMemoryRequirementsInfo2<'_>, _p_sparse_memory_requirement_count: *mut u32, _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_sparse_memory_requirements2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageSparseMemoryRequirements2KHR\0", ); let val = _f(cname); if val.is_null() { get_image_sparse_memory_requirements2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_image_format_list"] pub mod image_format_list { use super::super::*; pub use { crate::vk::KHR_IMAGE_FORMAT_LIST_NAME as NAME, crate::vk::KHR_IMAGE_FORMAT_LIST_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_acceleration_structure"] pub mod acceleration_structure { use super::super::*; pub use { crate::vk::KHR_ACCELERATION_STRUCTURE_NAME as NAME, crate::vk::KHR_ACCELERATION_STRUCTURE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_acceleration_structure device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl 
Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_acceleration_structure device-level function pointers"] pub struct DeviceFn { pub create_acceleration_structure_khr: PFN_vkCreateAccelerationStructureKHR, pub destroy_acceleration_structure_khr: PFN_vkDestroyAccelerationStructureKHR, pub cmd_build_acceleration_structures_khr: PFN_vkCmdBuildAccelerationStructuresKHR, pub cmd_build_acceleration_structures_indirect_khr: PFN_vkCmdBuildAccelerationStructuresIndirectKHR, pub build_acceleration_structures_khr: PFN_vkBuildAccelerationStructuresKHR, pub copy_acceleration_structure_khr: PFN_vkCopyAccelerationStructureKHR, pub copy_acceleration_structure_to_memory_khr: PFN_vkCopyAccelerationStructureToMemoryKHR, pub copy_memory_to_acceleration_structure_khr: PFN_vkCopyMemoryToAccelerationStructureKHR, pub write_acceleration_structures_properties_khr: PFN_vkWriteAccelerationStructuresPropertiesKHR, pub cmd_copy_acceleration_structure_khr: PFN_vkCmdCopyAccelerationStructureKHR, pub cmd_copy_acceleration_structure_to_memory_khr: PFN_vkCmdCopyAccelerationStructureToMemoryKHR, pub cmd_copy_memory_to_acceleration_structure_khr: PFN_vkCmdCopyMemoryToAccelerationStructureKHR, pub get_acceleration_structure_device_address_khr: PFN_vkGetAccelerationStructureDeviceAddressKHR, pub cmd_write_acceleration_structures_properties_khr: PFN_vkCmdWriteAccelerationStructuresPropertiesKHR, pub get_device_acceleration_structure_compatibility_khr: PFN_vkGetDeviceAccelerationStructureCompatibilityKHR, pub get_acceleration_structure_build_sizes_khr: PFN_vkGetAccelerationStructureBuildSizesKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_acceleration_structure_khr: unsafe { unsafe extern "system" fn create_acceleration_structure_khr( _device: crate::vk::Device, _p_create_info: *const AccelerationStructureCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_acceleration_structure: *mut AccelerationStructureKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_acceleration_structure_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateAccelerationStructureKHR\0", ); let val = _f(cname); if val.is_null() { create_acceleration_structure_khr } else { ::core::mem::transmute(val) } }, destroy_acceleration_structure_khr: unsafe { unsafe extern "system" fn destroy_acceleration_structure_khr( _device: crate::vk::Device, _acceleration_structure: AccelerationStructureKHR, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_acceleration_structure_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyAccelerationStructureKHR\0", ); let val = _f(cname); if val.is_null() { destroy_acceleration_structure_khr } else { ::core::mem::transmute(val) } }, cmd_build_acceleration_structures_khr: unsafe { unsafe extern "system" fn cmd_build_acceleration_structures_khr( _command_buffer: CommandBuffer, _info_count: u32, _p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>, 
_pp_build_range_infos : * const * const AccelerationStructureBuildRangeInfoKHR, ) { panic!(concat!( "Unable to load ", stringify!(cmd_build_acceleration_structures_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBuildAccelerationStructuresKHR\0", ); let val = _f(cname); if val.is_null() { cmd_build_acceleration_structures_khr } else { ::core::mem::transmute(val) } }, cmd_build_acceleration_structures_indirect_khr: unsafe { unsafe extern "system" fn cmd_build_acceleration_structures_indirect_khr( _command_buffer: CommandBuffer, _info_count: u32, _p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>, _p_indirect_device_addresses: *const DeviceAddress, _p_indirect_strides: *const u32, _pp_max_primitive_counts: *const *const u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_build_acceleration_structures_indirect_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBuildAccelerationStructuresIndirectKHR\0", ); let val = _f(cname); if val.is_null() { cmd_build_acceleration_structures_indirect_khr } else { ::core::mem::transmute(val) } }, build_acceleration_structures_khr: unsafe { unsafe extern "system" fn build_acceleration_structures_khr( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _info_count: u32, _p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>, _pp_build_range_infos : * const * const AccelerationStructureBuildRangeInfoKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(build_acceleration_structures_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkBuildAccelerationStructuresKHR\0", ); let val = _f(cname); if val.is_null() { build_acceleration_structures_khr } else { ::core::mem::transmute(val) } }, copy_acceleration_structure_khr: unsafe { unsafe extern "system" fn copy_acceleration_structure_khr( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _p_info: *const CopyAccelerationStructureInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_acceleration_structure_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCopyAccelerationStructureKHR\0", ); let val = _f(cname); if val.is_null() { copy_acceleration_structure_khr } else { ::core::mem::transmute(val) } }, copy_acceleration_structure_to_memory_khr: unsafe { unsafe extern "system" fn copy_acceleration_structure_to_memory_khr( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _p_info: *const CopyAccelerationStructureToMemoryInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_acceleration_structure_to_memory_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCopyAccelerationStructureToMemoryKHR\0", ); let val = _f(cname); if val.is_null() { copy_acceleration_structure_to_memory_khr } else { ::core::mem::transmute(val) } }, copy_memory_to_acceleration_structure_khr: unsafe { unsafe extern "system" fn copy_memory_to_acceleration_structure_khr( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _p_info: *const CopyMemoryToAccelerationStructureInfoKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(copy_memory_to_acceleration_structure_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCopyMemoryToAccelerationStructureKHR\0", ); let val = _f(cname); if val.is_null() { copy_memory_to_acceleration_structure_khr } else { ::core::mem::transmute(val) } }, write_acceleration_structures_properties_khr: unsafe { unsafe extern "system" fn 
write_acceleration_structures_properties_khr( _device: crate::vk::Device, _acceleration_structure_count: u32, _p_acceleration_structures: *const AccelerationStructureKHR, _query_type: QueryType, _data_size: usize, _p_data: *mut c_void, _stride: usize, ) -> Result { panic!(concat!( "Unable to load ", stringify!(write_acceleration_structures_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkWriteAccelerationStructuresPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { write_acceleration_structures_properties_khr } else { ::core::mem::transmute(val) } }, cmd_copy_acceleration_structure_khr: unsafe { unsafe extern "system" fn cmd_copy_acceleration_structure_khr( _command_buffer: CommandBuffer, _p_info: *const CopyAccelerationStructureInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_acceleration_structure_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdCopyAccelerationStructureKHR\0", ); let val = _f(cname); if val.is_null() { cmd_copy_acceleration_structure_khr } else { ::core::mem::transmute(val) } }, cmd_copy_acceleration_structure_to_memory_khr: unsafe { unsafe extern "system" fn cmd_copy_acceleration_structure_to_memory_khr( _command_buffer: CommandBuffer, _p_info: *const CopyAccelerationStructureToMemoryInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_acceleration_structure_to_memory_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdCopyAccelerationStructureToMemoryKHR\0", ); let val = _f(cname); if val.is_null() { cmd_copy_acceleration_structure_to_memory_khr } else { ::core::mem::transmute(val) } }, cmd_copy_memory_to_acceleration_structure_khr: unsafe { unsafe extern "system" fn cmd_copy_memory_to_acceleration_structure_khr( _command_buffer: CommandBuffer, _p_info: *const CopyMemoryToAccelerationStructureInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_memory_to_acceleration_structure_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdCopyMemoryToAccelerationStructureKHR\0", ); let val = _f(cname); if val.is_null() { cmd_copy_memory_to_acceleration_structure_khr } else { ::core::mem::transmute(val) } }, get_acceleration_structure_device_address_khr: unsafe { unsafe extern "system" fn get_acceleration_structure_device_address_khr( _device: crate::vk::Device, _p_info: *const AccelerationStructureDeviceAddressInfoKHR<'_>, ) -> DeviceAddress { panic!(concat!( "Unable to load ", stringify!(get_acceleration_structure_device_address_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetAccelerationStructureDeviceAddressKHR\0", ); let val = _f(cname); if val.is_null() { get_acceleration_structure_device_address_khr } else { ::core::mem::transmute(val) } }, cmd_write_acceleration_structures_properties_khr: unsafe { unsafe extern "system" fn cmd_write_acceleration_structures_properties_khr( _command_buffer: CommandBuffer, _acceleration_structure_count: u32, _p_acceleration_structures: *const AccelerationStructureKHR, _query_type: QueryType, _query_pool: QueryPool, _first_query: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_write_acceleration_structures_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdWriteAccelerationStructuresPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { cmd_write_acceleration_structures_properties_khr } else { ::core::mem::transmute(val) } }, get_device_acceleration_structure_compatibility_khr: unsafe { unsafe extern "system" fn 
get_device_acceleration_structure_compatibility_khr( _device: crate::vk::Device, _p_version_info: *const AccelerationStructureVersionInfoKHR<'_>, _p_compatibility: *mut AccelerationStructureCompatibilityKHR, ) { panic!(concat!( "Unable to load ", stringify!(get_device_acceleration_structure_compatibility_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceAccelerationStructureCompatibilityKHR\0", ); let val = _f(cname); if val.is_null() { get_device_acceleration_structure_compatibility_khr } else { ::core::mem::transmute(val) } }, get_acceleration_structure_build_sizes_khr: unsafe { unsafe extern "system" fn get_acceleration_structure_build_sizes_khr( _device: crate::vk::Device, _build_type: AccelerationStructureBuildTypeKHR, _p_build_info: *const AccelerationStructureBuildGeometryInfoKHR<'_>, _p_max_primitive_counts: *const u32, _p_size_info: *mut AccelerationStructureBuildSizesInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_acceleration_structure_build_sizes_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetAccelerationStructureBuildSizesKHR\0", ); let val = _f(cname); if val.is_null() { get_acceleration_structure_build_sizes_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_ray_tracing_pipeline"] pub mod ray_tracing_pipeline { use super::super::*; pub use { crate::vk::KHR_RAY_TRACING_PIPELINE_NAME as NAME, crate::vk::KHR_RAY_TRACING_PIPELINE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_ray_tracing_pipeline device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_ray_tracing_pipeline device-level function pointers"] pub struct DeviceFn { pub cmd_trace_rays_khr: PFN_vkCmdTraceRaysKHR, pub create_ray_tracing_pipelines_khr: PFN_vkCreateRayTracingPipelinesKHR, pub get_ray_tracing_shader_group_handles_khr: PFN_vkGetRayTracingShaderGroupHandlesKHR, pub get_ray_tracing_capture_replay_shader_group_handles_khr: PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR, pub cmd_trace_rays_indirect_khr: PFN_vkCmdTraceRaysIndirectKHR, pub get_ray_tracing_shader_group_stack_size_khr: PFN_vkGetRayTracingShaderGroupStackSizeKHR, pub cmd_set_ray_tracing_pipeline_stack_size_khr: PFN_vkCmdSetRayTracingPipelineStackSizeKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_trace_rays_khr: unsafe { unsafe extern "system" fn cmd_trace_rays_khr( _command_buffer: CommandBuffer, _p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR, _p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR, _p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR, _p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR, _width: u32, _height: u32, _depth: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysKHR\0"); let val = _f(cname); if val.is_null() { 
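// vkCmdTraceRaysKHR was not found by the loader; keep the stub defined above, which
// panics with an "Unable to load" message if the missing entry point is ever called.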
cmd_trace_rays_khr } else { ::core::mem::transmute(val) } }, create_ray_tracing_pipelines_khr: unsafe { unsafe extern "system" fn create_ray_tracing_pipelines_khr( _device: crate::vk::Device, _deferred_operation: DeferredOperationKHR, _pipeline_cache: PipelineCache, _create_info_count: u32, _p_create_infos: *const RayTracingPipelineCreateInfoKHR<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipelines: *mut Pipeline, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_ray_tracing_pipelines_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateRayTracingPipelinesKHR\0", ); let val = _f(cname); if val.is_null() { create_ray_tracing_pipelines_khr } else { ::core::mem::transmute(val) } }, get_ray_tracing_shader_group_handles_khr: unsafe { unsafe extern "system" fn get_ray_tracing_shader_group_handles_khr( _device: crate::vk::Device, _pipeline: Pipeline, _first_group: u32, _group_count: u32, _data_size: usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_ray_tracing_shader_group_handles_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetRayTracingShaderGroupHandlesKHR\0", ); let val = _f(cname); if val.is_null() { get_ray_tracing_shader_group_handles_khr } else { ::core::mem::transmute(val) } }, get_ray_tracing_capture_replay_shader_group_handles_khr: unsafe { unsafe extern "system" fn get_ray_tracing_capture_replay_shader_group_handles_khr( _device: crate::vk::Device, _pipeline: Pipeline, _first_group: u32, _group_count: u32, _data_size: usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_ray_tracing_capture_replay_shader_group_handles_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetRayTracingCaptureReplayShaderGroupHandlesKHR\0", ); let val = _f(cname); if val.is_null() { get_ray_tracing_capture_replay_shader_group_handles_khr } else { ::core::mem::transmute(val) } }, cmd_trace_rays_indirect_khr: unsafe { unsafe extern "system" fn cmd_trace_rays_indirect_khr( _command_buffer: CommandBuffer, _p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR, _p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR, _p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR, _p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR, _indirect_device_address: DeviceAddress, ) { panic!(concat!( "Unable to load ", stringify!(cmd_trace_rays_indirect_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysIndirectKHR\0"); let val = _f(cname); if val.is_null() { cmd_trace_rays_indirect_khr } else { ::core::mem::transmute(val) } }, get_ray_tracing_shader_group_stack_size_khr: unsafe { unsafe extern "system" fn get_ray_tracing_shader_group_stack_size_khr( _device: crate::vk::Device, _pipeline: Pipeline, _group: u32, _group_shader: ShaderGroupShaderKHR, ) -> DeviceSize { panic!(concat!( "Unable to load ", stringify!(get_ray_tracing_shader_group_stack_size_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetRayTracingShaderGroupStackSizeKHR\0", ); let val = _f(cname); if val.is_null() { get_ray_tracing_shader_group_stack_size_khr } else { ::core::mem::transmute(val) } }, cmd_set_ray_tracing_pipeline_stack_size_khr: unsafe { unsafe extern "system" fn cmd_set_ray_tracing_pipeline_stack_size_khr( _command_buffer: CommandBuffer, _pipeline_stack_size: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_ray_tracing_pipeline_stack_size_khr) )) } let cname = 
CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRayTracingPipelineStackSizeKHR\0", ); let val = _f(cname); if val.is_null() { cmd_set_ray_tracing_pipeline_stack_size_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_ray_query"] pub mod ray_query { use super::super::*; pub use { crate::vk::KHR_RAY_QUERY_NAME as NAME, crate::vk::KHR_RAY_QUERY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_sampler_ycbcr_conversion"] pub mod sampler_ycbcr_conversion { use super::super::*; pub use { crate::vk::KHR_SAMPLER_YCBCR_CONVERSION_NAME as NAME, crate::vk::KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_sampler_ycbcr_conversion device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_sampler_ycbcr_conversion device-level function pointers"] pub struct DeviceFn { pub create_sampler_ycbcr_conversion_khr: PFN_vkCreateSamplerYcbcrConversion, pub destroy_sampler_ycbcr_conversion_khr: PFN_vkDestroySamplerYcbcrConversion, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_sampler_ycbcr_conversion_khr: unsafe { unsafe extern "system" fn create_sampler_ycbcr_conversion_khr( _device: crate::vk::Device, _p_create_info: *const SamplerYcbcrConversionCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_ycbcr_conversion: *mut SamplerYcbcrConversion, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_sampler_ycbcr_conversion_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateSamplerYcbcrConversionKHR\0", ); let val = _f(cname); if val.is_null() { create_sampler_ycbcr_conversion_khr } else { ::core::mem::transmute(val) } }, destroy_sampler_ycbcr_conversion_khr: unsafe { unsafe extern "system" fn destroy_sampler_ycbcr_conversion_khr( _device: crate::vk::Device, _ycbcr_conversion: SamplerYcbcrConversion, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_sampler_ycbcr_conversion_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroySamplerYcbcrConversionKHR\0", ); let val = _f(cname); if val.is_null() { destroy_sampler_ycbcr_conversion_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_bind_memory2"] pub mod bind_memory2 { use super::super::*; pub use { crate::vk::KHR_BIND_MEMORY2_NAME as NAME, crate::vk::KHR_BIND_MEMORY2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_bind_memory2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { 
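// Raw VkDevice handle that the VK_KHR_bind_memory2 function pointers were loaded against.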
self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_bind_memory2 device-level function pointers"] pub struct DeviceFn { pub bind_buffer_memory2_khr: PFN_vkBindBufferMemory2, pub bind_image_memory2_khr: PFN_vkBindImageMemory2, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { bind_buffer_memory2_khr: unsafe { unsafe extern "system" fn bind_buffer_memory2_khr( _device: crate::vk::Device, _bind_info_count: u32, _p_bind_infos: *const BindBufferMemoryInfo<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(bind_buffer_memory2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBindBufferMemory2KHR\0"); let val = _f(cname); if val.is_null() { bind_buffer_memory2_khr } else { ::core::mem::transmute(val) } }, bind_image_memory2_khr: unsafe { unsafe extern "system" fn bind_image_memory2_khr( _device: crate::vk::Device, _bind_info_count: u32, _p_bind_infos: *const BindImageMemoryInfo<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(bind_image_memory2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBindImageMemory2KHR\0"); let val = _f(cname); if val.is_null() { bind_image_memory2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_portability_subset"] pub mod portability_subset { use super::super::*; pub use { crate::vk::KHR_PORTABILITY_SUBSET_NAME as NAME, crate::vk::KHR_PORTABILITY_SUBSET_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_maintenance3"] pub mod maintenance3 { use super::super::*; pub use { crate::vk::KHR_MAINTENANCE3_NAME as NAME, crate::vk::KHR_MAINTENANCE3_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_maintenance3 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_maintenance3 device-level function pointers"] pub struct DeviceFn { pub get_descriptor_set_layout_support_khr: PFN_vkGetDescriptorSetLayoutSupport, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_descriptor_set_layout_support_khr: unsafe { unsafe extern "system" fn get_descriptor_set_layout_support_khr( _device: crate::vk::Device, _p_create_info: *const DescriptorSetLayoutCreateInfo<'_>, _p_support: *mut DescriptorSetLayoutSupport<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_descriptor_set_layout_support_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDescriptorSetLayoutSupportKHR\0", ); let val = _f(cname); if val.is_null() { get_descriptor_set_layout_support_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_draw_indirect_count"] pub mod draw_indirect_count { use super::super::*; pub use { crate::vk::KHR_DRAW_INDIRECT_COUNT_NAME as NAME, crate::vk::KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_draw_indirect_count 
device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_draw_indirect_count device-level function pointers"] pub struct DeviceFn { pub cmd_draw_indirect_count_khr: PFN_vkCmdDrawIndirectCount, pub cmd_draw_indexed_indirect_count_khr: PFN_vkCmdDrawIndexedIndirectCount, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_indirect_count_khr: unsafe { unsafe extern "system" fn cmd_draw_indirect_count_khr( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indirect_count_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCountKHR\0"); let val = _f(cname); if val.is_null() { cmd_draw_indirect_count_khr } else { ::core::mem::transmute(val) } }, cmd_draw_indexed_indirect_count_khr: unsafe { unsafe extern "system" fn cmd_draw_indexed_indirect_count_khr( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indexed_indirect_count_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDrawIndexedIndirectCountKHR\0", ); let val = _f(cname); if val.is_null() { cmd_draw_indexed_indirect_count_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_subgroup_extended_types"] pub mod shader_subgroup_extended_types { use super::super::*; pub use { crate::vk::KHR_SHADER_SUBGROUP_EXTENDED_TYPES_NAME as NAME, crate::vk::KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_8bit_storage"] pub mod _8bit_storage { use super::super::*; pub use { crate::vk::KHR_8BIT_STORAGE_NAME as NAME, crate::vk::KHR_8BIT_STORAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_atomic_int64"] pub mod shader_atomic_int64 { use super::super::*; pub use { crate::vk::KHR_SHADER_ATOMIC_INT64_NAME as NAME, crate::vk::KHR_SHADER_ATOMIC_INT64_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_clock"] pub mod shader_clock { use super::super::*; pub use { crate::vk::KHR_SHADER_CLOCK_NAME as NAME, crate::vk::KHR_SHADER_CLOCK_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_video_decode_h265"] pub mod video_decode_h265 { use super::super::*; pub use { crate::vk::KHR_VIDEO_DECODE_H265_NAME as NAME, crate::vk::KHR_VIDEO_DECODE_H265_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_global_priority"] pub mod global_priority { use super::super::*; pub use { crate::vk::KHR_GLOBAL_PRIORITY_NAME as NAME, crate::vk::KHR_GLOBAL_PRIORITY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_driver_properties"] pub mod driver_properties { use super::super::*; pub use { crate::vk::KHR_DRIVER_PROPERTIES_NAME as NAME, 
crate::vk::KHR_DRIVER_PROPERTIES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_float_controls"] pub mod shader_float_controls { use super::super::*; pub use { crate::vk::KHR_SHADER_FLOAT_CONTROLS_NAME as NAME, crate::vk::KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_depth_stencil_resolve"] pub mod depth_stencil_resolve { use super::super::*; pub use { crate::vk::KHR_DEPTH_STENCIL_RESOLVE_NAME as NAME, crate::vk::KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_swapchain_mutable_format"] pub mod swapchain_mutable_format { use super::super::*; pub use { crate::vk::KHR_SWAPCHAIN_MUTABLE_FORMAT_NAME as NAME, crate::vk::KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_timeline_semaphore"] pub mod timeline_semaphore { use super::super::*; pub use { crate::vk::KHR_TIMELINE_SEMAPHORE_NAME as NAME, crate::vk::KHR_TIMELINE_SEMAPHORE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_timeline_semaphore device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_timeline_semaphore device-level function pointers"] pub struct DeviceFn { pub get_semaphore_counter_value_khr: PFN_vkGetSemaphoreCounterValue, pub wait_semaphores_khr: PFN_vkWaitSemaphores, pub signal_semaphore_khr: PFN_vkSignalSemaphore, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_semaphore_counter_value_khr: unsafe { unsafe extern "system" fn get_semaphore_counter_value_khr( _device: crate::vk::Device, _semaphore: Semaphore, _p_value: *mut u64, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_semaphore_counter_value_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSemaphoreCounterValueKHR\0"); let val = _f(cname); if val.is_null() { get_semaphore_counter_value_khr } else { ::core::mem::transmute(val) } }, wait_semaphores_khr: unsafe { unsafe extern "system" fn wait_semaphores_khr( _device: crate::vk::Device, _p_wait_info: *const SemaphoreWaitInfo<'_>, _timeout: u64, ) -> Result { panic!(concat!("Unable to load ", stringify!(wait_semaphores_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkWaitSemaphoresKHR\0"); let val = _f(cname); if val.is_null() { wait_semaphores_khr } else { ::core::mem::transmute(val) } }, signal_semaphore_khr: unsafe { unsafe extern "system" fn signal_semaphore_khr( _device: crate::vk::Device, _p_signal_info: *const SemaphoreSignalInfo<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(signal_semaphore_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSignalSemaphoreKHR\0"); let val = _f(cname); if val.is_null() { signal_semaphore_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_vulkan_memory_model"] pub mod vulkan_memory_model { use super::super::*; pub use { crate::vk::KHR_VULKAN_MEMORY_MODEL_NAME as NAME, crate::vk::KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION as 
SPEC_VERSION, }; } #[doc = "VK_KHR_shader_terminate_invocation"] pub mod shader_terminate_invocation { use super::super::*; pub use { crate::vk::KHR_SHADER_TERMINATE_INVOCATION_NAME as NAME, crate::vk::KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_fragment_shading_rate"] pub mod fragment_shading_rate { use super::super::*; pub use { crate::vk::KHR_FRAGMENT_SHADING_RATE_NAME as NAME, crate::vk::KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_fragment_shading_rate instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_fragment_shading_rate instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_fragment_shading_rates_khr: PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_fragment_shading_rates_khr: unsafe { unsafe extern "system" fn get_physical_device_fragment_shading_rates_khr( _physical_device: PhysicalDevice, _p_fragment_shading_rate_count: *mut u32, _p_fragment_shading_rates: *mut PhysicalDeviceFragmentShadingRateKHR< '_, >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_fragment_shading_rates_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceFragmentShadingRatesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_fragment_shading_rates_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_fragment_shading_rate device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_fragment_shading_rate device-level function pointers"] pub struct DeviceFn { pub cmd_set_fragment_shading_rate_khr: PFN_vkCmdSetFragmentShadingRateKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_fragment_shading_rate_khr: unsafe { unsafe extern "system" fn cmd_set_fragment_shading_rate_khr( _command_buffer: CommandBuffer, _p_fragment_size: *const Extent2D, _combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2usize], ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_fragment_shading_rate_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetFragmentShadingRateKHR\0", ); let val = 
_f(cname); if val.is_null() { cmd_set_fragment_shading_rate_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_dynamic_rendering_local_read"] pub mod dynamic_rendering_local_read { use super::super::*; pub use { crate::vk::KHR_DYNAMIC_RENDERING_LOCAL_READ_NAME as NAME, crate::vk::KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_dynamic_rendering_local_read device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_dynamic_rendering_local_read device-level function pointers"] pub struct DeviceFn { pub cmd_set_rendering_attachment_locations_khr: PFN_vkCmdSetRenderingAttachmentLocationsKHR, pub cmd_set_rendering_input_attachment_indices_khr: PFN_vkCmdSetRenderingInputAttachmentIndicesKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_rendering_attachment_locations_khr: unsafe { unsafe extern "system" fn cmd_set_rendering_attachment_locations_khr( _command_buffer: CommandBuffer, _p_location_info: *const RenderingAttachmentLocationInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rendering_attachment_locations_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRenderingAttachmentLocationsKHR\0", ); let val = _f(cname); if val.is_null() { cmd_set_rendering_attachment_locations_khr } else { ::core::mem::transmute(val) } }, cmd_set_rendering_input_attachment_indices_khr: unsafe { unsafe extern "system" fn cmd_set_rendering_input_attachment_indices_khr( _command_buffer: CommandBuffer, _p_location_info: *const RenderingInputAttachmentIndexInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rendering_input_attachment_indices_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetRenderingInputAttachmentIndicesKHR\0", ); let val = _f(cname); if val.is_null() { cmd_set_rendering_input_attachment_indices_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_quad_control"] pub mod shader_quad_control { use super::super::*; pub use { crate::vk::KHR_SHADER_QUAD_CONTROL_NAME as NAME, crate::vk::KHR_SHADER_QUAD_CONTROL_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_spirv_1_4"] pub mod spirv_1_4 { use super::super::*; pub use { crate::vk::KHR_SPIRV_1_4_NAME as NAME, crate::vk::KHR_SPIRV_1_4_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_surface_protected_capabilities"] pub mod surface_protected_capabilities { use super::super::*; pub use { crate::vk::KHR_SURFACE_PROTECTED_CAPABILITIES_NAME as NAME, crate::vk::KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_separate_depth_stencil_layouts"] pub mod separate_depth_stencil_layouts { use super::super::*; pub use { crate::vk::KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_NAME as NAME, crate::vk::KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_present_wait"] pub mod present_wait { 
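// Same pattern as the other extension wrappers in this file: NAME/SPEC_VERSION re-exports
// plus a Device struct whose single function pointer (vkWaitForPresentKHR) is resolved
// through vkGetDeviceProcAddr, falling back to a panicking stub when it is unavailable.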
use super::super::*; pub use { crate::vk::KHR_PRESENT_WAIT_NAME as NAME, crate::vk::KHR_PRESENT_WAIT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_present_wait device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_present_wait device-level function pointers"] pub struct DeviceFn { pub wait_for_present_khr: PFN_vkWaitForPresentKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { wait_for_present_khr: unsafe { unsafe extern "system" fn wait_for_present_khr( _device: crate::vk::Device, _swapchain: SwapchainKHR, _present_id: u64, _timeout: u64, ) -> Result { panic!(concat!("Unable to load ", stringify!(wait_for_present_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkWaitForPresentKHR\0"); let val = _f(cname); if val.is_null() { wait_for_present_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_uniform_buffer_standard_layout"] pub mod uniform_buffer_standard_layout { use super::super::*; pub use { crate::vk::KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_NAME as NAME, crate::vk::KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_buffer_device_address"] pub mod buffer_device_address { use super::super::*; pub use { crate::vk::KHR_BUFFER_DEVICE_ADDRESS_NAME as NAME, crate::vk::KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_buffer_device_address device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_buffer_device_address device-level function pointers"] pub struct DeviceFn { pub get_buffer_device_address_khr: PFN_vkGetBufferDeviceAddress, pub get_buffer_opaque_capture_address_khr: PFN_vkGetBufferOpaqueCaptureAddress, pub get_device_memory_opaque_capture_address_khr: PFN_vkGetDeviceMemoryOpaqueCaptureAddress, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_buffer_device_address_khr: unsafe { unsafe extern "system" fn get_buffer_device_address_khr( _device: crate::vk::Device, _p_info: *const BufferDeviceAddressInfo<'_>, ) -> DeviceAddress { panic!(concat!( "Unable to load ", stringify!(get_buffer_device_address_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetBufferDeviceAddressKHR\0"); let val = _f(cname); if val.is_null() { get_buffer_device_address_khr } else { 
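// The loader returned a valid pointer; reinterpret it as the typed PFN_vkGetBufferDeviceAddress.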
::core::mem::transmute(val) } }, get_buffer_opaque_capture_address_khr: unsafe { unsafe extern "system" fn get_buffer_opaque_capture_address_khr( _device: crate::vk::Device, _p_info: *const BufferDeviceAddressInfo<'_>, ) -> u64 { panic!(concat!( "Unable to load ", stringify!(get_buffer_opaque_capture_address_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetBufferOpaqueCaptureAddressKHR\0", ); let val = _f(cname); if val.is_null() { get_buffer_opaque_capture_address_khr } else { ::core::mem::transmute(val) } }, get_device_memory_opaque_capture_address_khr: unsafe { unsafe extern "system" fn get_device_memory_opaque_capture_address_khr( _device: crate::vk::Device, _p_info: *const DeviceMemoryOpaqueCaptureAddressInfo<'_>, ) -> u64 { panic!(concat!( "Unable to load ", stringify!(get_device_memory_opaque_capture_address_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceMemoryOpaqueCaptureAddressKHR\0", ); let val = _f(cname); if val.is_null() { get_device_memory_opaque_capture_address_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_deferred_host_operations"] pub mod deferred_host_operations { use super::super::*; pub use { crate::vk::KHR_DEFERRED_HOST_OPERATIONS_NAME as NAME, crate::vk::KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_deferred_host_operations device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_deferred_host_operations device-level function pointers"] pub struct DeviceFn { pub create_deferred_operation_khr: PFN_vkCreateDeferredOperationKHR, pub destroy_deferred_operation_khr: PFN_vkDestroyDeferredOperationKHR, pub get_deferred_operation_max_concurrency_khr: PFN_vkGetDeferredOperationMaxConcurrencyKHR, pub get_deferred_operation_result_khr: PFN_vkGetDeferredOperationResultKHR, pub deferred_operation_join_khr: PFN_vkDeferredOperationJoinKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_deferred_operation_khr: unsafe { unsafe extern "system" fn create_deferred_operation_khr( _device: crate::vk::Device, _p_allocator: *const AllocationCallbacks<'_>, _p_deferred_operation: *mut DeferredOperationKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_deferred_operation_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDeferredOperationKHR\0"); let val = _f(cname); if val.is_null() { create_deferred_operation_khr } else { ::core::mem::transmute(val) } }, destroy_deferred_operation_khr: unsafe { unsafe extern "system" fn destroy_deferred_operation_khr( _device: crate::vk::Device, _operation: DeferredOperationKHR, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_deferred_operation_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyDeferredOperationKHR\0"); let val = _f(cname); if val.is_null() { 
destroy_deferred_operation_khr } else { ::core::mem::transmute(val) } }, get_deferred_operation_max_concurrency_khr: unsafe { unsafe extern "system" fn get_deferred_operation_max_concurrency_khr( _device: crate::vk::Device, _operation: DeferredOperationKHR, ) -> u32 { panic!(concat!( "Unable to load ", stringify!(get_deferred_operation_max_concurrency_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeferredOperationMaxConcurrencyKHR\0", ); let val = _f(cname); if val.is_null() { get_deferred_operation_max_concurrency_khr } else { ::core::mem::transmute(val) } }, get_deferred_operation_result_khr: unsafe { unsafe extern "system" fn get_deferred_operation_result_khr( _device: crate::vk::Device, _operation: DeferredOperationKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_deferred_operation_result_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeferredOperationResultKHR\0", ); let val = _f(cname); if val.is_null() { get_deferred_operation_result_khr } else { ::core::mem::transmute(val) } }, deferred_operation_join_khr: unsafe { unsafe extern "system" fn deferred_operation_join_khr( _device: crate::vk::Device, _operation: DeferredOperationKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(deferred_operation_join_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDeferredOperationJoinKHR\0"); let val = _f(cname); if val.is_null() { deferred_operation_join_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_pipeline_executable_properties"] pub mod pipeline_executable_properties { use super::super::*; pub use { crate::vk::KHR_PIPELINE_EXECUTABLE_PROPERTIES_NAME as NAME, crate::vk::KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_pipeline_executable_properties device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_pipeline_executable_properties device-level function pointers"] pub struct DeviceFn { pub get_pipeline_executable_properties_khr: PFN_vkGetPipelineExecutablePropertiesKHR, pub get_pipeline_executable_statistics_khr: PFN_vkGetPipelineExecutableStatisticsKHR, pub get_pipeline_executable_internal_representations_khr: PFN_vkGetPipelineExecutableInternalRepresentationsKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_pipeline_executable_properties_khr: unsafe { unsafe extern "system" fn get_pipeline_executable_properties_khr( _device: crate::vk::Device, _p_pipeline_info: *const PipelineInfoKHR<'_>, _p_executable_count: *mut u32, _p_properties: *mut PipelineExecutablePropertiesKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_pipeline_executable_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPipelineExecutablePropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_pipeline_executable_properties_khr } else { 
::core::mem::transmute(val) } }, get_pipeline_executable_statistics_khr: unsafe { unsafe extern "system" fn get_pipeline_executable_statistics_khr( _device: crate::vk::Device, _p_executable_info: *const PipelineExecutableInfoKHR<'_>, _p_statistic_count: *mut u32, _p_statistics: *mut PipelineExecutableStatisticKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_pipeline_executable_statistics_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPipelineExecutableStatisticsKHR\0", ); let val = _f(cname); if val.is_null() { get_pipeline_executable_statistics_khr } else { ::core::mem::transmute(val) } }, get_pipeline_executable_internal_representations_khr: unsafe { unsafe extern "system" fn get_pipeline_executable_internal_representations_khr( _device: crate::vk::Device, _p_executable_info: *const PipelineExecutableInfoKHR<'_>, _p_internal_representation_count: *mut u32, _p_internal_representations : * mut PipelineExecutableInternalRepresentationKHR < '_ >, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_pipeline_executable_internal_representations_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPipelineExecutableInternalRepresentationsKHR\0", ); let val = _f(cname); if val.is_null() { get_pipeline_executable_internal_representations_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_map_memory2"] pub mod map_memory2 { use super::super::*; pub use { crate::vk::KHR_MAP_MEMORY2_NAME as NAME, crate::vk::KHR_MAP_MEMORY2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_map_memory2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_map_memory2 device-level function pointers"] pub struct DeviceFn { pub map_memory2_khr: PFN_vkMapMemory2KHR, pub unmap_memory2_khr: PFN_vkUnmapMemory2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { map_memory2_khr: unsafe { unsafe extern "system" fn map_memory2_khr( _device: crate::vk::Device, _p_memory_map_info: *const MemoryMapInfoKHR<'_>, _pp_data: *mut *mut c_void, ) -> Result { panic!(concat!("Unable to load ", stringify!(map_memory2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkMapMemory2KHR\0"); let val = _f(cname); if val.is_null() { map_memory2_khr } else { ::core::mem::transmute(val) } }, unmap_memory2_khr: unsafe { unsafe extern "system" fn unmap_memory2_khr( _device: crate::vk::Device, _p_memory_unmap_info: *const MemoryUnmapInfoKHR<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(unmap_memory2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkUnmapMemory2KHR\0"); let val = _f(cname); if val.is_null() { unmap_memory2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_integer_dot_product"] pub mod shader_integer_dot_product { use super::super::*; pub use { crate::vk::KHR_SHADER_INTEGER_DOT_PRODUCT_NAME as NAME, 
crate::vk::KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_pipeline_library"] pub mod pipeline_library { use super::super::*; pub use { crate::vk::KHR_PIPELINE_LIBRARY_NAME as NAME, crate::vk::KHR_PIPELINE_LIBRARY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_non_semantic_info"] pub mod shader_non_semantic_info { use super::super::*; pub use { crate::vk::KHR_SHADER_NON_SEMANTIC_INFO_NAME as NAME, crate::vk::KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_present_id"] pub mod present_id { use super::super::*; pub use { crate::vk::KHR_PRESENT_ID_NAME as NAME, crate::vk::KHR_PRESENT_ID_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_video_encode_queue"] pub mod video_encode_queue { use super::super::*; pub use { crate::vk::KHR_VIDEO_ENCODE_QUEUE_NAME as NAME, crate::vk::KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_video_encode_queue instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_video_encode_queue instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_video_encode_quality_level_properties_khr: PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_video_encode_quality_level_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_video_encode_quality_level_properties_khr( _physical_device: PhysicalDevice, _p_quality_level_info : * const PhysicalDeviceVideoEncodeQualityLevelInfoKHR < '_ >, _p_quality_level_properties: *mut VideoEncodeQualityLevelPropertiesKHR< '_, >, ) -> Result { panic!(concat!( "Unable to load ", stringify!( get_physical_device_video_encode_quality_level_properties_khr ) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_video_encode_quality_level_properties_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_video_encode_queue device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_video_encode_queue device-level function pointers"] pub struct DeviceFn { pub get_encoded_video_session_parameters_khr: PFN_vkGetEncodedVideoSessionParametersKHR, pub cmd_encode_video_khr: PFN_vkCmdEncodeVideoKHR, } unsafe impl Send for DeviceFn {} unsafe impl 
Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_encoded_video_session_parameters_khr: unsafe { unsafe extern "system" fn get_encoded_video_session_parameters_khr( _device: crate::vk::Device, _p_video_session_parameters_info : * const VideoEncodeSessionParametersGetInfoKHR < '_ >, _p_feedback_info: *mut VideoEncodeSessionParametersFeedbackInfoKHR<'_>, _p_data_size: *mut usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_encoded_video_session_parameters_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetEncodedVideoSessionParametersKHR\0", ); let val = _f(cname); if val.is_null() { get_encoded_video_session_parameters_khr } else { ::core::mem::transmute(val) } }, cmd_encode_video_khr: unsafe { unsafe extern "system" fn cmd_encode_video_khr( _command_buffer: CommandBuffer, _p_encode_info: *const VideoEncodeInfoKHR<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_encode_video_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEncodeVideoKHR\0"); let val = _f(cname); if val.is_null() { cmd_encode_video_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_synchronization2"] pub mod synchronization2 { use super::super::*; pub use { crate::vk::KHR_SYNCHRONIZATION2_NAME as NAME, crate::vk::KHR_SYNCHRONIZATION2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_synchronization2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_synchronization2 device-level function pointers"] pub struct DeviceFn { pub cmd_set_event2_khr: PFN_vkCmdSetEvent2, pub cmd_reset_event2_khr: PFN_vkCmdResetEvent2, pub cmd_wait_events2_khr: PFN_vkCmdWaitEvents2, pub cmd_pipeline_barrier2_khr: PFN_vkCmdPipelineBarrier2, pub cmd_write_timestamp2_khr: PFN_vkCmdWriteTimestamp2, pub queue_submit2_khr: PFN_vkQueueSubmit2, pub cmd_write_buffer_marker2_amd: PFN_vkCmdWriteBufferMarker2AMD, pub get_queue_checkpoint_data2_nv: PFN_vkGetQueueCheckpointData2NV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_event2_khr: unsafe { unsafe extern "system" fn cmd_set_event2_khr( _command_buffer: CommandBuffer, _event: Event, _p_dependency_info: *const DependencyInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_event2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent2KHR\0"); let val = _f(cname); if val.is_null() { cmd_set_event2_khr } else { ::core::mem::transmute(val) } }, cmd_reset_event2_khr: unsafe { unsafe extern "system" fn cmd_reset_event2_khr( _command_buffer: CommandBuffer, _event: Event, _stage_mask: PipelineStageFlags2, ) { panic!(concat!("Unable to load ", stringify!(cmd_reset_event2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent2KHR\0"); let val = 
_f(cname); if val.is_null() { cmd_reset_event2_khr } else { ::core::mem::transmute(val) } }, cmd_wait_events2_khr: unsafe { unsafe extern "system" fn cmd_wait_events2_khr( _command_buffer: CommandBuffer, _event_count: u32, _p_events: *const Event, _p_dependency_infos: *const DependencyInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_wait_events2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents2KHR\0"); let val = _f(cname); if val.is_null() { cmd_wait_events2_khr } else { ::core::mem::transmute(val) } }, cmd_pipeline_barrier2_khr: unsafe { unsafe extern "system" fn cmd_pipeline_barrier2_khr( _command_buffer: CommandBuffer, _p_dependency_info: *const DependencyInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_pipeline_barrier2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier2KHR\0"); let val = _f(cname); if val.is_null() { cmd_pipeline_barrier2_khr } else { ::core::mem::transmute(val) } }, cmd_write_timestamp2_khr: unsafe { unsafe extern "system" fn cmd_write_timestamp2_khr( _command_buffer: CommandBuffer, _stage: PipelineStageFlags2, _query_pool: QueryPool, _query: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_write_timestamp2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp2KHR\0"); let val = _f(cname); if val.is_null() { cmd_write_timestamp2_khr } else { ::core::mem::transmute(val) } }, queue_submit2_khr: unsafe { unsafe extern "system" fn queue_submit2_khr( _queue: Queue, _submit_count: u32, _p_submits: *const SubmitInfo2<'_>, _fence: Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(queue_submit2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit2KHR\0"); let val = _f(cname); if val.is_null() { queue_submit2_khr } else { ::core::mem::transmute(val) } }, cmd_write_buffer_marker2_amd: unsafe { unsafe extern "system" fn cmd_write_buffer_marker2_amd( _command_buffer: CommandBuffer, _stage: PipelineStageFlags2, _dst_buffer: Buffer, _dst_offset: DeviceSize, _marker: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_write_buffer_marker2_amd) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteBufferMarker2AMD\0"); let val = _f(cname); if val.is_null() { cmd_write_buffer_marker2_amd } else { ::core::mem::transmute(val) } }, get_queue_checkpoint_data2_nv: unsafe { unsafe extern "system" fn get_queue_checkpoint_data2_nv( _queue: Queue, _p_checkpoint_data_count: *mut u32, _p_checkpoint_data: *mut CheckpointData2NV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_queue_checkpoint_data2_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetQueueCheckpointData2NV\0"); let val = _f(cname); if val.is_null() { get_queue_checkpoint_data2_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_fragment_shader_barycentric"] pub mod fragment_shader_barycentric { use super::super::*; pub use { crate::vk::KHR_FRAGMENT_SHADER_BARYCENTRIC_NAME as NAME, crate::vk::KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_subgroup_uniform_control_flow"] pub mod shader_subgroup_uniform_control_flow { use super::super::*; pub use { crate::vk::KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_NAME as NAME, crate::vk::KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_zero_initialize_workgroup_memory"] pub mod zero_initialize_workgroup_memory { use super::super::*; pub use { crate::vk::KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_NAME as NAME, 
crate::vk::KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_workgroup_memory_explicit_layout"] pub mod workgroup_memory_explicit_layout { use super::super::*; pub use { crate::vk::KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_NAME as NAME, crate::vk::KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_copy_commands2"] pub mod copy_commands2 { use super::super::*; pub use { crate::vk::KHR_COPY_COMMANDS2_NAME as NAME, crate::vk::KHR_COPY_COMMANDS2_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_copy_commands2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_copy_commands2 device-level function pointers"] pub struct DeviceFn { pub cmd_copy_buffer2_khr: PFN_vkCmdCopyBuffer2, pub cmd_copy_image2_khr: PFN_vkCmdCopyImage2, pub cmd_copy_buffer_to_image2_khr: PFN_vkCmdCopyBufferToImage2, pub cmd_copy_image_to_buffer2_khr: PFN_vkCmdCopyImageToBuffer2, pub cmd_blit_image2_khr: PFN_vkCmdBlitImage2, pub cmd_resolve_image2_khr: PFN_vkCmdResolveImage2, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_copy_buffer2_khr: unsafe { unsafe extern "system" fn cmd_copy_buffer2_khr( _command_buffer: CommandBuffer, _p_copy_buffer_info: *const CopyBufferInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBuffer2KHR\0"); let val = _f(cname); if val.is_null() { cmd_copy_buffer2_khr } else { ::core::mem::transmute(val) } }, cmd_copy_image2_khr: unsafe { unsafe extern "system" fn cmd_copy_image2_khr( _command_buffer: CommandBuffer, _p_copy_image_info: *const CopyImageInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_copy_image2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImage2KHR\0"); let val = _f(cname); if val.is_null() { cmd_copy_image2_khr } else { ::core::mem::transmute(val) } }, cmd_copy_buffer_to_image2_khr: unsafe { unsafe extern "system" fn cmd_copy_buffer_to_image2_khr( _command_buffer: CommandBuffer, _p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_buffer_to_image2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBufferToImage2KHR\0"); let val = _f(cname); if val.is_null() { cmd_copy_buffer_to_image2_khr } else { ::core::mem::transmute(val) } }, cmd_copy_image_to_buffer2_khr: unsafe { unsafe extern "system" fn cmd_copy_image_to_buffer2_khr( _command_buffer: CommandBuffer, _p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_image_to_buffer2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImageToBuffer2KHR\0"); let val = _f(cname); if val.is_null() { cmd_copy_image_to_buffer2_khr } else { ::core::mem::transmute(val) } }, cmd_blit_image2_khr: unsafe { unsafe 
extern "system" fn cmd_blit_image2_khr( _command_buffer: CommandBuffer, _p_blit_image_info: *const BlitImageInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_blit_image2_khr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBlitImage2KHR\0"); let val = _f(cname); if val.is_null() { cmd_blit_image2_khr } else { ::core::mem::transmute(val) } }, cmd_resolve_image2_khr: unsafe { unsafe extern "system" fn cmd_resolve_image2_khr( _command_buffer: CommandBuffer, _p_resolve_image_info: *const ResolveImageInfo2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_resolve_image2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdResolveImage2KHR\0"); let val = _f(cname); if val.is_null() { cmd_resolve_image2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_format_feature_flags2"] pub mod format_feature_flags2 { use super::super::*; pub use { crate::vk::KHR_FORMAT_FEATURE_FLAGS2_NAME as NAME, crate::vk::KHR_FORMAT_FEATURE_FLAGS2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_ray_tracing_maintenance1"] pub mod ray_tracing_maintenance1 { use super::super::*; pub use { crate::vk::KHR_RAY_TRACING_MAINTENANCE1_NAME as NAME, crate::vk::KHR_RAY_TRACING_MAINTENANCE1_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_ray_tracing_maintenance1 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_ray_tracing_maintenance1 device-level function pointers"] pub struct DeviceFn { pub cmd_trace_rays_indirect2_khr: PFN_vkCmdTraceRaysIndirect2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_trace_rays_indirect2_khr: unsafe { unsafe extern "system" fn cmd_trace_rays_indirect2_khr( _command_buffer: CommandBuffer, _indirect_device_address: DeviceAddress, ) { panic!(concat!( "Unable to load ", stringify!(cmd_trace_rays_indirect2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysIndirect2KHR\0"); let val = _f(cname); if val.is_null() { cmd_trace_rays_indirect2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_portability_enumeration"] pub mod portability_enumeration { use super::super::*; pub use { crate::vk::KHR_PORTABILITY_ENUMERATION_NAME as NAME, crate::vk::KHR_PORTABILITY_ENUMERATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_maintenance4"] pub mod maintenance4 { use super::super::*; pub use { crate::vk::KHR_MAINTENANCE4_NAME as NAME, crate::vk::KHR_MAINTENANCE4_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_maintenance4 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> 
&DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_maintenance4 device-level function pointers"] pub struct DeviceFn { pub get_device_buffer_memory_requirements_khr: PFN_vkGetDeviceBufferMemoryRequirements, pub get_device_image_memory_requirements_khr: PFN_vkGetDeviceImageMemoryRequirements, pub get_device_image_sparse_memory_requirements_khr: PFN_vkGetDeviceImageSparseMemoryRequirements, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_device_buffer_memory_requirements_khr: unsafe { unsafe extern "system" fn get_device_buffer_memory_requirements_khr( _device: crate::vk::Device, _p_info: *const DeviceBufferMemoryRequirements<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_buffer_memory_requirements_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceBufferMemoryRequirementsKHR\0", ); let val = _f(cname); if val.is_null() { get_device_buffer_memory_requirements_khr } else { ::core::mem::transmute(val) } }, get_device_image_memory_requirements_khr: unsafe { unsafe extern "system" fn get_device_image_memory_requirements_khr( _device: crate::vk::Device, _p_info: *const DeviceImageMemoryRequirements<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_image_memory_requirements_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceImageMemoryRequirementsKHR\0", ); let val = _f(cname); if val.is_null() { get_device_image_memory_requirements_khr } else { ::core::mem::transmute(val) } }, get_device_image_sparse_memory_requirements_khr: unsafe { unsafe extern "system" fn get_device_image_sparse_memory_requirements_khr( _device: crate::vk::Device, _p_info: *const DeviceImageMemoryRequirements<'_>, _p_sparse_memory_requirement_count: *mut u32, _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_image_sparse_memory_requirements_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceImageSparseMemoryRequirementsKHR\0", ); let val = _f(cname); if val.is_null() { get_device_image_sparse_memory_requirements_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_subgroup_rotate"] pub mod shader_subgroup_rotate { use super::super::*; pub use { crate::vk::KHR_SHADER_SUBGROUP_ROTATE_NAME as NAME, crate::vk::KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_maximal_reconvergence"] pub mod shader_maximal_reconvergence { use super::super::*; pub use { crate::vk::KHR_SHADER_MAXIMAL_RECONVERGENCE_NAME as NAME, crate::vk::KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_maintenance5"] pub mod maintenance5 { use super::super::*; pub use { crate::vk::KHR_MAINTENANCE5_NAME as NAME, crate::vk::KHR_MAINTENANCE5_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_maintenance5 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { 
core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_maintenance5 device-level function pointers"] pub struct DeviceFn { pub cmd_bind_index_buffer2_khr: PFN_vkCmdBindIndexBuffer2KHR, pub get_rendering_area_granularity_khr: PFN_vkGetRenderingAreaGranularityKHR, pub get_device_image_subresource_layout_khr: PFN_vkGetDeviceImageSubresourceLayoutKHR, pub get_image_subresource_layout2_khr: PFN_vkGetImageSubresourceLayout2KHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_bind_index_buffer2_khr: unsafe { unsafe extern "system" fn cmd_bind_index_buffer2_khr( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _size: DeviceSize, _index_type: IndexType, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_index_buffer2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindIndexBuffer2KHR\0"); let val = _f(cname); if val.is_null() { cmd_bind_index_buffer2_khr } else { ::core::mem::transmute(val) } }, get_rendering_area_granularity_khr: unsafe { unsafe extern "system" fn get_rendering_area_granularity_khr( _device: crate::vk::Device, _p_rendering_area_info: *const RenderingAreaInfoKHR<'_>, _p_granularity: *mut Extent2D, ) { panic!(concat!( "Unable to load ", stringify!(get_rendering_area_granularity_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetRenderingAreaGranularityKHR\0", ); let val = _f(cname); if val.is_null() { get_rendering_area_granularity_khr } else { ::core::mem::transmute(val) } }, get_device_image_subresource_layout_khr: unsafe { unsafe extern "system" fn get_device_image_subresource_layout_khr( _device: crate::vk::Device, _p_info: *const DeviceImageSubresourceInfoKHR<'_>, _p_layout: *mut SubresourceLayout2KHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_image_subresource_layout_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceImageSubresourceLayoutKHR\0", ); let val = _f(cname); if val.is_null() { get_device_image_subresource_layout_khr } else { ::core::mem::transmute(val) } }, get_image_subresource_layout2_khr: unsafe { unsafe extern "system" fn get_image_subresource_layout2_khr( _device: crate::vk::Device, _image: Image, _p_subresource: *const ImageSubresource2KHR<'_>, _p_layout: *mut SubresourceLayout2KHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_subresource_layout2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetImageSubresourceLayout2KHR\0", ); let val = _f(cname); if val.is_null() { get_image_subresource_layout2_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_ray_tracing_position_fetch"] pub mod ray_tracing_position_fetch { use super::super::*; pub use { crate::vk::KHR_RAY_TRACING_POSITION_FETCH_NAME as NAME, crate::vk::KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_cooperative_matrix"] pub mod cooperative_matrix { use super::super::*; pub use { crate::vk::KHR_COOPERATIVE_MATRIX_NAME as NAME, crate::vk::KHR_COOPERATIVE_MATRIX_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_cooperative_matrix instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, 
pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_cooperative_matrix instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_cooperative_matrix_properties_khr: PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_cooperative_matrix_properties_khr: unsafe { unsafe extern "system" fn get_physical_device_cooperative_matrix_properties_khr( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut CooperativeMatrixPropertiesKHR<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_cooperative_matrix_properties_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_cooperative_matrix_properties_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_video_decode_av1"] pub mod video_decode_av1 { use super::super::*; pub use { crate::vk::KHR_VIDEO_DECODE_AV1_NAME as NAME, crate::vk::KHR_VIDEO_DECODE_AV1_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_video_maintenance1"] pub mod video_maintenance1 { use super::super::*; pub use { crate::vk::KHR_VIDEO_MAINTENANCE1_NAME as NAME, crate::vk::KHR_VIDEO_MAINTENANCE1_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_vertex_attribute_divisor"] pub mod vertex_attribute_divisor { use super::super::*; pub use { crate::vk::KHR_VERTEX_ATTRIBUTE_DIVISOR_NAME as NAME, crate::vk::KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_load_store_op_none"] pub mod load_store_op_none { use super::super::*; pub use { crate::vk::KHR_LOAD_STORE_OP_NONE_NAME as NAME, crate::vk::KHR_LOAD_STORE_OP_NONE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_shader_float_controls2"] pub mod shader_float_controls2 { use super::super::*; pub use { crate::vk::KHR_SHADER_FLOAT_CONTROLS2_NAME as NAME, crate::vk::KHR_SHADER_FLOAT_CONTROLS2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_index_type_uint8"] pub mod index_type_uint8 { use super::super::*; pub use { crate::vk::KHR_INDEX_TYPE_UINT8_NAME as NAME, crate::vk::KHR_INDEX_TYPE_UINT8_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_line_rasterization"] pub mod line_rasterization { use super::super::*; pub use { crate::vk::KHR_LINE_RASTERIZATION_NAME as NAME, crate::vk::KHR_LINE_RASTERIZATION_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_line_rasterization device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn 
device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_line_rasterization device-level function pointers"] pub struct DeviceFn { pub cmd_set_line_stipple_khr: PFN_vkCmdSetLineStippleKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_line_stipple_khr: unsafe { unsafe extern "system" fn cmd_set_line_stipple_khr( _command_buffer: CommandBuffer, _line_stipple_factor: u32, _line_stipple_pattern: u16, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_line_stipple_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineStippleKHR\0"); let val = _f(cname); if val.is_null() { cmd_set_line_stipple_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_calibrated_timestamps"] pub mod calibrated_timestamps { use super::super::*; pub use { crate::vk::KHR_CALIBRATED_TIMESTAMPS_NAME as NAME, crate::vk::KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_calibrated_timestamps instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_calibrated_timestamps instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_calibrateable_time_domains_khr: PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_calibrateable_time_domains_khr: unsafe { unsafe extern "system" fn get_physical_device_calibrateable_time_domains_khr( _physical_device: PhysicalDevice, _p_time_domain_count: *mut u32, _p_time_domains: *mut TimeDomainKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_calibrateable_time_domains_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceCalibrateableTimeDomainsKHR\0", ); let val = _f(cname); if val.is_null() { get_physical_device_calibrateable_time_domains_khr } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_KHR_calibrated_timestamps device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_calibrated_timestamps device-level function pointers"] pub struct DeviceFn { pub get_calibrated_timestamps_khr: PFN_vkGetCalibratedTimestampsKHR, } unsafe impl Send for DeviceFn {} unsafe impl Sync for 
DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_calibrated_timestamps_khr: unsafe { unsafe extern "system" fn get_calibrated_timestamps_khr( _device: crate::vk::Device, _timestamp_count: u32, _p_timestamp_infos: *const CalibratedTimestampInfoKHR<'_>, _p_timestamps: *mut u64, _p_max_deviation: *mut u64, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_calibrated_timestamps_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetCalibratedTimestampsKHR\0"); let val = _f(cname); if val.is_null() { get_calibrated_timestamps_khr } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_KHR_shader_expect_assume"] pub mod shader_expect_assume { use super::super::*; pub use { crate::vk::KHR_SHADER_EXPECT_ASSUME_NAME as NAME, crate::vk::KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_KHR_maintenance6"] pub mod maintenance6 { use super::super::*; pub use { crate::vk::KHR_MAINTENANCE6_NAME as NAME, crate::vk::KHR_MAINTENANCE6_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_KHR_maintenance6 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_KHR_maintenance6 device-level function pointers"] pub struct DeviceFn { pub cmd_bind_descriptor_sets2_khr: PFN_vkCmdBindDescriptorSets2KHR, pub cmd_push_constants2_khr: PFN_vkCmdPushConstants2KHR, pub cmd_push_descriptor_set2_khr: PFN_vkCmdPushDescriptorSet2KHR, pub cmd_push_descriptor_set_with_template2_khr: PFN_vkCmdPushDescriptorSetWithTemplate2KHR, pub cmd_set_descriptor_buffer_offsets2_ext: PFN_vkCmdSetDescriptorBufferOffsets2EXT, pub cmd_bind_descriptor_buffer_embedded_samplers2_ext: PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_bind_descriptor_sets2_khr: unsafe { unsafe extern "system" fn cmd_bind_descriptor_sets2_khr( _command_buffer: CommandBuffer, _p_bind_descriptor_sets_info: *const BindDescriptorSetsInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_descriptor_sets2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindDescriptorSets2KHR\0"); let val = _f(cname); if val.is_null() { cmd_bind_descriptor_sets2_khr } else { ::core::mem::transmute(val) } }, cmd_push_constants2_khr: unsafe { unsafe extern "system" fn cmd_push_constants2_khr( _command_buffer: CommandBuffer, _p_push_constants_info: *const PushConstantsInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_push_constants2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPushConstants2KHR\0"); let val = _f(cname); if val.is_null() { cmd_push_constants2_khr } else { ::core::mem::transmute(val) } }, cmd_push_descriptor_set2_khr: unsafe { unsafe extern "system" fn cmd_push_descriptor_set2_khr( _command_buffer: CommandBuffer, 
_p_push_descriptor_set_info: *const PushDescriptorSetInfoKHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_push_descriptor_set2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPushDescriptorSet2KHR\0"); let val = _f(cname); if val.is_null() { cmd_push_descriptor_set2_khr } else { ::core::mem::transmute(val) } }, cmd_push_descriptor_set_with_template2_khr: unsafe { unsafe extern "system" fn cmd_push_descriptor_set_with_template2_khr( _command_buffer: CommandBuffer, _p_push_descriptor_set_with_template_info : * const PushDescriptorSetWithTemplateInfoKHR < '_ >, ) { panic!(concat!( "Unable to load ", stringify!(cmd_push_descriptor_set_with_template2_khr) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdPushDescriptorSetWithTemplate2KHR\0", ); let val = _f(cname); if val.is_null() { cmd_push_descriptor_set_with_template2_khr } else { ::core::mem::transmute(val) } }, cmd_set_descriptor_buffer_offsets2_ext: unsafe { unsafe extern "system" fn cmd_set_descriptor_buffer_offsets2_ext( _command_buffer: CommandBuffer, _p_set_descriptor_buffer_offsets_info : * const SetDescriptorBufferOffsetsInfoEXT < '_ >, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_descriptor_buffer_offsets2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetDescriptorBufferOffsets2EXT\0", ); let val = _f(cname); if val.is_null() { cmd_set_descriptor_buffer_offsets2_ext } else { ::core::mem::transmute(val) } }, cmd_bind_descriptor_buffer_embedded_samplers2_ext: unsafe { unsafe extern "system" fn cmd_bind_descriptor_buffer_embedded_samplers2_ext( _command_buffer: CommandBuffer, _p_bind_descriptor_buffer_embedded_samplers_info : * const BindDescriptorBufferEmbeddedSamplersInfoEXT < '_ >, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_descriptor_buffer_embedded_samplers2_ext) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBindDescriptorBufferEmbeddedSamplers2EXT\0", ); let val = _f(cname); if val.is_null() { cmd_bind_descriptor_buffer_embedded_samplers2_ext } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged LUNARG"] pub mod lunarg { #[doc = "VK_LUNARG_direct_driver_loading"] pub mod direct_driver_loading { use super::super::*; pub use { crate::vk::LUNARG_DIRECT_DRIVER_LOADING_NAME as NAME, crate::vk::LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged MSFT"] pub mod msft { #[doc = "VK_MSFT_layered_driver"] pub mod layered_driver { use super::super::*; pub use { crate::vk::MSFT_LAYERED_DRIVER_NAME as NAME, crate::vk::MSFT_LAYERED_DRIVER_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged MVK"] pub mod mvk { #[doc = "VK_MVK_ios_surface"] pub mod ios_surface { use super::super::*; pub use { crate::vk::MVK_IOS_SURFACE_NAME as NAME, crate::vk::MVK_IOS_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_MVK_ios_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_MVK_ios_surface instance-level function pointers"] pub struct InstanceFn { pub 
create_ios_surface_mvk: PFN_vkCreateIOSSurfaceMVK, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_ios_surface_mvk: unsafe { unsafe extern "system" fn create_ios_surface_mvk( _instance: crate::vk::Instance, _p_create_info: *const IOSSurfaceCreateInfoMVK<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_ios_surface_mvk) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateIOSSurfaceMVK\0"); let val = _f(cname); if val.is_null() { create_ios_surface_mvk } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_MVK_macos_surface"] pub mod macos_surface { use super::super::*; pub use { crate::vk::MVK_MACOS_SURFACE_NAME as NAME, crate::vk::MVK_MACOS_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_MVK_macos_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_MVK_macos_surface instance-level function pointers"] pub struct InstanceFn { pub create_mac_os_surface_mvk: PFN_vkCreateMacOSSurfaceMVK, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_mac_os_surface_mvk: unsafe { unsafe extern "system" fn create_mac_os_surface_mvk( _instance: crate::vk::Instance, _p_create_info: *const MacOSSurfaceCreateInfoMVK<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_mac_os_surface_mvk) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateMacOSSurfaceMVK\0"); let val = _f(cname); if val.is_null() { create_mac_os_surface_mvk } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged NN"] pub mod nn { #[doc = "VK_NN_vi_surface"] pub mod vi_surface { use super::super::*; pub use { crate::vk::NN_VI_SURFACE_NAME as NAME, crate::vk::NN_VI_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NN_vi_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NN_vi_surface instance-level function pointers"] pub struct InstanceFn { pub create_vi_surface_nn: PFN_vkCreateViSurfaceNN, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const 
c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_vi_surface_nn: unsafe { unsafe extern "system" fn create_vi_surface_nn( _instance: crate::vk::Instance, _p_create_info: *const ViSurfaceCreateInfoNN<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_vi_surface_nn))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateViSurfaceNN\0"); let val = _f(cname); if val.is_null() { create_vi_surface_nn } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged NV"] pub mod nv { #[doc = "VK_NV_glsl_shader"] pub mod glsl_shader { use super::super::*; pub use { crate::vk::NV_GLSL_SHADER_NAME as NAME, crate::vk::NV_GLSL_SHADER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_dedicated_allocation"] pub mod dedicated_allocation { use super::super::*; pub use { crate::vk::NV_DEDICATED_ALLOCATION_NAME as NAME, crate::vk::NV_DEDICATED_ALLOCATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_corner_sampled_image"] pub mod corner_sampled_image { use super::super::*; pub use { crate::vk::NV_CORNER_SAMPLED_IMAGE_NAME as NAME, crate::vk::NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_external_memory_capabilities"] pub mod external_memory_capabilities { use super::super::*; pub use { crate::vk::NV_EXTERNAL_MEMORY_CAPABILITIES_NAME as NAME, crate::vk::NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_external_memory_capabilities instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_external_memory_capabilities instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_external_image_format_properties_nv: PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_external_image_format_properties_nv: unsafe { unsafe extern "system" fn get_physical_device_external_image_format_properties_nv( _physical_device: PhysicalDevice, _format: Format, _ty: ImageType, _tiling: ImageTiling, _usage: ImageUsageFlags, _flags: ImageCreateFlags, _external_handle_type: ExternalMemoryHandleTypeFlagsNV, _p_external_image_format_properties : * mut ExternalImageFormatPropertiesNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_image_format_properties_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceExternalImageFormatPropertiesNV\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_image_format_properties_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_external_memory"] pub mod external_memory { use super::super::*; pub use { crate::vk::NV_EXTERNAL_MEMORY_NAME as NAME, 
crate::vk::NV_EXTERNAL_MEMORY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_external_memory_win32"] pub mod external_memory_win32 { use super::super::*; pub use { crate::vk::NV_EXTERNAL_MEMORY_WIN32_NAME as NAME, crate::vk::NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_external_memory_win32 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_external_memory_win32 device-level function pointers"] pub struct DeviceFn { pub get_memory_win32_handle_nv: PFN_vkGetMemoryWin32HandleNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_memory_win32_handle_nv: unsafe { unsafe extern "system" fn get_memory_win32_handle_nv( _device: crate::vk::Device, _memory: DeviceMemory, _handle_type: ExternalMemoryHandleTypeFlagsNV, _p_handle: *mut HANDLE, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_win32_handle_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryWin32HandleNV\0"); let val = _f(cname); if val.is_null() { get_memory_win32_handle_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_win32_keyed_mutex"] pub mod win32_keyed_mutex { use super::super::*; pub use { crate::vk::NV_WIN32_KEYED_MUTEX_NAME as NAME, crate::vk::NV_WIN32_KEYED_MUTEX_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_clip_space_w_scaling"] pub mod clip_space_w_scaling { use super::super::*; pub use { crate::vk::NV_CLIP_SPACE_W_SCALING_NAME as NAME, crate::vk::NV_CLIP_SPACE_W_SCALING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_clip_space_w_scaling device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_clip_space_w_scaling device-level function pointers"] pub struct DeviceFn { pub cmd_set_viewport_w_scaling_nv: PFN_vkCmdSetViewportWScalingNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_viewport_w_scaling_nv: unsafe { unsafe extern "system" fn cmd_set_viewport_w_scaling_nv( _command_buffer: CommandBuffer, _first_viewport: u32, _viewport_count: u32, _p_viewport_w_scalings: *const ViewportWScalingNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_w_scaling_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportWScalingNV\0"); let val = _f(cname); if 
val.is_null() { cmd_set_viewport_w_scaling_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_sample_mask_override_coverage"] pub mod sample_mask_override_coverage { use super::super::*; pub use { crate::vk::NV_SAMPLE_MASK_OVERRIDE_COVERAGE_NAME as NAME, crate::vk::NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_geometry_shader_passthrough"] pub mod geometry_shader_passthrough { use super::super::*; pub use { crate::vk::NV_GEOMETRY_SHADER_PASSTHROUGH_NAME as NAME, crate::vk::NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_viewport_array2"] pub mod viewport_array2 { use super::super::*; pub use { crate::vk::NV_VIEWPORT_ARRAY2_NAME as NAME, crate::vk::NV_VIEWPORT_ARRAY2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_viewport_swizzle"] pub mod viewport_swizzle { use super::super::*; pub use { crate::vk::NV_VIEWPORT_SWIZZLE_NAME as NAME, crate::vk::NV_VIEWPORT_SWIZZLE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_fragment_coverage_to_color"] pub mod fragment_coverage_to_color { use super::super::*; pub use { crate::vk::NV_FRAGMENT_COVERAGE_TO_COLOR_NAME as NAME, crate::vk::NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_framebuffer_mixed_samples"] pub mod framebuffer_mixed_samples { use super::super::*; pub use { crate::vk::NV_FRAMEBUFFER_MIXED_SAMPLES_NAME as NAME, crate::vk::NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_fill_rectangle"] pub mod fill_rectangle { use super::super::*; pub use { crate::vk::NV_FILL_RECTANGLE_NAME as NAME, crate::vk::NV_FILL_RECTANGLE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_shader_sm_builtins"] pub mod shader_sm_builtins { use super::super::*; pub use { crate::vk::NV_SHADER_SM_BUILTINS_NAME as NAME, crate::vk::NV_SHADER_SM_BUILTINS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_shading_rate_image"] pub mod shading_rate_image { use super::super::*; pub use { crate::vk::NV_SHADING_RATE_IMAGE_NAME as NAME, crate::vk::NV_SHADING_RATE_IMAGE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_shading_rate_image device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_shading_rate_image device-level function pointers"] pub struct DeviceFn { pub cmd_bind_shading_rate_image_nv: PFN_vkCmdBindShadingRateImageNV, pub cmd_set_viewport_shading_rate_palette_nv: PFN_vkCmdSetViewportShadingRatePaletteNV, pub cmd_set_coarse_sample_order_nv: PFN_vkCmdSetCoarseSampleOrderNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_bind_shading_rate_image_nv: unsafe { unsafe extern "system" fn cmd_bind_shading_rate_image_nv( _command_buffer: CommandBuffer, _image_view: ImageView, _image_layout: ImageLayout, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_shading_rate_image_nv) )) } let cname = 
CStr::from_bytes_with_nul_unchecked(b"vkCmdBindShadingRateImageNV\0"); let val = _f(cname); if val.is_null() { cmd_bind_shading_rate_image_nv } else { ::core::mem::transmute(val) } }, cmd_set_viewport_shading_rate_palette_nv: unsafe { unsafe extern "system" fn cmd_set_viewport_shading_rate_palette_nv( _command_buffer: CommandBuffer, _first_viewport: u32, _viewport_count: u32, _p_shading_rate_palettes: *const ShadingRatePaletteNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_shading_rate_palette_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetViewportShadingRatePaletteNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_viewport_shading_rate_palette_nv } else { ::core::mem::transmute(val) } }, cmd_set_coarse_sample_order_nv: unsafe { unsafe extern "system" fn cmd_set_coarse_sample_order_nv( _command_buffer: CommandBuffer, _sample_order_type: CoarseSampleOrderTypeNV, _custom_sample_order_count: u32, _p_custom_sample_orders: *const CoarseSampleOrderCustomNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_coarse_sample_order_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCoarseSampleOrderNV\0"); let val = _f(cname); if val.is_null() { cmd_set_coarse_sample_order_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_ray_tracing"] pub mod ray_tracing { use super::super::*; pub use { crate::vk::NV_RAY_TRACING_NAME as NAME, crate::vk::NV_RAY_TRACING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_ray_tracing device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_ray_tracing device-level function pointers"] pub struct DeviceFn { pub create_acceleration_structure_nv: PFN_vkCreateAccelerationStructureNV, pub destroy_acceleration_structure_nv: PFN_vkDestroyAccelerationStructureNV, pub get_acceleration_structure_memory_requirements_nv: PFN_vkGetAccelerationStructureMemoryRequirementsNV, pub bind_acceleration_structure_memory_nv: PFN_vkBindAccelerationStructureMemoryNV, pub cmd_build_acceleration_structure_nv: PFN_vkCmdBuildAccelerationStructureNV, pub cmd_copy_acceleration_structure_nv: PFN_vkCmdCopyAccelerationStructureNV, pub cmd_trace_rays_nv: PFN_vkCmdTraceRaysNV, pub create_ray_tracing_pipelines_nv: PFN_vkCreateRayTracingPipelinesNV, pub get_ray_tracing_shader_group_handles_nv: PFN_vkGetRayTracingShaderGroupHandlesKHR, pub get_acceleration_structure_handle_nv: PFN_vkGetAccelerationStructureHandleNV, pub cmd_write_acceleration_structures_properties_nv: PFN_vkCmdWriteAccelerationStructuresPropertiesNV, pub compile_deferred_nv: PFN_vkCompileDeferredNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_acceleration_structure_nv: unsafe { unsafe extern "system" fn create_acceleration_structure_nv( _device: crate::vk::Device, _p_create_info: *const AccelerationStructureCreateInfoNV<'_>, _p_allocator: *const AllocationCallbacks<'_>, 
_p_acceleration_structure: *mut AccelerationStructureNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_acceleration_structure_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateAccelerationStructureNV\0", ); let val = _f(cname); if val.is_null() { create_acceleration_structure_nv } else { ::core::mem::transmute(val) } }, destroy_acceleration_structure_nv: unsafe { unsafe extern "system" fn destroy_acceleration_structure_nv( _device: crate::vk::Device, _acceleration_structure: AccelerationStructureNV, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_acceleration_structure_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyAccelerationStructureNV\0", ); let val = _f(cname); if val.is_null() { destroy_acceleration_structure_nv } else { ::core::mem::transmute(val) } }, get_acceleration_structure_memory_requirements_nv: unsafe { unsafe extern "system" fn get_acceleration_structure_memory_requirements_nv( _device: crate::vk::Device, _p_info: *const AccelerationStructureMemoryRequirementsInfoNV<'_>, _p_memory_requirements: *mut MemoryRequirements2KHR<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_acceleration_structure_memory_requirements_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetAccelerationStructureMemoryRequirementsNV\0", ); let val = _f(cname); if val.is_null() { get_acceleration_structure_memory_requirements_nv } else { ::core::mem::transmute(val) } }, bind_acceleration_structure_memory_nv: unsafe { unsafe extern "system" fn bind_acceleration_structure_memory_nv( _device: crate::vk::Device, _bind_info_count: u32, _p_bind_infos: *const BindAccelerationStructureMemoryInfoNV<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(bind_acceleration_structure_memory_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkBindAccelerationStructureMemoryNV\0", ); let val = _f(cname); if val.is_null() { bind_acceleration_structure_memory_nv } else { ::core::mem::transmute(val) } }, cmd_build_acceleration_structure_nv: unsafe { unsafe extern "system" fn cmd_build_acceleration_structure_nv( _command_buffer: CommandBuffer, _p_info: *const AccelerationStructureInfoNV<'_>, _instance_data: Buffer, _instance_offset: DeviceSize, _update: Bool32, _dst: AccelerationStructureNV, _src: AccelerationStructureNV, _scratch: Buffer, _scratch_offset: DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_build_acceleration_structure_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBuildAccelerationStructureNV\0", ); let val = _f(cname); if val.is_null() { cmd_build_acceleration_structure_nv } else { ::core::mem::transmute(val) } }, cmd_copy_acceleration_structure_nv: unsafe { unsafe extern "system" fn cmd_copy_acceleration_structure_nv( _command_buffer: CommandBuffer, _dst: AccelerationStructureNV, _src: AccelerationStructureNV, _mode: CopyAccelerationStructureModeKHR, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_acceleration_structure_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdCopyAccelerationStructureNV\0", ); let val = _f(cname); if val.is_null() { cmd_copy_acceleration_structure_nv } else { ::core::mem::transmute(val) } }, cmd_trace_rays_nv: unsafe { unsafe extern "system" fn cmd_trace_rays_nv( _command_buffer: CommandBuffer, _raygen_shader_binding_table_buffer: Buffer, _raygen_shader_binding_offset: DeviceSize, _miss_shader_binding_table_buffer: Buffer, _miss_shader_binding_offset: 
DeviceSize, _miss_shader_binding_stride: DeviceSize, _hit_shader_binding_table_buffer: Buffer, _hit_shader_binding_offset: DeviceSize, _hit_shader_binding_stride: DeviceSize, _callable_shader_binding_table_buffer: Buffer, _callable_shader_binding_offset: DeviceSize, _callable_shader_binding_stride: DeviceSize, _width: u32, _height: u32, _depth: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_nv))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysNV\0"); let val = _f(cname); if val.is_null() { cmd_trace_rays_nv } else { ::core::mem::transmute(val) } }, create_ray_tracing_pipelines_nv: unsafe { unsafe extern "system" fn create_ray_tracing_pipelines_nv( _device: crate::vk::Device, _pipeline_cache: PipelineCache, _create_info_count: u32, _p_create_infos: *const RayTracingPipelineCreateInfoNV<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipelines: *mut Pipeline, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_ray_tracing_pipelines_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateRayTracingPipelinesNV\0"); let val = _f(cname); if val.is_null() { create_ray_tracing_pipelines_nv } else { ::core::mem::transmute(val) } }, get_ray_tracing_shader_group_handles_nv: unsafe { unsafe extern "system" fn get_ray_tracing_shader_group_handles_nv( _device: crate::vk::Device, _pipeline: Pipeline, _first_group: u32, _group_count: u32, _data_size: usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_ray_tracing_shader_group_handles_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetRayTracingShaderGroupHandlesNV\0", ); let val = _f(cname); if val.is_null() { get_ray_tracing_shader_group_handles_nv } else { ::core::mem::transmute(val) } }, get_acceleration_structure_handle_nv: unsafe { unsafe extern "system" fn get_acceleration_structure_handle_nv( _device: crate::vk::Device, _acceleration_structure: AccelerationStructureNV, _data_size: usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_acceleration_structure_handle_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetAccelerationStructureHandleNV\0", ); let val = _f(cname); if val.is_null() { get_acceleration_structure_handle_nv } else { ::core::mem::transmute(val) } }, cmd_write_acceleration_structures_properties_nv: unsafe { unsafe extern "system" fn cmd_write_acceleration_structures_properties_nv( _command_buffer: CommandBuffer, _acceleration_structure_count: u32, _p_acceleration_structures: *const AccelerationStructureNV, _query_type: QueryType, _query_pool: QueryPool, _first_query: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_write_acceleration_structures_properties_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdWriteAccelerationStructuresPropertiesNV\0", ); let val = _f(cname); if val.is_null() { cmd_write_acceleration_structures_properties_nv } else { ::core::mem::transmute(val) } }, compile_deferred_nv: unsafe { unsafe extern "system" fn compile_deferred_nv( _device: crate::vk::Device, _pipeline: Pipeline, _shader: u32, ) -> Result { panic!(concat!("Unable to load ", stringify!(compile_deferred_nv))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCompileDeferredNV\0"); let val = _f(cname); if val.is_null() { compile_deferred_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_representative_fragment_test"] pub mod representative_fragment_test { use super::super::*; pub use { 
crate::vk::NV_REPRESENTATIVE_FRAGMENT_TEST_NAME as NAME, crate::vk::NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_shader_subgroup_partitioned"] pub mod shader_subgroup_partitioned { use super::super::*; pub use { crate::vk::NV_SHADER_SUBGROUP_PARTITIONED_NAME as NAME, crate::vk::NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_compute_shader_derivatives"] pub mod compute_shader_derivatives { use super::super::*; pub use { crate::vk::NV_COMPUTE_SHADER_DERIVATIVES_NAME as NAME, crate::vk::NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_mesh_shader"] pub mod mesh_shader { use super::super::*; pub use { crate::vk::NV_MESH_SHADER_NAME as NAME, crate::vk::NV_MESH_SHADER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_mesh_shader device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_mesh_shader device-level function pointers"] pub struct DeviceFn { pub cmd_draw_mesh_tasks_nv: PFN_vkCmdDrawMeshTasksNV, pub cmd_draw_mesh_tasks_indirect_nv: PFN_vkCmdDrawMeshTasksIndirectNV, pub cmd_draw_mesh_tasks_indirect_count_nv: PFN_vkCmdDrawMeshTasksIndirectCountNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_mesh_tasks_nv: unsafe { unsafe extern "system" fn cmd_draw_mesh_tasks_nv( _command_buffer: CommandBuffer, _task_count: u32, _first_task: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_mesh_tasks_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMeshTasksNV\0"); let val = _f(cname); if val.is_null() { cmd_draw_mesh_tasks_nv } else { ::core::mem::transmute(val) } }, cmd_draw_mesh_tasks_indirect_nv: unsafe { unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_nv( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_mesh_tasks_indirect_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMeshTasksIndirectNV\0"); let val = _f(cname); if val.is_null() { cmd_draw_mesh_tasks_indirect_nv } else { ::core::mem::transmute(val) } }, cmd_draw_mesh_tasks_indirect_count_nv: unsafe { unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_count_nv( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_mesh_tasks_indirect_count_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDrawMeshTasksIndirectCountNV\0", ); let val = _f(cname); if val.is_null() { cmd_draw_mesh_tasks_indirect_count_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_fragment_shader_barycentric"] pub mod fragment_shader_barycentric { use super::super::*; pub use { crate::vk::NV_FRAGMENT_SHADER_BARYCENTRIC_NAME as NAME, 
crate::vk::NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_shader_image_footprint"] pub mod shader_image_footprint { use super::super::*; pub use { crate::vk::NV_SHADER_IMAGE_FOOTPRINT_NAME as NAME, crate::vk::NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_scissor_exclusive"] pub mod scissor_exclusive { use super::super::*; pub use { crate::vk::NV_SCISSOR_EXCLUSIVE_NAME as NAME, crate::vk::NV_SCISSOR_EXCLUSIVE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_scissor_exclusive device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_scissor_exclusive device-level function pointers"] pub struct DeviceFn { pub cmd_set_exclusive_scissor_enable_nv: PFN_vkCmdSetExclusiveScissorEnableNV, pub cmd_set_exclusive_scissor_nv: PFN_vkCmdSetExclusiveScissorNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_exclusive_scissor_enable_nv: unsafe { unsafe extern "system" fn cmd_set_exclusive_scissor_enable_nv( _command_buffer: CommandBuffer, _first_exclusive_scissor: u32, _exclusive_scissor_count: u32, _p_exclusive_scissor_enables: *const Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_exclusive_scissor_enable_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetExclusiveScissorEnableNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_exclusive_scissor_enable_nv } else { ::core::mem::transmute(val) } }, cmd_set_exclusive_scissor_nv: unsafe { unsafe extern "system" fn cmd_set_exclusive_scissor_nv( _command_buffer: CommandBuffer, _first_exclusive_scissor: u32, _exclusive_scissor_count: u32, _p_exclusive_scissors: *const Rect2D, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_exclusive_scissor_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetExclusiveScissorNV\0"); let val = _f(cname); if val.is_null() { cmd_set_exclusive_scissor_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_device_diagnostic_checkpoints"] pub mod device_diagnostic_checkpoints { use super::super::*; pub use { crate::vk::NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_NAME as NAME, crate::vk::NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_device_diagnostic_checkpoints device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_device_diagnostic_checkpoints device-level function pointers"] pub struct DeviceFn { pub cmd_set_checkpoint_nv: 
PFN_vkCmdSetCheckpointNV, pub get_queue_checkpoint_data_nv: PFN_vkGetQueueCheckpointDataNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_checkpoint_nv: unsafe { unsafe extern "system" fn cmd_set_checkpoint_nv( _command_buffer: CommandBuffer, _p_checkpoint_marker: *const c_void, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_checkpoint_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCheckpointNV\0"); let val = _f(cname); if val.is_null() { cmd_set_checkpoint_nv } else { ::core::mem::transmute(val) } }, get_queue_checkpoint_data_nv: unsafe { unsafe extern "system" fn get_queue_checkpoint_data_nv( _queue: Queue, _p_checkpoint_data_count: *mut u32, _p_checkpoint_data: *mut CheckpointDataNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_queue_checkpoint_data_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetQueueCheckpointDataNV\0"); let val = _f(cname); if val.is_null() { get_queue_checkpoint_data_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_dedicated_allocation_image_aliasing"] pub mod dedicated_allocation_image_aliasing { use super::super::*; pub use { crate::vk::NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_NAME as NAME, crate::vk::NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_cooperative_matrix"] pub mod cooperative_matrix { use super::super::*; pub use { crate::vk::NV_COOPERATIVE_MATRIX_NAME as NAME, crate::vk::NV_COOPERATIVE_MATRIX_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_cooperative_matrix instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_cooperative_matrix instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_cooperative_matrix_properties_nv: PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_cooperative_matrix_properties_nv: unsafe { unsafe extern "system" fn get_physical_device_cooperative_matrix_properties_nv( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut CooperativeMatrixPropertiesNV<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_cooperative_matrix_properties_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceCooperativeMatrixPropertiesNV\0", ); let val = _f(cname); if val.is_null() { get_physical_device_cooperative_matrix_properties_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_coverage_reduction_mode"] pub mod coverage_reduction_mode { use super::super::*; pub use { crate::vk::NV_COVERAGE_REDUCTION_MODE_NAME as NAME, 
crate::vk::NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_coverage_reduction_mode instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_coverage_reduction_mode instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_supported_framebuffer_mixed_samples_combinations_nv: PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_supported_framebuffer_mixed_samples_combinations_nv: unsafe { unsafe extern "system" fn get_physical_device_supported_framebuffer_mixed_samples_combinations_nv( _physical_device: PhysicalDevice, _p_combination_count: *mut u32, _p_combinations: *mut FramebufferMixedSamplesCombinationNV<'_>, ) -> Result { panic ! (concat ! ("Unable to load " , stringify ! (get_physical_device_supported_framebuffer_mixed_samples_combinations_nv))) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV\0", ); let val = _f(cname); if val.is_null() { get_physical_device_supported_framebuffer_mixed_samples_combinations_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_device_generated_commands"] pub mod device_generated_commands { use super::super::*; pub use { crate::vk::NV_DEVICE_GENERATED_COMMANDS_NAME as NAME, crate::vk::NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_device_generated_commands device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_device_generated_commands device-level function pointers"] pub struct DeviceFn { pub get_generated_commands_memory_requirements_nv: PFN_vkGetGeneratedCommandsMemoryRequirementsNV, pub cmd_preprocess_generated_commands_nv: PFN_vkCmdPreprocessGeneratedCommandsNV, pub cmd_execute_generated_commands_nv: PFN_vkCmdExecuteGeneratedCommandsNV, pub cmd_bind_pipeline_shader_group_nv: PFN_vkCmdBindPipelineShaderGroupNV, pub create_indirect_commands_layout_nv: PFN_vkCreateIndirectCommandsLayoutNV, pub destroy_indirect_commands_layout_nv: PFN_vkDestroyIndirectCommandsLayoutNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { 
get_generated_commands_memory_requirements_nv: unsafe { unsafe extern "system" fn get_generated_commands_memory_requirements_nv( _device: crate::vk::Device, _p_info: *const GeneratedCommandsMemoryRequirementsInfoNV<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_generated_commands_memory_requirements_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetGeneratedCommandsMemoryRequirementsNV\0", ); let val = _f(cname); if val.is_null() { get_generated_commands_memory_requirements_nv } else { ::core::mem::transmute(val) } }, cmd_preprocess_generated_commands_nv: unsafe { unsafe extern "system" fn cmd_preprocess_generated_commands_nv( _command_buffer: CommandBuffer, _p_generated_commands_info: *const GeneratedCommandsInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_preprocess_generated_commands_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdPreprocessGeneratedCommandsNV\0", ); let val = _f(cname); if val.is_null() { cmd_preprocess_generated_commands_nv } else { ::core::mem::transmute(val) } }, cmd_execute_generated_commands_nv: unsafe { unsafe extern "system" fn cmd_execute_generated_commands_nv( _command_buffer: CommandBuffer, _is_preprocessed: Bool32, _p_generated_commands_info: *const GeneratedCommandsInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_execute_generated_commands_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdExecuteGeneratedCommandsNV\0", ); let val = _f(cname); if val.is_null() { cmd_execute_generated_commands_nv } else { ::core::mem::transmute(val) } }, cmd_bind_pipeline_shader_group_nv: unsafe { unsafe extern "system" fn cmd_bind_pipeline_shader_group_nv( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _pipeline: Pipeline, _group_index: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_pipeline_shader_group_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdBindPipelineShaderGroupNV\0", ); let val = _f(cname); if val.is_null() { cmd_bind_pipeline_shader_group_nv } else { ::core::mem::transmute(val) } }, create_indirect_commands_layout_nv: unsafe { unsafe extern "system" fn create_indirect_commands_layout_nv( _device: crate::vk::Device, _p_create_info: *const IndirectCommandsLayoutCreateInfoNV<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_indirect_commands_layout: *mut IndirectCommandsLayoutNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_indirect_commands_layout_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCreateIndirectCommandsLayoutNV\0", ); let val = _f(cname); if val.is_null() { create_indirect_commands_layout_nv } else { ::core::mem::transmute(val) } }, destroy_indirect_commands_layout_nv: unsafe { unsafe extern "system" fn destroy_indirect_commands_layout_nv( _device: crate::vk::Device, _indirect_commands_layout: IndirectCommandsLayoutNV, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_indirect_commands_layout_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkDestroyIndirectCommandsLayoutNV\0", ); let val = _f(cname); if val.is_null() { destroy_indirect_commands_layout_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_inherited_viewport_scissor"] pub mod inherited_viewport_scissor { use super::super::*; pub use { crate::vk::NV_INHERITED_VIEWPORT_SCISSOR_NAME as NAME, crate::vk::NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION as SPEC_VERSION, }; } 
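// A minimal usage sketch for the device-level wrappers generated above, shown for
// VK_NV_device_generated_commands; the other extension modules in this file follow the
// same pattern. The names `instance` and `device` are assumed to be an already-created
// `ash::Instance` and `ash::Device`, so this stays as a comment-only illustration:
//
//     // Resolve the extension's function pointers through vkGetDeviceProcAddr.
//     let dgc = device_generated_commands::Device::new(&instance, &device);
//     // The raw table is reachable via `fp()`; any entry point that fails to resolve
//     // (a null pointer from the loader) is backed by a stub that panics with
//     // "Unable to load <name>" if it is ever called.
//     let fp: &device_generated_commands::DeviceFn = dgc.fp();
//     let _pfn = fp.cmd_execute_generated_commands_nv;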
#[doc = "VK_NV_present_barrier"] pub mod present_barrier { use super::super::*; pub use { crate::vk::NV_PRESENT_BARRIER_NAME as NAME, crate::vk::NV_PRESENT_BARRIER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_device_diagnostics_config"] pub mod device_diagnostics_config { use super::super::*; pub use { crate::vk::NV_DEVICE_DIAGNOSTICS_CONFIG_NAME as NAME, crate::vk::NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_cuda_kernel_launch"] pub mod cuda_kernel_launch { use super::super::*; pub use { crate::vk::NV_CUDA_KERNEL_LAUNCH_NAME as NAME, crate::vk::NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_cuda_kernel_launch device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_cuda_kernel_launch device-level function pointers"] pub struct DeviceFn { pub create_cuda_module_nv: PFN_vkCreateCudaModuleNV, pub get_cuda_module_cache_nv: PFN_vkGetCudaModuleCacheNV, pub create_cuda_function_nv: PFN_vkCreateCudaFunctionNV, pub destroy_cuda_module_nv: PFN_vkDestroyCudaModuleNV, pub destroy_cuda_function_nv: PFN_vkDestroyCudaFunctionNV, pub cmd_cuda_launch_kernel_nv: PFN_vkCmdCudaLaunchKernelNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_cuda_module_nv: unsafe { unsafe extern "system" fn create_cuda_module_nv( _device: crate::vk::Device, _p_create_info: *const CudaModuleCreateInfoNV<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_module: *mut CudaModuleNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_cuda_module_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateCudaModuleNV\0"); let val = _f(cname); if val.is_null() { create_cuda_module_nv } else { ::core::mem::transmute(val) } }, get_cuda_module_cache_nv: unsafe { unsafe extern "system" fn get_cuda_module_cache_nv( _device: crate::vk::Device, _module: CudaModuleNV, _p_cache_size: *mut usize, _p_cache_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_cuda_module_cache_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetCudaModuleCacheNV\0"); let val = _f(cname); if val.is_null() { get_cuda_module_cache_nv } else { ::core::mem::transmute(val) } }, create_cuda_function_nv: unsafe { unsafe extern "system" fn create_cuda_function_nv( _device: crate::vk::Device, _p_create_info: *const CudaFunctionCreateInfoNV<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_function: *mut CudaFunctionNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_cuda_function_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateCudaFunctionNV\0"); let val = _f(cname); if val.is_null() { create_cuda_function_nv } else { ::core::mem::transmute(val) } }, destroy_cuda_module_nv: unsafe { unsafe extern "system" fn destroy_cuda_module_nv( _device: crate::vk::Device, _module: CudaModuleNV, _p_allocator: *const AllocationCallbacks<'_>, ) { 
panic!(concat!( "Unable to load ", stringify!(destroy_cuda_module_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyCudaModuleNV\0"); let val = _f(cname); if val.is_null() { destroy_cuda_module_nv } else { ::core::mem::transmute(val) } }, destroy_cuda_function_nv: unsafe { unsafe extern "system" fn destroy_cuda_function_nv( _device: crate::vk::Device, _function: CudaFunctionNV, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_cuda_function_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyCudaFunctionNV\0"); let val = _f(cname); if val.is_null() { destroy_cuda_function_nv } else { ::core::mem::transmute(val) } }, cmd_cuda_launch_kernel_nv: unsafe { unsafe extern "system" fn cmd_cuda_launch_kernel_nv( _command_buffer: CommandBuffer, _p_launch_info: *const CudaLaunchInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_cuda_launch_kernel_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCudaLaunchKernelNV\0"); let val = _f(cname); if val.is_null() { cmd_cuda_launch_kernel_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_low_latency"] pub mod low_latency { use super::super::*; pub use { crate::vk::NV_LOW_LATENCY_NAME as NAME, crate::vk::NV_LOW_LATENCY_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_fragment_shading_rate_enums"] pub mod fragment_shading_rate_enums { use super::super::*; pub use { crate::vk::NV_FRAGMENT_SHADING_RATE_ENUMS_NAME as NAME, crate::vk::NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_fragment_shading_rate_enums device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_fragment_shading_rate_enums device-level function pointers"] pub struct DeviceFn { pub cmd_set_fragment_shading_rate_enum_nv: PFN_vkCmdSetFragmentShadingRateEnumNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_set_fragment_shading_rate_enum_nv: unsafe { unsafe extern "system" fn cmd_set_fragment_shading_rate_enum_nv( _command_buffer: CommandBuffer, _shading_rate: FragmentShadingRateNV, _combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2usize], ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_fragment_shading_rate_enum_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdSetFragmentShadingRateEnumNV\0", ); let val = _f(cname); if val.is_null() { cmd_set_fragment_shading_rate_enum_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_ray_tracing_motion_blur"] pub mod ray_tracing_motion_blur { use super::super::*; pub use { crate::vk::NV_RAY_TRACING_MOTION_BLUR_NAME as NAME, crate::vk::NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_acquire_winrt_display"] pub mod acquire_winrt_display { use super::super::*; pub use { crate::vk::NV_ACQUIRE_WINRT_DISPLAY_NAME as NAME, 
crate::vk::NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_acquire_winrt_display instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_acquire_winrt_display instance-level function pointers"] pub struct InstanceFn { pub acquire_winrt_display_nv: PFN_vkAcquireWinrtDisplayNV, pub get_winrt_display_nv: PFN_vkGetWinrtDisplayNV, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { acquire_winrt_display_nv: unsafe { unsafe extern "system" fn acquire_winrt_display_nv( _physical_device: PhysicalDevice, _display: DisplayKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(acquire_winrt_display_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAcquireWinrtDisplayNV\0"); let val = _f(cname); if val.is_null() { acquire_winrt_display_nv } else { ::core::mem::transmute(val) } }, get_winrt_display_nv: unsafe { unsafe extern "system" fn get_winrt_display_nv( _physical_device: PhysicalDevice, _device_relative_id: u32, _p_display: *mut DisplayKHR, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_winrt_display_nv))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetWinrtDisplayNV\0"); let val = _f(cname); if val.is_null() { get_winrt_display_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_external_memory_rdma"] pub mod external_memory_rdma { use super::super::*; pub use { crate::vk::NV_EXTERNAL_MEMORY_RDMA_NAME as NAME, crate::vk::NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_external_memory_rdma device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_external_memory_rdma device-level function pointers"] pub struct DeviceFn { pub get_memory_remote_address_nv: PFN_vkGetMemoryRemoteAddressNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_memory_remote_address_nv: unsafe { unsafe extern "system" fn get_memory_remote_address_nv( _device: crate::vk::Device, _p_memory_get_remote_address_info: *const MemoryGetRemoteAddressInfoNV< '_, >, _p_address: *mut RemoteAddressNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_memory_remote_address_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryRemoteAddressNV\0"); let val = 
_f(cname); if val.is_null() { get_memory_remote_address_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_displacement_micromap"] pub mod displacement_micromap { use super::super::*; pub use { crate::vk::NV_DISPLACEMENT_MICROMAP_NAME as NAME, crate::vk::NV_DISPLACEMENT_MICROMAP_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_copy_memory_indirect"] pub mod copy_memory_indirect { use super::super::*; pub use { crate::vk::NV_COPY_MEMORY_INDIRECT_NAME as NAME, crate::vk::NV_COPY_MEMORY_INDIRECT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_copy_memory_indirect device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_copy_memory_indirect device-level function pointers"] pub struct DeviceFn { pub cmd_copy_memory_indirect_nv: PFN_vkCmdCopyMemoryIndirectNV, pub cmd_copy_memory_to_image_indirect_nv: PFN_vkCmdCopyMemoryToImageIndirectNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_copy_memory_indirect_nv: unsafe { unsafe extern "system" fn cmd_copy_memory_indirect_nv( _command_buffer: CommandBuffer, _copy_buffer_address: DeviceAddress, _copy_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_memory_indirect_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyMemoryIndirectNV\0"); let val = _f(cname); if val.is_null() { cmd_copy_memory_indirect_nv } else { ::core::mem::transmute(val) } }, cmd_copy_memory_to_image_indirect_nv: unsafe { unsafe extern "system" fn cmd_copy_memory_to_image_indirect_nv( _command_buffer: CommandBuffer, _copy_buffer_address: DeviceAddress, _copy_count: u32, _stride: u32, _dst_image: Image, _dst_image_layout: ImageLayout, _p_image_subresources: *const ImageSubresourceLayers, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_memory_to_image_indirect_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdCopyMemoryToImageIndirectNV\0", ); let val = _f(cname); if val.is_null() { cmd_copy_memory_to_image_indirect_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_memory_decompression"] pub mod memory_decompression { use super::super::*; pub use { crate::vk::NV_MEMORY_DECOMPRESSION_NAME as NAME, crate::vk::NV_MEMORY_DECOMPRESSION_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_memory_decompression device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_memory_decompression device-level function pointers"] pub struct DeviceFn { 
pub cmd_decompress_memory_nv: PFN_vkCmdDecompressMemoryNV, pub cmd_decompress_memory_indirect_count_nv: PFN_vkCmdDecompressMemoryIndirectCountNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_decompress_memory_nv: unsafe { unsafe extern "system" fn cmd_decompress_memory_nv( _command_buffer: CommandBuffer, _decompress_region_count: u32, _p_decompress_memory_regions: *const DecompressMemoryRegionNV, ) { panic!(concat!( "Unable to load ", stringify!(cmd_decompress_memory_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDecompressMemoryNV\0"); let val = _f(cname); if val.is_null() { cmd_decompress_memory_nv } else { ::core::mem::transmute(val) } }, cmd_decompress_memory_indirect_count_nv: unsafe { unsafe extern "system" fn cmd_decompress_memory_indirect_count_nv( _command_buffer: CommandBuffer, _indirect_commands_address: DeviceAddress, _indirect_commands_count_address: DeviceAddress, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_decompress_memory_indirect_count_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdDecompressMemoryIndirectCountNV\0", ); let val = _f(cname); if val.is_null() { cmd_decompress_memory_indirect_count_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_device_generated_commands_compute"] pub mod device_generated_commands_compute { use super::super::*; pub use { crate::vk::NV_DEVICE_GENERATED_COMMANDS_COMPUTE_NAME as NAME, crate::vk::NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_device_generated_commands_compute device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_device_generated_commands_compute device-level function pointers"] pub struct DeviceFn { pub get_pipeline_indirect_memory_requirements_nv: PFN_vkGetPipelineIndirectMemoryRequirementsNV, pub cmd_update_pipeline_indirect_buffer_nv: PFN_vkCmdUpdatePipelineIndirectBufferNV, pub get_pipeline_indirect_device_address_nv: PFN_vkGetPipelineIndirectDeviceAddressNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_pipeline_indirect_memory_requirements_nv: unsafe { unsafe extern "system" fn get_pipeline_indirect_memory_requirements_nv( _device: crate::vk::Device, _p_create_info: *const ComputePipelineCreateInfo<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_pipeline_indirect_memory_requirements_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPipelineIndirectMemoryRequirementsNV\0", ); let val = _f(cname); if val.is_null() { get_pipeline_indirect_memory_requirements_nv } else { ::core::mem::transmute(val) } }, cmd_update_pipeline_indirect_buffer_nv: unsafe { unsafe extern "system" 
fn cmd_update_pipeline_indirect_buffer_nv( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _pipeline: Pipeline, ) { panic!(concat!( "Unable to load ", stringify!(cmd_update_pipeline_indirect_buffer_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkCmdUpdatePipelineIndirectBufferNV\0", ); let val = _f(cname); if val.is_null() { cmd_update_pipeline_indirect_buffer_nv } else { ::core::mem::transmute(val) } }, get_pipeline_indirect_device_address_nv: unsafe { unsafe extern "system" fn get_pipeline_indirect_device_address_nv( _device: crate::vk::Device, _p_info: *const PipelineIndirectDeviceAddressInfoNV<'_>, ) -> DeviceAddress { panic!(concat!( "Unable to load ", stringify!(get_pipeline_indirect_device_address_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPipelineIndirectDeviceAddressNV\0", ); let val = _f(cname); if val.is_null() { get_pipeline_indirect_device_address_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_linear_color_attachment"] pub mod linear_color_attachment { use super::super::*; pub use { crate::vk::NV_LINEAR_COLOR_ATTACHMENT_NAME as NAME, crate::vk::NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_optical_flow"] pub mod optical_flow { use super::super::*; pub use { crate::vk::NV_OPTICAL_FLOW_NAME as NAME, crate::vk::NV_OPTICAL_FLOW_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NV_optical_flow instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_optical_flow instance-level function pointers"] pub struct InstanceFn { pub get_physical_device_optical_flow_image_formats_nv: PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_optical_flow_image_formats_nv: unsafe { unsafe extern "system" fn get_physical_device_optical_flow_image_formats_nv( _physical_device: PhysicalDevice, _p_optical_flow_image_format_info: *const OpticalFlowImageFormatInfoNV< '_, >, _p_format_count: *mut u32, _p_image_format_properties: *mut OpticalFlowImageFormatPropertiesNV<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_optical_flow_image_formats_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceOpticalFlowImageFormatsNV\0", ); let val = _f(cname); if val.is_null() { get_physical_device_optical_flow_image_formats_nv } else { ::core::mem::transmute(val) } }, } } } #[doc = "VK_NV_optical_flow device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn 
fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_optical_flow device-level function pointers"] pub struct DeviceFn { pub create_optical_flow_session_nv: PFN_vkCreateOpticalFlowSessionNV, pub destroy_optical_flow_session_nv: PFN_vkDestroyOpticalFlowSessionNV, pub bind_optical_flow_session_image_nv: PFN_vkBindOpticalFlowSessionImageNV, pub cmd_optical_flow_execute_nv: PFN_vkCmdOpticalFlowExecuteNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_optical_flow_session_nv: unsafe { unsafe extern "system" fn create_optical_flow_session_nv( _device: crate::vk::Device, _p_create_info: *const OpticalFlowSessionCreateInfoNV<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_session: *mut OpticalFlowSessionNV, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_optical_flow_session_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateOpticalFlowSessionNV\0"); let val = _f(cname); if val.is_null() { create_optical_flow_session_nv } else { ::core::mem::transmute(val) } }, destroy_optical_flow_session_nv: unsafe { unsafe extern "system" fn destroy_optical_flow_session_nv( _device: crate::vk::Device, _session: OpticalFlowSessionNV, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_optical_flow_session_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyOpticalFlowSessionNV\0"); let val = _f(cname); if val.is_null() { destroy_optical_flow_session_nv } else { ::core::mem::transmute(val) } }, bind_optical_flow_session_image_nv: unsafe { unsafe extern "system" fn bind_optical_flow_session_image_nv( _device: crate::vk::Device, _session: OpticalFlowSessionNV, _binding_point: OpticalFlowSessionBindingPointNV, _view: ImageView, _layout: ImageLayout, ) -> Result { panic!(concat!( "Unable to load ", stringify!(bind_optical_flow_session_image_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkBindOpticalFlowSessionImageNV\0", ); let val = _f(cname); if val.is_null() { bind_optical_flow_session_image_nv } else { ::core::mem::transmute(val) } }, cmd_optical_flow_execute_nv: unsafe { unsafe extern "system" fn cmd_optical_flow_execute_nv( _command_buffer: CommandBuffer, _session: OpticalFlowSessionNV, _p_execute_info: *const OpticalFlowExecuteInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_optical_flow_execute_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdOpticalFlowExecuteNV\0"); let val = _f(cname); if val.is_null() { cmd_optical_flow_execute_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_ray_tracing_invocation_reorder"] pub mod ray_tracing_invocation_reorder { use super::super::*; pub use { crate::vk::NV_RAY_TRACING_INVOCATION_REORDER_NAME as NAME, crate::vk::NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_extended_sparse_address_space"] pub mod extended_sparse_address_space { use super::super::*; pub use { crate::vk::NV_EXTENDED_SPARSE_ADDRESS_SPACE_NAME as NAME, crate::vk::NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_low_latency2"] pub mod low_latency2 { use super::super::*; pub use { crate::vk::NV_LOW_LATENCY2_NAME as NAME, crate::vk::NV_LOW_LATENCY2_SPEC_VERSION as SPEC_VERSION, }; #[doc = 
"VK_NV_low_latency2 device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NV_low_latency2 device-level function pointers"] pub struct DeviceFn { pub set_latency_sleep_mode_nv: PFN_vkSetLatencySleepModeNV, pub latency_sleep_nv: PFN_vkLatencySleepNV, pub set_latency_marker_nv: PFN_vkSetLatencyMarkerNV, pub get_latency_timings_nv: PFN_vkGetLatencyTimingsNV, pub queue_notify_out_of_band_nv: PFN_vkQueueNotifyOutOfBandNV, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { set_latency_sleep_mode_nv: unsafe { unsafe extern "system" fn set_latency_sleep_mode_nv( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_sleep_mode_info: *const LatencySleepModeInfoNV<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(set_latency_sleep_mode_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetLatencySleepModeNV\0"); let val = _f(cname); if val.is_null() { set_latency_sleep_mode_nv } else { ::core::mem::transmute(val) } }, latency_sleep_nv: unsafe { unsafe extern "system" fn latency_sleep_nv( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_sleep_info: *const LatencySleepInfoNV<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(latency_sleep_nv))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkLatencySleepNV\0"); let val = _f(cname); if val.is_null() { latency_sleep_nv } else { ::core::mem::transmute(val) } }, set_latency_marker_nv: unsafe { unsafe extern "system" fn set_latency_marker_nv( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_latency_marker_info: *const SetLatencyMarkerInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(set_latency_marker_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetLatencyMarkerNV\0"); let val = _f(cname); if val.is_null() { set_latency_marker_nv } else { ::core::mem::transmute(val) } }, get_latency_timings_nv: unsafe { unsafe extern "system" fn get_latency_timings_nv( _device: crate::vk::Device, _swapchain: SwapchainKHR, _p_latency_marker_info: *mut GetLatencyMarkerInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_latency_timings_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetLatencyTimingsNV\0"); let val = _f(cname); if val.is_null() { get_latency_timings_nv } else { ::core::mem::transmute(val) } }, queue_notify_out_of_band_nv: unsafe { unsafe extern "system" fn queue_notify_out_of_band_nv( _queue: Queue, _p_queue_type_info: *const OutOfBandQueueTypeInfoNV<'_>, ) { panic!(concat!( "Unable to load ", stringify!(queue_notify_out_of_band_nv) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueNotifyOutOfBandNV\0"); let val = _f(cname); if val.is_null() { queue_notify_out_of_band_nv } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NV_per_stage_descriptor_set"] pub mod per_stage_descriptor_set { use super::super::*; pub use { crate::vk::NV_PER_STAGE_DESCRIPTOR_SET_NAME as 
NAME, crate::vk::NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_descriptor_pool_overallocation"] pub mod descriptor_pool_overallocation { use super::super::*; pub use { crate::vk::NV_DESCRIPTOR_POOL_OVERALLOCATION_NAME as NAME, crate::vk::NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_raw_access_chains"] pub mod raw_access_chains { use super::super::*; pub use { crate::vk::NV_RAW_ACCESS_CHAINS_NAME as NAME, crate::vk::NV_RAW_ACCESS_CHAINS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_shader_atomic_float16_vector"] pub mod shader_atomic_float16_vector { use super::super::*; pub use { crate::vk::NV_SHADER_ATOMIC_FLOAT16_VECTOR_NAME as NAME, crate::vk::NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_NV_ray_tracing_validation"] pub mod ray_tracing_validation { use super::super::*; pub use { crate::vk::NV_RAY_TRACING_VALIDATION_NAME as NAME, crate::vk::NV_RAY_TRACING_VALIDATION_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged NVX"] pub mod nvx { #[doc = "VK_NVX_binary_import"] pub mod binary_import { use super::super::*; pub use { crate::vk::NVX_BINARY_IMPORT_NAME as NAME, crate::vk::NVX_BINARY_IMPORT_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NVX_binary_import device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NVX_binary_import device-level function pointers"] pub struct DeviceFn { pub create_cu_module_nvx: PFN_vkCreateCuModuleNVX, pub create_cu_function_nvx: PFN_vkCreateCuFunctionNVX, pub destroy_cu_module_nvx: PFN_vkDestroyCuModuleNVX, pub destroy_cu_function_nvx: PFN_vkDestroyCuFunctionNVX, pub cmd_cu_launch_kernel_nvx: PFN_vkCmdCuLaunchKernelNVX, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_cu_module_nvx: unsafe { unsafe extern "system" fn create_cu_module_nvx( _device: crate::vk::Device, _p_create_info: *const CuModuleCreateInfoNVX<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_module: *mut CuModuleNVX, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_cu_module_nvx))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateCuModuleNVX\0"); let val = _f(cname); if val.is_null() { create_cu_module_nvx } else { ::core::mem::transmute(val) } }, create_cu_function_nvx: unsafe { unsafe extern "system" fn create_cu_function_nvx( _device: crate::vk::Device, _p_create_info: *const CuFunctionCreateInfoNVX<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_function: *mut CuFunctionNVX, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_cu_function_nvx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateCuFunctionNVX\0"); let val = _f(cname); if val.is_null() { create_cu_function_nvx } else { ::core::mem::transmute(val) } }, destroy_cu_module_nvx: unsafe { unsafe extern "system" fn destroy_cu_module_nvx( _device: crate::vk::Device, _module: 
CuModuleNVX, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_cu_module_nvx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyCuModuleNVX\0"); let val = _f(cname); if val.is_null() { destroy_cu_module_nvx } else { ::core::mem::transmute(val) } }, destroy_cu_function_nvx: unsafe { unsafe extern "system" fn destroy_cu_function_nvx( _device: crate::vk::Device, _function: CuFunctionNVX, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_cu_function_nvx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyCuFunctionNVX\0"); let val = _f(cname); if val.is_null() { destroy_cu_function_nvx } else { ::core::mem::transmute(val) } }, cmd_cu_launch_kernel_nvx: unsafe { unsafe extern "system" fn cmd_cu_launch_kernel_nvx( _command_buffer: CommandBuffer, _p_launch_info: *const CuLaunchInfoNVX<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_cu_launch_kernel_nvx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCuLaunchKernelNVX\0"); let val = _f(cname); if val.is_null() { cmd_cu_launch_kernel_nvx } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NVX_image_view_handle"] pub mod image_view_handle { use super::super::*; pub use { crate::vk::NVX_IMAGE_VIEW_HANDLE_NAME as NAME, crate::vk::NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_NVX_image_view_handle device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_NVX_image_view_handle device-level function pointers"] pub struct DeviceFn { pub get_image_view_handle_nvx: PFN_vkGetImageViewHandleNVX, pub get_image_view_address_nvx: PFN_vkGetImageViewAddressNVX, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_image_view_handle_nvx: unsafe { unsafe extern "system" fn get_image_view_handle_nvx( _device: crate::vk::Device, _p_info: *const ImageViewHandleInfoNVX<'_>, ) -> u32 { panic!(concat!( "Unable to load ", stringify!(get_image_view_handle_nvx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageViewHandleNVX\0"); let val = _f(cname); if val.is_null() { get_image_view_handle_nvx } else { ::core::mem::transmute(val) } }, get_image_view_address_nvx: unsafe { unsafe extern "system" fn get_image_view_address_nvx( _device: crate::vk::Device, _image_view: ImageView, _p_properties: *mut ImageViewAddressPropertiesNVX<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_image_view_address_nvx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageViewAddressNVX\0"); let val = _f(cname); if val.is_null() { get_image_view_address_nvx } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_NVX_multiview_per_view_attributes"] pub mod multiview_per_view_attributes { use super::super::*; pub use { crate::vk::NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_NAME as NAME, 
crate::vk::NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged QCOM"] pub mod qcom { #[doc = "VK_QCOM_render_pass_shader_resolve"] pub mod render_pass_shader_resolve { use super::super::*; pub use { crate::vk::QCOM_RENDER_PASS_SHADER_RESOLVE_NAME as NAME, crate::vk::QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_render_pass_transform"] pub mod render_pass_transform { use super::super::*; pub use { crate::vk::QCOM_RENDER_PASS_TRANSFORM_NAME as NAME, crate::vk::QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_render_pass_store_ops"] pub mod render_pass_store_ops { use super::super::*; pub use { crate::vk::QCOM_RENDER_PASS_STORE_OPS_NAME as NAME, crate::vk::QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_rotated_copy_commands"] pub mod rotated_copy_commands { use super::super::*; pub use { crate::vk::QCOM_ROTATED_COPY_COMMANDS_NAME as NAME, crate::vk::QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_fragment_density_map_offset"] pub mod fragment_density_map_offset { use super::super::*; pub use { crate::vk::QCOM_FRAGMENT_DENSITY_MAP_OFFSET_NAME as NAME, crate::vk::QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_image_processing"] pub mod image_processing { use super::super::*; pub use { crate::vk::QCOM_IMAGE_PROCESSING_NAME as NAME, crate::vk::QCOM_IMAGE_PROCESSING_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_tile_properties"] pub mod tile_properties { use super::super::*; pub use { crate::vk::QCOM_TILE_PROPERTIES_NAME as NAME, crate::vk::QCOM_TILE_PROPERTIES_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_QCOM_tile_properties device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_QCOM_tile_properties device-level function pointers"] pub struct DeviceFn { pub get_framebuffer_tile_properties_qcom: PFN_vkGetFramebufferTilePropertiesQCOM, pub get_dynamic_rendering_tile_properties_qcom: PFN_vkGetDynamicRenderingTilePropertiesQCOM, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_framebuffer_tile_properties_qcom: unsafe { unsafe extern "system" fn get_framebuffer_tile_properties_qcom( _device: crate::vk::Device, _framebuffer: Framebuffer, _p_properties_count: *mut u32, _p_properties: *mut TilePropertiesQCOM<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_framebuffer_tile_properties_qcom) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetFramebufferTilePropertiesQCOM\0", ); let val = _f(cname); if val.is_null() { get_framebuffer_tile_properties_qcom } else { ::core::mem::transmute(val) } }, get_dynamic_rendering_tile_properties_qcom: unsafe { unsafe extern "system" fn get_dynamic_rendering_tile_properties_qcom( _device: crate::vk::Device, _p_rendering_info: *const RenderingInfo<'_>, 
_p_properties: *mut TilePropertiesQCOM<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_dynamic_rendering_tile_properties_qcom) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDynamicRenderingTilePropertiesQCOM\0", ); let val = _f(cname); if val.is_null() { get_dynamic_rendering_tile_properties_qcom } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_QCOM_multiview_per_view_viewports"] pub mod multiview_per_view_viewports { use super::super::*; pub use { crate::vk::QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_NAME as NAME, crate::vk::QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_multiview_per_view_render_areas"] pub mod multiview_per_view_render_areas { use super::super::*; pub use { crate::vk::QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_NAME as NAME, crate::vk::QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_image_processing2"] pub mod image_processing2 { use super::super::*; pub use { crate::vk::QCOM_IMAGE_PROCESSING2_NAME as NAME, crate::vk::QCOM_IMAGE_PROCESSING2_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_filter_cubic_weights"] pub mod filter_cubic_weights { use super::super::*; pub use { crate::vk::QCOM_FILTER_CUBIC_WEIGHTS_NAME as NAME, crate::vk::QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_ycbcr_degamma"] pub mod ycbcr_degamma { use super::super::*; pub use { crate::vk::QCOM_YCBCR_DEGAMMA_NAME as NAME, crate::vk::QCOM_YCBCR_DEGAMMA_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_QCOM_filter_cubic_clamp"] pub mod filter_cubic_clamp { use super::super::*; pub use { crate::vk::QCOM_FILTER_CUBIC_CLAMP_NAME as NAME, crate::vk::QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged QNX"] pub mod qnx { #[doc = "VK_QNX_screen_surface"] pub mod screen_surface { use super::super::*; pub use { crate::vk::QNX_SCREEN_SURFACE_NAME as NAME, crate::vk::QNX_SCREEN_SURFACE_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_QNX_screen_surface instance-level functions"] #[derive(Clone)] pub struct Instance { pub(crate) fp: InstanceFn, pub(crate) handle: crate::vk::Instance, } impl Instance { pub fn new(entry: &crate::Entry, instance: &crate::Instance) -> Self { let handle = instance.handle(); let fp = InstanceFn::load(|name| unsafe { core::mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &InstanceFn { &self.fp } #[inline] pub fn instance(&self) -> crate::vk::Instance { self.handle } } #[derive(Clone)] #[doc = "Raw VK_QNX_screen_surface instance-level function pointers"] pub struct InstanceFn { pub create_screen_surface_qnx: PFN_vkCreateScreenSurfaceQNX, pub get_physical_device_screen_presentation_support_qnx: PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX, } unsafe impl Send for InstanceFn {} unsafe impl Sync for InstanceFn {} impl InstanceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_screen_surface_qnx: unsafe { unsafe extern "system" fn create_screen_surface_qnx( _instance: crate::vk::Instance, _p_create_info: *const ScreenSurfaceCreateInfoQNX<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_surface: *mut SurfaceKHR, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_screen_surface_qnx) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateScreenSurfaceQNX\0"); let val = _f(cname); if val.is_null() { 
create_screen_surface_qnx } else { ::core::mem::transmute(val) } }, get_physical_device_screen_presentation_support_qnx: unsafe { unsafe extern "system" fn get_physical_device_screen_presentation_support_qnx( _physical_device: PhysicalDevice, _queue_family_index: u32, _window: *mut _screen_window, ) -> Bool32 { panic!(concat!( "Unable to load ", stringify!(get_physical_device_screen_presentation_support_qnx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceScreenPresentationSupportQNX\0", ); let val = _f(cname); if val.is_null() { get_physical_device_screen_presentation_support_qnx } else { ::core::mem::transmute(val) } }, } } } } #[doc = "VK_QNX_external_memory_screen_buffer"] pub mod external_memory_screen_buffer { use super::super::*; pub use { crate::vk::QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_NAME as NAME, crate::vk::QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_QNX_external_memory_screen_buffer device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_QNX_external_memory_screen_buffer device-level function pointers"] pub struct DeviceFn { pub get_screen_buffer_properties_qnx: PFN_vkGetScreenBufferPropertiesQNX, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_screen_buffer_properties_qnx: unsafe { unsafe extern "system" fn get_screen_buffer_properties_qnx( _device: crate::vk::Device, _buffer: *const _screen_buffer, _p_properties: *mut ScreenBufferPropertiesQNX<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_screen_buffer_properties_qnx) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetScreenBufferPropertiesQNX\0", ); let val = _f(cname); if val.is_null() { get_screen_buffer_properties_qnx } else { ::core::mem::transmute(val) } }, } } } } } #[doc = "Extensions tagged SEC"] pub mod sec { #[doc = "VK_SEC_amigo_profiling"] pub mod amigo_profiling { use super::super::*; pub use { crate::vk::SEC_AMIGO_PROFILING_NAME as NAME, crate::vk::SEC_AMIGO_PROFILING_SPEC_VERSION as SPEC_VERSION, }; } } #[doc = "Extensions tagged VALVE"] pub mod valve { #[doc = "VK_VALVE_mutable_descriptor_type"] pub mod mutable_descriptor_type { use super::super::*; pub use { crate::vk::VALVE_MUTABLE_DESCRIPTOR_TYPE_NAME as NAME, crate::vk::VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION as SPEC_VERSION, }; } #[doc = "VK_VALVE_descriptor_set_host_mapping"] pub mod descriptor_set_host_mapping { use super::super::*; pub use { crate::vk::VALVE_DESCRIPTOR_SET_HOST_MAPPING_NAME as NAME, crate::vk::VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION as SPEC_VERSION, }; #[doc = "VK_VALVE_descriptor_set_host_mapping device-level functions"] #[derive(Clone)] pub struct Device { pub(crate) fp: DeviceFn, pub(crate) handle: crate::vk::Device, } impl Device { pub fn new(instance: &crate::Instance, device: &crate::Device) -> Self { let handle = device.handle(); let fp = DeviceFn::load(|name| 
unsafe { core::mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr())) }); Self { handle, fp } } #[inline] pub fn fp(&self) -> &DeviceFn { &self.fp } #[inline] pub fn device(&self) -> crate::vk::Device { self.handle } } #[derive(Clone)] #[doc = "Raw VK_VALVE_descriptor_set_host_mapping device-level function pointers"] pub struct DeviceFn { pub get_descriptor_set_layout_host_mapping_info_valve: PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE, pub get_descriptor_set_host_mapping_valve: PFN_vkGetDescriptorSetHostMappingVALVE, } unsafe impl Send for DeviceFn {} unsafe impl Sync for DeviceFn {} impl DeviceFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_descriptor_set_layout_host_mapping_info_valve: unsafe { unsafe extern "system" fn get_descriptor_set_layout_host_mapping_info_valve( _device: crate::vk::Device, _p_binding_reference: *const DescriptorSetBindingReferenceVALVE<'_>, _p_host_mapping: *mut DescriptorSetLayoutHostMappingInfoVALVE<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_descriptor_set_layout_host_mapping_info_valve) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDescriptorSetLayoutHostMappingInfoVALVE\0", ); let val = _f(cname); if val.is_null() { get_descriptor_set_layout_host_mapping_info_valve } else { ::core::mem::transmute(val) } }, get_descriptor_set_host_mapping_valve: unsafe { unsafe extern "system" fn get_descriptor_set_host_mapping_valve( _device: crate::vk::Device, _descriptor_set: DescriptorSet, _pp_data: *mut *mut c_void, ) { panic!(concat!( "Unable to load ", stringify!(get_descriptor_set_host_mapping_valve) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDescriptorSetHostMappingVALVE\0", ); let val = _f(cname); if val.is_null() { get_descriptor_set_host_mapping_valve } else { ::core::mem::transmute(val) } }, } } } } } ash-0.38.0+1.3.281/src/instance.rs000064400000000000000000000476321046102023000142520ustar 00000000000000#[cfg(doc)] use super::Entry; use crate::device::Device; use crate::prelude::*; use crate::vk; use crate::RawPtr; use alloc::vec::Vec; use core::ffi; use core::mem; use core::ptr; /// #[derive(Clone)] pub struct Instance { pub(crate) handle: vk::Instance, pub(crate) instance_fn_1_0: crate::InstanceFnV1_0, pub(crate) instance_fn_1_1: crate::InstanceFnV1_1, pub(crate) instance_fn_1_3: crate::InstanceFnV1_3, } impl Instance { pub unsafe fn load(static_fn: &crate::StaticFn, instance: vk::Instance) -> Self { Self::load_with( |name| mem::transmute((static_fn.get_instance_proc_addr)(instance, name.as_ptr())), instance, ) } pub unsafe fn load_with( mut load_fn: impl FnMut(&ffi::CStr) -> *const ffi::c_void, instance: vk::Instance, ) -> Self { Self::from_parts_1_3( instance, crate::InstanceFnV1_0::load(&mut load_fn), crate::InstanceFnV1_1::load(&mut load_fn), crate::InstanceFnV1_3::load(&mut load_fn), ) } #[inline] pub fn from_parts_1_3( handle: vk::Instance, instance_fn_1_0: crate::InstanceFnV1_0, instance_fn_1_1: crate::InstanceFnV1_1, instance_fn_1_3: crate::InstanceFnV1_3, ) -> Self { Self { handle, instance_fn_1_0, instance_fn_1_1, instance_fn_1_3, } } #[inline] pub fn handle(&self) -> vk::Instance { self.handle } } /// Vulkan core 1.3 impl Instance { #[inline] pub fn fp_v1_3(&self) -> &crate::InstanceFnV1_3 { &self.instance_fn_1_3 } /// Retrieve the number of elements to pass to [`get_physical_device_tool_properties()`][Self::get_physical_device_tool_properties()] #[inline] pub unsafe fn 
get_physical_device_tool_properties_len( &self, physical_device: vk::PhysicalDevice, ) -> VkResult { let mut count = mem::MaybeUninit::uninit(); (self.instance_fn_1_3.get_physical_device_tool_properties)( physical_device, count.as_mut_ptr(), ptr::null_mut(), ) .assume_init_on_success(count) .map(|c| c as usize) } /// /// /// Call [`get_physical_device_tool_properties_len()`][Self::get_physical_device_tool_properties_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_physical_device_tool_properties( &self, physical_device: vk::PhysicalDevice, out: &mut [vk::PhysicalDeviceToolProperties<'_>], ) -> VkResult<()> { let mut count = out.len() as u32; (self.instance_fn_1_3.get_physical_device_tool_properties)( physical_device, &mut count, out.as_mut_ptr(), ) .result()?; assert_eq!(count as usize, out.len()); Ok(()) } } /// Vulkan core 1.1 impl Instance { #[inline] pub fn fp_v1_1(&self) -> &crate::InstanceFnV1_1 { &self.instance_fn_1_1 } /// Retrieve the number of elements to pass to [`enumerate_physical_device_groups()`][Self::enumerate_physical_device_groups()] #[inline] pub unsafe fn enumerate_physical_device_groups_len(&self) -> VkResult { let mut group_count = mem::MaybeUninit::uninit(); (self.instance_fn_1_1.enumerate_physical_device_groups)( self.handle(), group_count.as_mut_ptr(), ptr::null_mut(), ) .assume_init_on_success(group_count) .map(|c| c as usize) } /// /// /// Call [`enumerate_physical_device_groups_len()`][Self::enumerate_physical_device_groups_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. 
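    ///
    /// A minimal usage sketch of this two-call pattern (the `example` wrapper and the
    /// `instance` binding below are placeholders for illustration, not items provided by
    /// this crate): query the length first, then pass a [`Default::default()`]-initialized
    /// buffer of that size.
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # unsafe fn example(instance: &ash::Instance) -> ash::prelude::VkResult<()> {
    /// // Query how many physical-device groups the instance reports.
    /// let count = instance.enumerate_physical_device_groups_len()?;
    /// // Default-initialize so `s_type` is correct and `p_next` is null.
    /// let mut groups = vec![vk::PhysicalDeviceGroupProperties::default(); count];
    /// instance.enumerate_physical_device_groups(&mut groups)?;
    /// # Ok(()) }
    /// ```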
#[inline] pub unsafe fn enumerate_physical_device_groups( &self, out: &mut [vk::PhysicalDeviceGroupProperties<'_>], ) -> VkResult<()> { let mut count = out.len() as u32; (self.instance_fn_1_1.enumerate_physical_device_groups)( self.handle(), &mut count, out.as_mut_ptr(), ) .result()?; assert_eq!(count as usize, out.len()); Ok(()) } /// #[inline] pub unsafe fn get_physical_device_features2( &self, physical_device: vk::PhysicalDevice, features: &mut vk::PhysicalDeviceFeatures2<'_>, ) { (self.instance_fn_1_1.get_physical_device_features2)(physical_device, features); } /// #[inline] pub unsafe fn get_physical_device_properties2( &self, physical_device: vk::PhysicalDevice, prop: &mut vk::PhysicalDeviceProperties2<'_>, ) { (self.instance_fn_1_1.get_physical_device_properties2)(physical_device, prop); } /// #[inline] pub unsafe fn get_physical_device_format_properties2( &self, physical_device: vk::PhysicalDevice, format: vk::Format, out: &mut vk::FormatProperties2<'_>, ) { (self.instance_fn_1_1.get_physical_device_format_properties2)(physical_device, format, out); } /// #[inline] pub unsafe fn get_physical_device_image_format_properties2( &self, physical_device: vk::PhysicalDevice, format_info: &vk::PhysicalDeviceImageFormatInfo2<'_>, image_format_prop: &mut vk::ImageFormatProperties2<'_>, ) -> VkResult<()> { (self .instance_fn_1_1 .get_physical_device_image_format_properties2)( physical_device, format_info, image_format_prop, ) .result() } /// Retrieve the number of elements to pass to [`get_physical_device_queue_family_properties2()`][Self::get_physical_device_queue_family_properties2()] #[inline] pub unsafe fn get_physical_device_queue_family_properties2_len( &self, physical_device: vk::PhysicalDevice, ) -> usize { let mut queue_count = mem::MaybeUninit::uninit(); (self .instance_fn_1_1 .get_physical_device_queue_family_properties2)( physical_device, queue_count.as_mut_ptr(), ptr::null_mut(), ); queue_count.assume_init() as usize } /// /// /// Call [`get_physical_device_queue_family_properties2_len()`][Self::get_physical_device_queue_family_properties2_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. 
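    ///
    /// A minimal usage sketch (the `example` wrapper and the `instance`/`pdev` bindings
    /// below are placeholders for illustration, not items provided by this crate):
    ///
    /// ```no_run
    /// # use ash::vk;
    /// # unsafe fn example(instance: &ash::Instance, pdev: vk::PhysicalDevice) {
    /// // Query the element count first, then size a default-initialized buffer to match.
    /// let len = instance.get_physical_device_queue_family_properties2_len(pdev);
    /// let mut props = vec![vk::QueueFamilyProperties2::default(); len];
    /// instance.get_physical_device_queue_family_properties2(pdev, &mut props);
    /// # }
    /// ```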
#[inline] pub unsafe fn get_physical_device_queue_family_properties2( &self, physical_device: vk::PhysicalDevice, out: &mut [vk::QueueFamilyProperties2<'_>], ) { let mut count = out.len() as u32; (self .instance_fn_1_1 .get_physical_device_queue_family_properties2)( physical_device, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } /// #[inline] pub unsafe fn get_physical_device_memory_properties2( &self, physical_device: vk::PhysicalDevice, out: &mut vk::PhysicalDeviceMemoryProperties2<'_>, ) { (self.instance_fn_1_1.get_physical_device_memory_properties2)(physical_device, out); } /// Retrieve the number of elements to pass to [`get_physical_device_sparse_image_format_properties2()`][Self::get_physical_device_sparse_image_format_properties2()] #[inline] pub unsafe fn get_physical_device_sparse_image_format_properties2_len( &self, physical_device: vk::PhysicalDevice, format_info: &vk::PhysicalDeviceSparseImageFormatInfo2<'_>, ) -> usize { let mut format_count = mem::MaybeUninit::uninit(); (self .instance_fn_1_1 .get_physical_device_sparse_image_format_properties2)( physical_device, format_info, format_count.as_mut_ptr(), ptr::null_mut(), ); format_count.assume_init() as usize } /// /// /// Call [`get_physical_device_sparse_image_format_properties2_len()`][Self::get_physical_device_sparse_image_format_properties2_len()] to query the number of elements to pass to `out`. /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer. #[inline] pub unsafe fn get_physical_device_sparse_image_format_properties2( &self, physical_device: vk::PhysicalDevice, format_info: &vk::PhysicalDeviceSparseImageFormatInfo2<'_>, out: &mut [vk::SparseImageFormatProperties2<'_>], ) { let mut count = out.len() as u32; (self .instance_fn_1_1 .get_physical_device_sparse_image_format_properties2)( physical_device, format_info, &mut count, out.as_mut_ptr(), ); assert_eq!(count as usize, out.len()); } /// #[inline] pub unsafe fn get_physical_device_external_buffer_properties( &self, physical_device: vk::PhysicalDevice, external_buffer_info: &vk::PhysicalDeviceExternalBufferInfo<'_>, out: &mut vk::ExternalBufferProperties<'_>, ) { (self .instance_fn_1_1 .get_physical_device_external_buffer_properties)( physical_device, external_buffer_info, out, ); } /// #[inline] pub unsafe fn get_physical_device_external_fence_properties( &self, physical_device: vk::PhysicalDevice, external_fence_info: &vk::PhysicalDeviceExternalFenceInfo<'_>, out: &mut vk::ExternalFenceProperties<'_>, ) { (self .instance_fn_1_1 .get_physical_device_external_fence_properties)( physical_device, external_fence_info, out, ); } /// #[inline] pub unsafe fn get_physical_device_external_semaphore_properties( &self, physical_device: vk::PhysicalDevice, external_semaphore_info: &vk::PhysicalDeviceExternalSemaphoreInfo<'_>, out: &mut vk::ExternalSemaphoreProperties<'_>, ) { (self .instance_fn_1_1 .get_physical_device_external_semaphore_properties)( physical_device, external_semaphore_info, out, ); } } /// Vulkan core 1.0 impl Instance { #[inline] pub fn fp_v1_0(&self) -> &crate::InstanceFnV1_0 { &self.instance_fn_1_0 } /// /// /// # Safety /// /// There is a [parent/child relation] between [`Instance`] and the resulting [`Device`]. The /// application must not [destroy][Instance::destroy_instance()] the parent [`Instance`] object /// before first [destroying][Device::destroy_device()] the returned [`Device`] child object. 
/// [`Device`] does _not_ implement [drop][drop()] semantics and can only be destroyed via /// [`destroy_device()`][Device::destroy_device()]. /// /// See the [`Entry::create_instance()`] documentation for more destruction ordering rules on /// [`Instance`]. /// /// [parent/child relation]: https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#fundamentals-objectmodel-lifetime #[inline] pub unsafe fn create_device( &self, physical_device: vk::PhysicalDevice, create_info: &vk::DeviceCreateInfo<'_>, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) -> VkResult { let mut device = mem::MaybeUninit::uninit(); let device = (self.instance_fn_1_0.create_device)( physical_device, create_info, allocation_callbacks.as_raw_ptr(), device.as_mut_ptr(), ) .assume_init_on_success(device)?; Ok(Device::load(&self.instance_fn_1_0, device)) } /// #[inline] pub unsafe fn get_device_proc_addr( &self, device: vk::Device, p_name: *const ffi::c_char, ) -> vk::PFN_vkVoidFunction { (self.instance_fn_1_0.get_device_proc_addr)(device, p_name) } /// #[inline] pub unsafe fn destroy_instance( &self, allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>, ) { (self.instance_fn_1_0.destroy_instance)(self.handle(), allocation_callbacks.as_raw_ptr()); } /// #[inline] pub unsafe fn get_physical_device_format_properties( &self, physical_device: vk::PhysicalDevice, format: vk::Format, ) -> vk::FormatProperties { let mut format_prop = mem::MaybeUninit::uninit(); (self.instance_fn_1_0.get_physical_device_format_properties)( physical_device, format, format_prop.as_mut_ptr(), ); format_prop.assume_init() } /// #[inline] pub unsafe fn get_physical_device_image_format_properties( &self, physical_device: vk::PhysicalDevice, format: vk::Format, typ: vk::ImageType, tiling: vk::ImageTiling, usage: vk::ImageUsageFlags, flags: vk::ImageCreateFlags, ) -> VkResult { let mut image_format_prop = mem::MaybeUninit::uninit(); (self .instance_fn_1_0 .get_physical_device_image_format_properties)( physical_device, format, typ, tiling, usage, flags, image_format_prop.as_mut_ptr(), ) .assume_init_on_success(image_format_prop) } /// #[inline] pub unsafe fn get_physical_device_memory_properties( &self, physical_device: vk::PhysicalDevice, ) -> vk::PhysicalDeviceMemoryProperties { let mut memory_prop = mem::MaybeUninit::uninit(); (self.instance_fn_1_0.get_physical_device_memory_properties)( physical_device, memory_prop.as_mut_ptr(), ); memory_prop.assume_init() } /// #[inline] pub unsafe fn get_physical_device_properties( &self, physical_device: vk::PhysicalDevice, ) -> vk::PhysicalDeviceProperties { let mut prop = mem::MaybeUninit::uninit(); (self.instance_fn_1_0.get_physical_device_properties)(physical_device, prop.as_mut_ptr()); prop.assume_init() } /// #[inline] pub unsafe fn get_physical_device_queue_family_properties( &self, physical_device: vk::PhysicalDevice, ) -> Vec { read_into_uninitialized_vector(|count, data| { (self .instance_fn_1_0 .get_physical_device_queue_family_properties)( physical_device, count, data ); vk::Result::SUCCESS }) // The closure always returns SUCCESS .unwrap() } /// #[inline] pub unsafe fn get_physical_device_features( &self, physical_device: vk::PhysicalDevice, ) -> vk::PhysicalDeviceFeatures { let mut prop = mem::MaybeUninit::uninit(); (self.instance_fn_1_0.get_physical_device_features)(physical_device, prop.as_mut_ptr()); prop.assume_init() } /// #[inline] pub unsafe fn enumerate_physical_devices(&self) -> VkResult> { read_into_uninitialized_vector(|count, data| { 
(self.instance_fn_1_0.enumerate_physical_devices)(self.handle(), count, data) }) } /// #[inline] pub unsafe fn enumerate_device_extension_properties( &self, device: vk::PhysicalDevice, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.instance_fn_1_0.enumerate_device_extension_properties)( device, ptr::null(), count, data, ) }) } /// #[inline] pub unsafe fn enumerate_device_layer_properties( &self, device: vk::PhysicalDevice, ) -> VkResult> { read_into_uninitialized_vector(|count, data| { (self.instance_fn_1_0.enumerate_device_layer_properties)(device, count, data) }) } /// #[inline] pub unsafe fn get_physical_device_sparse_image_format_properties( &self, physical_device: vk::PhysicalDevice, format: vk::Format, typ: vk::ImageType, samples: vk::SampleCountFlags, usage: vk::ImageUsageFlags, tiling: vk::ImageTiling, ) -> Vec { read_into_uninitialized_vector(|count, data| { (self .instance_fn_1_0 .get_physical_device_sparse_image_format_properties)( physical_device, format, typ, samples, usage, tiling, count, data, ); vk::Result::SUCCESS }) // The closure always returns SUCCESS .unwrap() } } ash-0.38.0+1.3.281/src/lib.rs000064400000000000000000000156771046102023000132200ustar 00000000000000#![warn( clippy::alloc_instead_of_core, clippy::use_self, clippy::std_instead_of_alloc, clippy::std_instead_of_core, deprecated_in_future, rust_2018_idioms, trivial_casts, trivial_numeric_casts, unused_qualifications )] #![allow( clippy::too_many_arguments, clippy::missing_safety_doc, clippy::upper_case_acronyms )] #![cfg_attr(docsrs, feature(doc_cfg))] #![cfg_attr(not(feature = "std"), no_std)] //! # Vulkan API //! //! //! //! ## Examples //! //! ```no_run //! use ash::{vk, Entry}; //! # fn main() -> Result<(), Box> { //! let entry = Entry::linked(); //! let app_info = vk::ApplicationInfo { //! api_version: vk::make_api_version(0, 1, 0, 0), //! ..Default::default() //! }; //! let create_info = vk::InstanceCreateInfo { //! p_application_info: &app_info, //! ..Default::default() //! }; //! let instance = unsafe { entry.create_instance(&create_info, None)? }; //! # Ok(()) } //! ``` //! //! ## Getting started //! //! Load the Vulkan library linked at compile time using [`Entry::linked()`], or load it at runtime //! using [`Entry::load()`], which uses `libloading`. If you want to perform entry point loading //! yourself, call [`Entry::from_static_fn()`]. //! //! ## Crate features //! //! * **debug** (default): Whether Vulkan structs should implement `Debug`. //! * **loaded** (default): Support searching for the Vulkan loader manually at runtime. //! * **linked**: Link the Vulkan loader at compile time. //! 
* **std** (default): Whether ash depends on the standard library (otherwise `alloc` is required) extern crate alloc; pub use crate::device::Device; pub use crate::entry::Entry; #[cfg(feature = "loaded")] pub use crate::entry::LoadingError; pub use crate::extensions_generated::*; pub use crate::instance::Instance; pub use crate::tables::*; mod device; mod entry; mod extensions_generated; mod instance; pub mod prelude; mod tables; pub mod util; /// Raw Vulkan bindings and types, generated from `vk.xml` #[macro_use] pub mod vk; // macros of vk need to be defined beforehand /// Hand-written ergonomic wrappers for extension functions mod extensions; pub trait RawPtr { fn as_raw_ptr(&self) -> *const T; } impl<'r, T> RawPtr for Option<&'r T> { fn as_raw_ptr(&self) -> *const T { match *self { Some(inner) => inner, _ => ::core::ptr::null(), } } } /// Given a mutable raw pointer to a type with an `s_type` member such as [`vk::BaseOutStructure`], /// match on a set of Vulkan structures. The struct will be rebound to the given variable of the /// type of the given Vulkan structure. /// /// Note that all match bodies have to be enclosed by curly braces due to macro parsing limitations. /// It is unfortunately not possible to write `x @ ash::vk::SomeStruct => one_line_expression(),`. /// /// ``` /// let mut info = ash::vk::DeviceCreateInfo::default(); /// let info: *mut ash::vk::BaseOutStructure = <*mut _>::cast(&mut info); /// unsafe { /// ash::match_out_struct!(match info { /// info @ ash::vk::DeviceQueueCreateInfo => { /// dbg!(&info); // Unreachable /// } /// info @ ash::vk::DeviceCreateInfo => { /// dbg!(&info); /// } /// }) /// } /// ``` /// /// In addition this macro propagates implicit return values just like normal `match` blocks, as /// long as a default value or expression is provided in the "any" match arm /// (`_ => { some_value() }`). For the time being said arm must be wrapped in curly braces; an /// expression like `_ => None` is not yet supported. /// /// ``` /// # let mut info = ash::vk::DeviceCreateInfo::default(); /// # let info: *mut ash::vk::BaseOutStructure = <*mut _>::cast(&mut info); /// let device_create_flags: Option = unsafe { /// ash::match_out_struct!(match info { /// info @ ash::vk::DeviceQueueCreateInfo => { /// dbg!(&info); // Unreachable /// Some(ash::vk::DeviceCreateFlags::empty()) /// } /// info @ ash::vk::DeviceCreateInfo => { /// dbg!(&info); /// Some(info.flags) /// } /// _ => { /// None /// } /// }) /// }; /// ``` #[macro_export] macro_rules! match_out_struct { (match $p:ident { $($bind:ident @ $ty:path => $body:block $(,)?)+ $(_ => $any:block $(,)?)? }) => { match core::ptr::addr_of!((*$p).s_type).read() { $(<$ty as $crate::vk::TaggedStructure>::STRUCTURE_TYPE => { let $bind = $p .cast::<$ty>() .as_mut() .unwrap(); $body }),+ _ => { $($any)? } } }; } /// Given an immutable raw pointer to a type with an `s_type` member such as [`vk::BaseInStructure`], /// match on a set of Vulkan structures. The struct will be rebound to the given variable of the /// type of the given Vulkan structure. /// /// Note that all match bodies have to be enclosed by curly braces due to macro parsing limitations. /// It is unfortunately not possible to write `x @ ash::vk::SomeStruct => one_line_expression(),`. 
/// /// ``` /// let info = ash::vk::DeviceCreateInfo::default(); /// let info: *const ash::vk::BaseInStructure = <*const _>::cast(&info); /// unsafe { /// ash::match_in_struct!(match info { /// info @ ash::vk::DeviceQueueCreateInfo => { /// dbg!(&info); // Unreachable /// } /// info @ ash::vk::DeviceCreateInfo => { /// dbg!(&info); /// } /// }) /// } /// ``` /// /// See the [`match_out_struct!`] documentation for an example with implicit return values. #[macro_export] macro_rules! match_in_struct { (match $p:ident { $($bind:ident @ $ty:path => $body:block $(,)?)+ $(_ => $any:block $(,)?)? }) => { match core::ptr::addr_of!((*$p).s_type).read() { $(<$ty as $crate::vk::TaggedStructure>::STRUCTURE_TYPE => { let $bind = $p .cast::<$ty>() .as_ref() .unwrap(); $body }),+ _ => { $($any)? } } }; } #[cfg(test)] mod tests { use super::vk; use alloc::vec::Vec; #[test] fn test_ptr_chains() { let mut variable_pointers = vk::PhysicalDeviceVariablePointerFeatures::default(); let mut corner = vk::PhysicalDeviceCornerSampledImageFeaturesNV::default(); let chain = alloc::vec![ <*mut _>::cast(&mut variable_pointers), <*mut _>::cast(&mut corner), ]; let mut device_create_info = vk::DeviceCreateInfo::default() .push_next(&mut corner) .push_next(&mut variable_pointers); let chain2: Vec<*mut vk::BaseOutStructure<'_>> = unsafe { vk::ptr_chain_iter(&mut device_create_info) .skip(1) .collect() }; assert_eq!(chain, chain2); } } ash-0.38.0+1.3.281/src/prelude.rs000064400000000000000000000106001046102023000140670ustar 00000000000000use alloc::vec::Vec; use core::convert::TryInto; use core::mem; use core::ptr; use crate::vk; pub type VkResult = Result; impl vk::Result { #[inline] pub fn result(self) -> VkResult<()> { self.result_with_success(()) } #[inline] pub fn result_with_success(self, v: T) -> VkResult { match self { Self::SUCCESS => Ok(v), _ => Err(self), } } #[inline] pub unsafe fn assume_init_on_success(self, v: mem::MaybeUninit) -> VkResult { self.result().map(move |()| v.assume_init()) } #[inline] pub unsafe fn set_vec_len_on_success(self, mut v: Vec, len: usize) -> VkResult> { self.result().map(move |()| { v.set_len(len); v }) } } /// Repeatedly calls `f` until it does not return [`vk::Result::INCOMPLETE`] anymore, ensuring all /// available data has been read into the vector. /// /// See for example [`vkEnumerateInstanceExtensionProperties`]: the number of available items may /// change between calls; [`vk::Result::INCOMPLETE`] is returned when the count increased (and the /// vector is not large enough after querying the initial size), requiring Ash to try again. /// /// [`vkEnumerateInstanceExtensionProperties`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html pub(crate) unsafe fn read_into_uninitialized_vector, T>( f: impl Fn(&mut N, *mut T) -> vk::Result, ) -> VkResult> where >::Error: core::fmt::Debug, { loop { let mut count = N::default(); f(&mut count, ptr::null_mut()).result()?; let mut data = Vec::with_capacity(count.try_into().expect("`N` failed to convert to `usize`")); let err_code = f(&mut count, data.as_mut_ptr()); if err_code != vk::Result::INCOMPLETE { break err_code.set_vec_len_on_success( data, count.try_into().expect("`N` failed to convert to `usize`"), ); } } } /// Repeatedly calls `f` until it does not return [`vk::Result::INCOMPLETE`] anymore, ensuring all /// available data has been read into the vector. 
/// /// Items in the target vector are [`default()`][Default::default()]-initialized which is required /// for [`vk::BaseOutStructure`]-like structs where [`vk::BaseOutStructure::s_type`] needs to be a /// valid type and [`vk::BaseOutStructure::p_next`] a valid or [`null`][ptr::null_mut()] /// pointer. /// /// See for example [`vkEnumerateInstanceExtensionProperties`]: the number of available items may /// change between calls; [`vk::Result::INCOMPLETE`] is returned when the count increased (and the /// vector is not large enough after querying the initial size), requiring Ash to try again. /// /// [`vkEnumerateInstanceExtensionProperties`]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html pub(crate) unsafe fn read_into_defaulted_vector< N: Copy + Default + TryInto, T: Default + Clone, >( f: impl Fn(&mut N, *mut T) -> vk::Result, ) -> VkResult> where >::Error: core::fmt::Debug, { loop { let mut count = N::default(); f(&mut count, ptr::null_mut()).result()?; let mut data = alloc::vec![Default::default(); count.try_into().expect("`N` failed to convert to `usize`")]; let err_code = f(&mut count, data.as_mut_ptr()); if err_code != vk::Result::INCOMPLETE { break err_code.set_vec_len_on_success( data, count.try_into().expect("`N` failed to convert to `usize`"), ); } } } #[cfg(feature = "debug")] pub(crate) fn debug_flags + Copy>( f: &mut core::fmt::Formatter<'_>, known: &[(Value, &'static str)], value: Value, ) -> core::fmt::Result { let mut first = true; let mut accum = value.into(); for &(bit, name) in known { let bit = bit.into(); if bit != 0 && accum & bit == bit { if !first { f.write_str(" | ")?; } f.write_str(name)?; first = false; accum &= !bit; } } if accum != 0 { if !first { f.write_str(" | ")?; } write!(f, "{accum:b}")?; } Ok(()) } ash-0.38.0+1.3.281/src/tables.rs000064400000000000000000005537301046102023000137210ustar 00000000000000#![allow(unused_qualifications)] use crate::vk::*; use core::ffi::*; #[derive(Clone)] #[doc = "Raw Vulkan 1 static function pointers"] pub struct StaticFn { pub get_instance_proc_addr: PFN_vkGetInstanceProcAddr, } unsafe impl Send for StaticFn {} unsafe impl Sync for StaticFn {} impl StaticFn { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_instance_proc_addr: unsafe { unsafe extern "system" fn get_instance_proc_addr( _instance: crate::vk::Instance, _p_name: *const c_char, ) -> PFN_vkVoidFunction { panic!(concat!( "Unable to load ", stringify!(get_instance_proc_addr) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetInstanceProcAddr\0"); let val = _f(cname); if val.is_null() { get_instance_proc_addr } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1 entry point function pointers"] pub struct EntryFnV1_0 { pub create_instance: PFN_vkCreateInstance, pub enumerate_instance_extension_properties: PFN_vkEnumerateInstanceExtensionProperties, pub enumerate_instance_layer_properties: PFN_vkEnumerateInstanceLayerProperties, } unsafe impl Send for EntryFnV1_0 {} unsafe impl Sync for EntryFnV1_0 {} impl EntryFnV1_0 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_instance: unsafe { unsafe extern "system" fn create_instance( _p_create_info: *const InstanceCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_instance: *mut 
crate::vk::Instance, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_instance))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateInstance\0"); let val = _f(cname); if val.is_null() { create_instance } else { ::core::mem::transmute(val) } }, enumerate_instance_extension_properties: unsafe { unsafe extern "system" fn enumerate_instance_extension_properties( _p_layer_name: *const c_char, _p_property_count: *mut u32, _p_properties: *mut ExtensionProperties, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_instance_extension_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkEnumerateInstanceExtensionProperties\0", ); let val = _f(cname); if val.is_null() { enumerate_instance_extension_properties } else { ::core::mem::transmute(val) } }, enumerate_instance_layer_properties: unsafe { unsafe extern "system" fn enumerate_instance_layer_properties( _p_property_count: *mut u32, _p_properties: *mut LayerProperties, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_instance_layer_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEnumerateInstanceLayerProperties\0"); let val = _f(cname); if val.is_null() { enumerate_instance_layer_properties } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1 instance-level function pointers"] pub struct InstanceFnV1_0 { pub destroy_instance: PFN_vkDestroyInstance, pub enumerate_physical_devices: PFN_vkEnumeratePhysicalDevices, pub get_physical_device_features: PFN_vkGetPhysicalDeviceFeatures, pub get_physical_device_format_properties: PFN_vkGetPhysicalDeviceFormatProperties, pub get_physical_device_image_format_properties: PFN_vkGetPhysicalDeviceImageFormatProperties, pub get_physical_device_properties: PFN_vkGetPhysicalDeviceProperties, pub get_physical_device_queue_family_properties: PFN_vkGetPhysicalDeviceQueueFamilyProperties, pub get_physical_device_memory_properties: PFN_vkGetPhysicalDeviceMemoryProperties, pub get_device_proc_addr: PFN_vkGetDeviceProcAddr, pub create_device: PFN_vkCreateDevice, pub enumerate_device_extension_properties: PFN_vkEnumerateDeviceExtensionProperties, pub enumerate_device_layer_properties: PFN_vkEnumerateDeviceLayerProperties, pub get_physical_device_sparse_image_format_properties: PFN_vkGetPhysicalDeviceSparseImageFormatProperties, } unsafe impl Send for InstanceFnV1_0 {} unsafe impl Sync for InstanceFnV1_0 {} impl InstanceFnV1_0 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { destroy_instance: unsafe { unsafe extern "system" fn destroy_instance( _instance: crate::vk::Instance, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_instance))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyInstance\0"); let val = _f(cname); if val.is_null() { destroy_instance } else { ::core::mem::transmute(val) } }, enumerate_physical_devices: unsafe { unsafe extern "system" fn enumerate_physical_devices( _instance: crate::vk::Instance, _p_physical_device_count: *mut u32, _p_physical_devices: *mut PhysicalDevice, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_physical_devices) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEnumeratePhysicalDevices\0"); let val = _f(cname); if val.is_null() { enumerate_physical_devices } else { ::core::mem::transmute(val) } }, get_physical_device_features: unsafe { unsafe extern 
"system" fn get_physical_device_features( _physical_device: PhysicalDevice, _p_features: *mut PhysicalDeviceFeatures, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_features) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceFeatures\0"); let val = _f(cname); if val.is_null() { get_physical_device_features } else { ::core::mem::transmute(val) } }, get_physical_device_format_properties: unsafe { unsafe extern "system" fn get_physical_device_format_properties( _physical_device: PhysicalDevice, _format: Format, _p_format_properties: *mut FormatProperties, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_format_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceFormatProperties\0"); let val = _f(cname); if val.is_null() { get_physical_device_format_properties } else { ::core::mem::transmute(val) } }, get_physical_device_image_format_properties: unsafe { unsafe extern "system" fn get_physical_device_image_format_properties( _physical_device: PhysicalDevice, _format: Format, _ty: ImageType, _tiling: ImageTiling, _usage: ImageUsageFlags, _flags: ImageCreateFlags, _p_image_format_properties: *mut ImageFormatProperties, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_image_format_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceImageFormatProperties\0", ); let val = _f(cname); if val.is_null() { get_physical_device_image_format_properties } else { ::core::mem::transmute(val) } }, get_physical_device_properties: unsafe { unsafe extern "system" fn get_physical_device_properties( _physical_device: PhysicalDevice, _p_properties: *mut PhysicalDeviceProperties, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceProperties\0"); let val = _f(cname); if val.is_null() { get_physical_device_properties } else { ::core::mem::transmute(val) } }, get_physical_device_queue_family_properties: unsafe { unsafe extern "system" fn get_physical_device_queue_family_properties( _physical_device: PhysicalDevice, _p_queue_family_property_count: *mut u32, _p_queue_family_properties: *mut QueueFamilyProperties, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_queue_family_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceQueueFamilyProperties\0", ); let val = _f(cname); if val.is_null() { get_physical_device_queue_family_properties } else { ::core::mem::transmute(val) } }, get_physical_device_memory_properties: unsafe { unsafe extern "system" fn get_physical_device_memory_properties( _physical_device: PhysicalDevice, _p_memory_properties: *mut PhysicalDeviceMemoryProperties, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_memory_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceMemoryProperties\0"); let val = _f(cname); if val.is_null() { get_physical_device_memory_properties } else { ::core::mem::transmute(val) } }, get_device_proc_addr: unsafe { unsafe extern "system" fn get_device_proc_addr( _device: crate::vk::Device, _p_name: *const c_char, ) -> PFN_vkVoidFunction { panic!(concat!("Unable to load ", stringify!(get_device_proc_addr))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceProcAddr\0"); let val = _f(cname); if val.is_null() { get_device_proc_addr } else { ::core::mem::transmute(val) } }, create_device: unsafe { 
unsafe extern "system" fn create_device( _physical_device: PhysicalDevice, _p_create_info: *const DeviceCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_device: *mut crate::vk::Device, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_device))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDevice\0"); let val = _f(cname); if val.is_null() { create_device } else { ::core::mem::transmute(val) } }, enumerate_device_extension_properties: unsafe { unsafe extern "system" fn enumerate_device_extension_properties( _physical_device: PhysicalDevice, _p_layer_name: *const c_char, _p_property_count: *mut u32, _p_properties: *mut ExtensionProperties, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_device_extension_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEnumerateDeviceExtensionProperties\0"); let val = _f(cname); if val.is_null() { enumerate_device_extension_properties } else { ::core::mem::transmute(val) } }, enumerate_device_layer_properties: unsafe { unsafe extern "system" fn enumerate_device_layer_properties( _physical_device: PhysicalDevice, _p_property_count: *mut u32, _p_properties: *mut LayerProperties, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_device_layer_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEnumerateDeviceLayerProperties\0"); let val = _f(cname); if val.is_null() { enumerate_device_layer_properties } else { ::core::mem::transmute(val) } }, get_physical_device_sparse_image_format_properties: unsafe { unsafe extern "system" fn get_physical_device_sparse_image_format_properties( _physical_device: PhysicalDevice, _format: Format, _ty: ImageType, _samples: SampleCountFlags, _usage: ImageUsageFlags, _tiling: ImageTiling, _p_property_count: *mut u32, _p_properties: *mut SparseImageFormatProperties, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_sparse_image_format_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSparseImageFormatProperties\0", ); let val = _f(cname); if val.is_null() { get_physical_device_sparse_image_format_properties } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1 device-level function pointers"] pub struct DeviceFnV1_0 { pub destroy_device: PFN_vkDestroyDevice, pub get_device_queue: PFN_vkGetDeviceQueue, pub queue_submit: PFN_vkQueueSubmit, pub queue_wait_idle: PFN_vkQueueWaitIdle, pub device_wait_idle: PFN_vkDeviceWaitIdle, pub allocate_memory: PFN_vkAllocateMemory, pub free_memory: PFN_vkFreeMemory, pub map_memory: PFN_vkMapMemory, pub unmap_memory: PFN_vkUnmapMemory, pub flush_mapped_memory_ranges: PFN_vkFlushMappedMemoryRanges, pub invalidate_mapped_memory_ranges: PFN_vkInvalidateMappedMemoryRanges, pub get_device_memory_commitment: PFN_vkGetDeviceMemoryCommitment, pub bind_buffer_memory: PFN_vkBindBufferMemory, pub bind_image_memory: PFN_vkBindImageMemory, pub get_buffer_memory_requirements: PFN_vkGetBufferMemoryRequirements, pub get_image_memory_requirements: PFN_vkGetImageMemoryRequirements, pub get_image_sparse_memory_requirements: PFN_vkGetImageSparseMemoryRequirements, pub queue_bind_sparse: PFN_vkQueueBindSparse, pub create_fence: PFN_vkCreateFence, pub destroy_fence: PFN_vkDestroyFence, pub reset_fences: PFN_vkResetFences, pub get_fence_status: PFN_vkGetFenceStatus, pub wait_for_fences: PFN_vkWaitForFences, pub create_semaphore: PFN_vkCreateSemaphore, pub destroy_semaphore: PFN_vkDestroySemaphore, pub create_event: 
PFN_vkCreateEvent, pub destroy_event: PFN_vkDestroyEvent, pub get_event_status: PFN_vkGetEventStatus, pub set_event: PFN_vkSetEvent, pub reset_event: PFN_vkResetEvent, pub create_query_pool: PFN_vkCreateQueryPool, pub destroy_query_pool: PFN_vkDestroyQueryPool, pub get_query_pool_results: PFN_vkGetQueryPoolResults, pub create_buffer: PFN_vkCreateBuffer, pub destroy_buffer: PFN_vkDestroyBuffer, pub create_buffer_view: PFN_vkCreateBufferView, pub destroy_buffer_view: PFN_vkDestroyBufferView, pub create_image: PFN_vkCreateImage, pub destroy_image: PFN_vkDestroyImage, pub get_image_subresource_layout: PFN_vkGetImageSubresourceLayout, pub create_image_view: PFN_vkCreateImageView, pub destroy_image_view: PFN_vkDestroyImageView, pub create_shader_module: PFN_vkCreateShaderModule, pub destroy_shader_module: PFN_vkDestroyShaderModule, pub create_pipeline_cache: PFN_vkCreatePipelineCache, pub destroy_pipeline_cache: PFN_vkDestroyPipelineCache, pub get_pipeline_cache_data: PFN_vkGetPipelineCacheData, pub merge_pipeline_caches: PFN_vkMergePipelineCaches, pub create_graphics_pipelines: PFN_vkCreateGraphicsPipelines, pub create_compute_pipelines: PFN_vkCreateComputePipelines, pub destroy_pipeline: PFN_vkDestroyPipeline, pub create_pipeline_layout: PFN_vkCreatePipelineLayout, pub destroy_pipeline_layout: PFN_vkDestroyPipelineLayout, pub create_sampler: PFN_vkCreateSampler, pub destroy_sampler: PFN_vkDestroySampler, pub create_descriptor_set_layout: PFN_vkCreateDescriptorSetLayout, pub destroy_descriptor_set_layout: PFN_vkDestroyDescriptorSetLayout, pub create_descriptor_pool: PFN_vkCreateDescriptorPool, pub destroy_descriptor_pool: PFN_vkDestroyDescriptorPool, pub reset_descriptor_pool: PFN_vkResetDescriptorPool, pub allocate_descriptor_sets: PFN_vkAllocateDescriptorSets, pub free_descriptor_sets: PFN_vkFreeDescriptorSets, pub update_descriptor_sets: PFN_vkUpdateDescriptorSets, pub create_framebuffer: PFN_vkCreateFramebuffer, pub destroy_framebuffer: PFN_vkDestroyFramebuffer, pub create_render_pass: PFN_vkCreateRenderPass, pub destroy_render_pass: PFN_vkDestroyRenderPass, pub get_render_area_granularity: PFN_vkGetRenderAreaGranularity, pub create_command_pool: PFN_vkCreateCommandPool, pub destroy_command_pool: PFN_vkDestroyCommandPool, pub reset_command_pool: PFN_vkResetCommandPool, pub allocate_command_buffers: PFN_vkAllocateCommandBuffers, pub free_command_buffers: PFN_vkFreeCommandBuffers, pub begin_command_buffer: PFN_vkBeginCommandBuffer, pub end_command_buffer: PFN_vkEndCommandBuffer, pub reset_command_buffer: PFN_vkResetCommandBuffer, pub cmd_bind_pipeline: PFN_vkCmdBindPipeline, pub cmd_set_viewport: PFN_vkCmdSetViewport, pub cmd_set_scissor: PFN_vkCmdSetScissor, pub cmd_set_line_width: PFN_vkCmdSetLineWidth, pub cmd_set_depth_bias: PFN_vkCmdSetDepthBias, pub cmd_set_blend_constants: PFN_vkCmdSetBlendConstants, pub cmd_set_depth_bounds: PFN_vkCmdSetDepthBounds, pub cmd_set_stencil_compare_mask: PFN_vkCmdSetStencilCompareMask, pub cmd_set_stencil_write_mask: PFN_vkCmdSetStencilWriteMask, pub cmd_set_stencil_reference: PFN_vkCmdSetStencilReference, pub cmd_bind_descriptor_sets: PFN_vkCmdBindDescriptorSets, pub cmd_bind_index_buffer: PFN_vkCmdBindIndexBuffer, pub cmd_bind_vertex_buffers: PFN_vkCmdBindVertexBuffers, pub cmd_draw: PFN_vkCmdDraw, pub cmd_draw_indexed: PFN_vkCmdDrawIndexed, pub cmd_draw_indirect: PFN_vkCmdDrawIndirect, pub cmd_draw_indexed_indirect: PFN_vkCmdDrawIndexedIndirect, pub cmd_dispatch: PFN_vkCmdDispatch, pub cmd_dispatch_indirect: PFN_vkCmdDispatchIndirect, pub 
cmd_copy_buffer: PFN_vkCmdCopyBuffer, pub cmd_copy_image: PFN_vkCmdCopyImage, pub cmd_blit_image: PFN_vkCmdBlitImage, pub cmd_copy_buffer_to_image: PFN_vkCmdCopyBufferToImage, pub cmd_copy_image_to_buffer: PFN_vkCmdCopyImageToBuffer, pub cmd_update_buffer: PFN_vkCmdUpdateBuffer, pub cmd_fill_buffer: PFN_vkCmdFillBuffer, pub cmd_clear_color_image: PFN_vkCmdClearColorImage, pub cmd_clear_depth_stencil_image: PFN_vkCmdClearDepthStencilImage, pub cmd_clear_attachments: PFN_vkCmdClearAttachments, pub cmd_resolve_image: PFN_vkCmdResolveImage, pub cmd_set_event: PFN_vkCmdSetEvent, pub cmd_reset_event: PFN_vkCmdResetEvent, pub cmd_wait_events: PFN_vkCmdWaitEvents, pub cmd_pipeline_barrier: PFN_vkCmdPipelineBarrier, pub cmd_begin_query: PFN_vkCmdBeginQuery, pub cmd_end_query: PFN_vkCmdEndQuery, pub cmd_reset_query_pool: PFN_vkCmdResetQueryPool, pub cmd_write_timestamp: PFN_vkCmdWriteTimestamp, pub cmd_copy_query_pool_results: PFN_vkCmdCopyQueryPoolResults, pub cmd_push_constants: PFN_vkCmdPushConstants, pub cmd_begin_render_pass: PFN_vkCmdBeginRenderPass, pub cmd_next_subpass: PFN_vkCmdNextSubpass, pub cmd_end_render_pass: PFN_vkCmdEndRenderPass, pub cmd_execute_commands: PFN_vkCmdExecuteCommands, } unsafe impl Send for DeviceFnV1_0 {} unsafe impl Sync for DeviceFnV1_0 {} impl DeviceFnV1_0 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { destroy_device: unsafe { unsafe extern "system" fn destroy_device( _device: crate::vk::Device, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_device))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyDevice\0"); let val = _f(cname); if val.is_null() { destroy_device } else { ::core::mem::transmute(val) } }, get_device_queue: unsafe { unsafe extern "system" fn get_device_queue( _device: crate::vk::Device, _queue_family_index: u32, _queue_index: u32, _p_queue: *mut Queue, ) { panic!(concat!("Unable to load ", stringify!(get_device_queue))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceQueue\0"); let val = _f(cname); if val.is_null() { get_device_queue } else { ::core::mem::transmute(val) } }, queue_submit: unsafe { unsafe extern "system" fn queue_submit( _queue: Queue, _submit_count: u32, _p_submits: *const SubmitInfo<'_>, _fence: Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(queue_submit))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit\0"); let val = _f(cname); if val.is_null() { queue_submit } else { ::core::mem::transmute(val) } }, queue_wait_idle: unsafe { unsafe extern "system" fn queue_wait_idle(_queue: Queue) -> Result { panic!(concat!("Unable to load ", stringify!(queue_wait_idle))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueWaitIdle\0"); let val = _f(cname); if val.is_null() { queue_wait_idle } else { ::core::mem::transmute(val) } }, device_wait_idle: unsafe { unsafe extern "system" fn device_wait_idle(_device: crate::vk::Device) -> Result { panic!(concat!("Unable to load ", stringify!(device_wait_idle))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDeviceWaitIdle\0"); let val = _f(cname); if val.is_null() { device_wait_idle } else { ::core::mem::transmute(val) } }, allocate_memory: unsafe { unsafe extern "system" fn allocate_memory( _device: crate::vk::Device, _p_allocate_info: *const MemoryAllocateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_memory: *mut DeviceMemory, ) -> Result { 
panic!(concat!("Unable to load ", stringify!(allocate_memory))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAllocateMemory\0"); let val = _f(cname); if val.is_null() { allocate_memory } else { ::core::mem::transmute(val) } }, free_memory: unsafe { unsafe extern "system" fn free_memory( _device: crate::vk::Device, _memory: DeviceMemory, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(free_memory))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkFreeMemory\0"); let val = _f(cname); if val.is_null() { free_memory } else { ::core::mem::transmute(val) } }, map_memory: unsafe { unsafe extern "system" fn map_memory( _device: crate::vk::Device, _memory: DeviceMemory, _offset: DeviceSize, _size: DeviceSize, _flags: MemoryMapFlags, _pp_data: *mut *mut c_void, ) -> Result { panic!(concat!("Unable to load ", stringify!(map_memory))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkMapMemory\0"); let val = _f(cname); if val.is_null() { map_memory } else { ::core::mem::transmute(val) } }, unmap_memory: unsafe { unsafe extern "system" fn unmap_memory( _device: crate::vk::Device, _memory: DeviceMemory, ) { panic!(concat!("Unable to load ", stringify!(unmap_memory))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkUnmapMemory\0"); let val = _f(cname); if val.is_null() { unmap_memory } else { ::core::mem::transmute(val) } }, flush_mapped_memory_ranges: unsafe { unsafe extern "system" fn flush_mapped_memory_ranges( _device: crate::vk::Device, _memory_range_count: u32, _p_memory_ranges: *const MappedMemoryRange<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(flush_mapped_memory_ranges) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkFlushMappedMemoryRanges\0"); let val = _f(cname); if val.is_null() { flush_mapped_memory_ranges } else { ::core::mem::transmute(val) } }, invalidate_mapped_memory_ranges: unsafe { unsafe extern "system" fn invalidate_mapped_memory_ranges( _device: crate::vk::Device, _memory_range_count: u32, _p_memory_ranges: *const MappedMemoryRange<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(invalidate_mapped_memory_ranges) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkInvalidateMappedMemoryRanges\0"); let val = _f(cname); if val.is_null() { invalidate_mapped_memory_ranges } else { ::core::mem::transmute(val) } }, get_device_memory_commitment: unsafe { unsafe extern "system" fn get_device_memory_commitment( _device: crate::vk::Device, _memory: DeviceMemory, _p_committed_memory_in_bytes: *mut DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(get_device_memory_commitment) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceMemoryCommitment\0"); let val = _f(cname); if val.is_null() { get_device_memory_commitment } else { ::core::mem::transmute(val) } }, bind_buffer_memory: unsafe { unsafe extern "system" fn bind_buffer_memory( _device: crate::vk::Device, _buffer: Buffer, _memory: DeviceMemory, _memory_offset: DeviceSize, ) -> Result { panic!(concat!("Unable to load ", stringify!(bind_buffer_memory))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBindBufferMemory\0"); let val = _f(cname); if val.is_null() { bind_buffer_memory } else { ::core::mem::transmute(val) } }, bind_image_memory: unsafe { unsafe extern "system" fn bind_image_memory( _device: crate::vk::Device, _image: Image, _memory: DeviceMemory, _memory_offset: DeviceSize, ) -> Result { panic!(concat!("Unable to load ", stringify!(bind_image_memory))) } let cname = 
CStr::from_bytes_with_nul_unchecked(b"vkBindImageMemory\0"); let val = _f(cname); if val.is_null() { bind_image_memory } else { ::core::mem::transmute(val) } }, get_buffer_memory_requirements: unsafe { unsafe extern "system" fn get_buffer_memory_requirements( _device: crate::vk::Device, _buffer: Buffer, _p_memory_requirements: *mut MemoryRequirements, ) { panic!(concat!( "Unable to load ", stringify!(get_buffer_memory_requirements) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetBufferMemoryRequirements\0"); let val = _f(cname); if val.is_null() { get_buffer_memory_requirements } else { ::core::mem::transmute(val) } }, get_image_memory_requirements: unsafe { unsafe extern "system" fn get_image_memory_requirements( _device: crate::vk::Device, _image: Image, _p_memory_requirements: *mut MemoryRequirements, ) { panic!(concat!( "Unable to load ", stringify!(get_image_memory_requirements) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageMemoryRequirements\0"); let val = _f(cname); if val.is_null() { get_image_memory_requirements } else { ::core::mem::transmute(val) } }, get_image_sparse_memory_requirements: unsafe { unsafe extern "system" fn get_image_sparse_memory_requirements( _device: crate::vk::Device, _image: Image, _p_sparse_memory_requirement_count: *mut u32, _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements, ) { panic!(concat!( "Unable to load ", stringify!(get_image_sparse_memory_requirements) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageSparseMemoryRequirements\0"); let val = _f(cname); if val.is_null() { get_image_sparse_memory_requirements } else { ::core::mem::transmute(val) } }, queue_bind_sparse: unsafe { unsafe extern "system" fn queue_bind_sparse( _queue: Queue, _bind_info_count: u32, _p_bind_info: *const BindSparseInfo<'_>, _fence: Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(queue_bind_sparse))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueBindSparse\0"); let val = _f(cname); if val.is_null() { queue_bind_sparse } else { ::core::mem::transmute(val) } }, create_fence: unsafe { unsafe extern "system" fn create_fence( _device: crate::vk::Device, _p_create_info: *const FenceCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_fence: *mut Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_fence))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateFence\0"); let val = _f(cname); if val.is_null() { create_fence } else { ::core::mem::transmute(val) } }, destroy_fence: unsafe { unsafe extern "system" fn destroy_fence( _device: crate::vk::Device, _fence: Fence, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_fence))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyFence\0"); let val = _f(cname); if val.is_null() { destroy_fence } else { ::core::mem::transmute(val) } }, reset_fences: unsafe { unsafe extern "system" fn reset_fences( _device: crate::vk::Device, _fence_count: u32, _p_fences: *const Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(reset_fences))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetFences\0"); let val = _f(cname); if val.is_null() { reset_fences } else { ::core::mem::transmute(val) } }, get_fence_status: unsafe { unsafe extern "system" fn get_fence_status( _device: crate::vk::Device, _fence: Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_fence_status))) } let cname = 
CStr::from_bytes_with_nul_unchecked(b"vkGetFenceStatus\0"); let val = _f(cname); if val.is_null() { get_fence_status } else { ::core::mem::transmute(val) } }, wait_for_fences: unsafe { unsafe extern "system" fn wait_for_fences( _device: crate::vk::Device, _fence_count: u32, _p_fences: *const Fence, _wait_all: Bool32, _timeout: u64, ) -> Result { panic!(concat!("Unable to load ", stringify!(wait_for_fences))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkWaitForFences\0"); let val = _f(cname); if val.is_null() { wait_for_fences } else { ::core::mem::transmute(val) } }, create_semaphore: unsafe { unsafe extern "system" fn create_semaphore( _device: crate::vk::Device, _p_create_info: *const SemaphoreCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_semaphore: *mut Semaphore, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_semaphore))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateSemaphore\0"); let val = _f(cname); if val.is_null() { create_semaphore } else { ::core::mem::transmute(val) } }, destroy_semaphore: unsafe { unsafe extern "system" fn destroy_semaphore( _device: crate::vk::Device, _semaphore: Semaphore, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_semaphore))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroySemaphore\0"); let val = _f(cname); if val.is_null() { destroy_semaphore } else { ::core::mem::transmute(val) } }, create_event: unsafe { unsafe extern "system" fn create_event( _device: crate::vk::Device, _p_create_info: *const EventCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_event: *mut Event, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_event))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateEvent\0"); let val = _f(cname); if val.is_null() { create_event } else { ::core::mem::transmute(val) } }, destroy_event: unsafe { unsafe extern "system" fn destroy_event( _device: crate::vk::Device, _event: Event, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_event))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyEvent\0"); let val = _f(cname); if val.is_null() { destroy_event } else { ::core::mem::transmute(val) } }, get_event_status: unsafe { unsafe extern "system" fn get_event_status( _device: crate::vk::Device, _event: Event, ) -> Result { panic!(concat!("Unable to load ", stringify!(get_event_status))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetEventStatus\0"); let val = _f(cname); if val.is_null() { get_event_status } else { ::core::mem::transmute(val) } }, set_event: unsafe { unsafe extern "system" fn set_event( _device: crate::vk::Device, _event: Event, ) -> Result { panic!(concat!("Unable to load ", stringify!(set_event))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetEvent\0"); let val = _f(cname); if val.is_null() { set_event } else { ::core::mem::transmute(val) } }, reset_event: unsafe { unsafe extern "system" fn reset_event( _device: crate::vk::Device, _event: Event, ) -> Result { panic!(concat!("Unable to load ", stringify!(reset_event))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetEvent\0"); let val = _f(cname); if val.is_null() { reset_event } else { ::core::mem::transmute(val) } }, create_query_pool: unsafe { unsafe extern "system" fn create_query_pool( _device: crate::vk::Device, _p_create_info: *const QueryPoolCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, 
_p_query_pool: *mut QueryPool, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_query_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateQueryPool\0"); let val = _f(cname); if val.is_null() { create_query_pool } else { ::core::mem::transmute(val) } }, destroy_query_pool: unsafe { unsafe extern "system" fn destroy_query_pool( _device: crate::vk::Device, _query_pool: QueryPool, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_query_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyQueryPool\0"); let val = _f(cname); if val.is_null() { destroy_query_pool } else { ::core::mem::transmute(val) } }, get_query_pool_results: unsafe { unsafe extern "system" fn get_query_pool_results( _device: crate::vk::Device, _query_pool: QueryPool, _first_query: u32, _query_count: u32, _data_size: usize, _p_data: *mut c_void, _stride: DeviceSize, _flags: QueryResultFlags, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_query_pool_results) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetQueryPoolResults\0"); let val = _f(cname); if val.is_null() { get_query_pool_results } else { ::core::mem::transmute(val) } }, create_buffer: unsafe { unsafe extern "system" fn create_buffer( _device: crate::vk::Device, _p_create_info: *const BufferCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_buffer: *mut Buffer, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateBuffer\0"); let val = _f(cname); if val.is_null() { create_buffer } else { ::core::mem::transmute(val) } }, destroy_buffer: unsafe { unsafe extern "system" fn destroy_buffer( _device: crate::vk::Device, _buffer: Buffer, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyBuffer\0"); let val = _f(cname); if val.is_null() { destroy_buffer } else { ::core::mem::transmute(val) } }, create_buffer_view: unsafe { unsafe extern "system" fn create_buffer_view( _device: crate::vk::Device, _p_create_info: *const BufferViewCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_view: *mut BufferView, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_buffer_view))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateBufferView\0"); let val = _f(cname); if val.is_null() { create_buffer_view } else { ::core::mem::transmute(val) } }, destroy_buffer_view: unsafe { unsafe extern "system" fn destroy_buffer_view( _device: crate::vk::Device, _buffer_view: BufferView, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_buffer_view))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyBufferView\0"); let val = _f(cname); if val.is_null() { destroy_buffer_view } else { ::core::mem::transmute(val) } }, create_image: unsafe { unsafe extern "system" fn create_image( _device: crate::vk::Device, _p_create_info: *const ImageCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_image: *mut Image, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_image))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateImage\0"); let val = _f(cname); if val.is_null() { create_image } else { ::core::mem::transmute(val) } }, destroy_image: unsafe { unsafe extern "system" fn destroy_image( _device: crate::vk::Device, _image: Image, 
_p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_image))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyImage\0"); let val = _f(cname); if val.is_null() { destroy_image } else { ::core::mem::transmute(val) } }, get_image_subresource_layout: unsafe { unsafe extern "system" fn get_image_subresource_layout( _device: crate::vk::Device, _image: Image, _p_subresource: *const ImageSubresource, _p_layout: *mut SubresourceLayout, ) { panic!(concat!( "Unable to load ", stringify!(get_image_subresource_layout) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageSubresourceLayout\0"); let val = _f(cname); if val.is_null() { get_image_subresource_layout } else { ::core::mem::transmute(val) } }, create_image_view: unsafe { unsafe extern "system" fn create_image_view( _device: crate::vk::Device, _p_create_info: *const ImageViewCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_view: *mut ImageView, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_image_view))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateImageView\0"); let val = _f(cname); if val.is_null() { create_image_view } else { ::core::mem::transmute(val) } }, destroy_image_view: unsafe { unsafe extern "system" fn destroy_image_view( _device: crate::vk::Device, _image_view: ImageView, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_image_view))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyImageView\0"); let val = _f(cname); if val.is_null() { destroy_image_view } else { ::core::mem::transmute(val) } }, create_shader_module: unsafe { unsafe extern "system" fn create_shader_module( _device: crate::vk::Device, _p_create_info: *const ShaderModuleCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_shader_module: *mut ShaderModule, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_shader_module))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateShaderModule\0"); let val = _f(cname); if val.is_null() { create_shader_module } else { ::core::mem::transmute(val) } }, destroy_shader_module: unsafe { unsafe extern "system" fn destroy_shader_module( _device: crate::vk::Device, _shader_module: ShaderModule, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_shader_module) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyShaderModule\0"); let val = _f(cname); if val.is_null() { destroy_shader_module } else { ::core::mem::transmute(val) } }, create_pipeline_cache: unsafe { unsafe extern "system" fn create_pipeline_cache( _device: crate::vk::Device, _p_create_info: *const PipelineCacheCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipeline_cache: *mut PipelineCache, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_pipeline_cache) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreatePipelineCache\0"); let val = _f(cname); if val.is_null() { create_pipeline_cache } else { ::core::mem::transmute(val) } }, destroy_pipeline_cache: unsafe { unsafe extern "system" fn destroy_pipeline_cache( _device: crate::vk::Device, _pipeline_cache: PipelineCache, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_pipeline_cache) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyPipelineCache\0"); let val = _f(cname); if val.is_null() { destroy_pipeline_cache } else { 
::core::mem::transmute(val) } }, get_pipeline_cache_data: unsafe { unsafe extern "system" fn get_pipeline_cache_data( _device: crate::vk::Device, _pipeline_cache: PipelineCache, _p_data_size: *mut usize, _p_data: *mut c_void, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_pipeline_cache_data) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPipelineCacheData\0"); let val = _f(cname); if val.is_null() { get_pipeline_cache_data } else { ::core::mem::transmute(val) } }, merge_pipeline_caches: unsafe { unsafe extern "system" fn merge_pipeline_caches( _device: crate::vk::Device, _dst_cache: PipelineCache, _src_cache_count: u32, _p_src_caches: *const PipelineCache, ) -> Result { panic!(concat!( "Unable to load ", stringify!(merge_pipeline_caches) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkMergePipelineCaches\0"); let val = _f(cname); if val.is_null() { merge_pipeline_caches } else { ::core::mem::transmute(val) } }, create_graphics_pipelines: unsafe { unsafe extern "system" fn create_graphics_pipelines( _device: crate::vk::Device, _pipeline_cache: PipelineCache, _create_info_count: u32, _p_create_infos: *const GraphicsPipelineCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipelines: *mut Pipeline, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_graphics_pipelines) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateGraphicsPipelines\0"); let val = _f(cname); if val.is_null() { create_graphics_pipelines } else { ::core::mem::transmute(val) } }, create_compute_pipelines: unsafe { unsafe extern "system" fn create_compute_pipelines( _device: crate::vk::Device, _pipeline_cache: PipelineCache, _create_info_count: u32, _p_create_infos: *const ComputePipelineCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipelines: *mut Pipeline, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_compute_pipelines) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateComputePipelines\0"); let val = _f(cname); if val.is_null() { create_compute_pipelines } else { ::core::mem::transmute(val) } }, destroy_pipeline: unsafe { unsafe extern "system" fn destroy_pipeline( _device: crate::vk::Device, _pipeline: Pipeline, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_pipeline))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyPipeline\0"); let val = _f(cname); if val.is_null() { destroy_pipeline } else { ::core::mem::transmute(val) } }, create_pipeline_layout: unsafe { unsafe extern "system" fn create_pipeline_layout( _device: crate::vk::Device, _p_create_info: *const PipelineLayoutCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_pipeline_layout: *mut PipelineLayout, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_pipeline_layout) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreatePipelineLayout\0"); let val = _f(cname); if val.is_null() { create_pipeline_layout } else { ::core::mem::transmute(val) } }, destroy_pipeline_layout: unsafe { unsafe extern "system" fn destroy_pipeline_layout( _device: crate::vk::Device, _pipeline_layout: PipelineLayout, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_pipeline_layout) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyPipelineLayout\0"); let val = _f(cname); if val.is_null() { destroy_pipeline_layout } else { ::core::mem::transmute(val) } }, create_sampler: unsafe { unsafe 
extern "system" fn create_sampler( _device: crate::vk::Device, _p_create_info: *const SamplerCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_sampler: *mut Sampler, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_sampler))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateSampler\0"); let val = _f(cname); if val.is_null() { create_sampler } else { ::core::mem::transmute(val) } }, destroy_sampler: unsafe { unsafe extern "system" fn destroy_sampler( _device: crate::vk::Device, _sampler: Sampler, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_sampler))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroySampler\0"); let val = _f(cname); if val.is_null() { destroy_sampler } else { ::core::mem::transmute(val) } }, create_descriptor_set_layout: unsafe { unsafe extern "system" fn create_descriptor_set_layout( _device: crate::vk::Device, _p_create_info: *const DescriptorSetLayoutCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_set_layout: *mut DescriptorSetLayout, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_descriptor_set_layout) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDescriptorSetLayout\0"); let val = _f(cname); if val.is_null() { create_descriptor_set_layout } else { ::core::mem::transmute(val) } }, destroy_descriptor_set_layout: unsafe { unsafe extern "system" fn destroy_descriptor_set_layout( _device: crate::vk::Device, _descriptor_set_layout: DescriptorSetLayout, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_descriptor_set_layout) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyDescriptorSetLayout\0"); let val = _f(cname); if val.is_null() { destroy_descriptor_set_layout } else { ::core::mem::transmute(val) } }, create_descriptor_pool: unsafe { unsafe extern "system" fn create_descriptor_pool( _device: crate::vk::Device, _p_create_info: *const DescriptorPoolCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_descriptor_pool: *mut DescriptorPool, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_descriptor_pool) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDescriptorPool\0"); let val = _f(cname); if val.is_null() { create_descriptor_pool } else { ::core::mem::transmute(val) } }, destroy_descriptor_pool: unsafe { unsafe extern "system" fn destroy_descriptor_pool( _device: crate::vk::Device, _descriptor_pool: DescriptorPool, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_descriptor_pool) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyDescriptorPool\0"); let val = _f(cname); if val.is_null() { destroy_descriptor_pool } else { ::core::mem::transmute(val) } }, reset_descriptor_pool: unsafe { unsafe extern "system" fn reset_descriptor_pool( _device: crate::vk::Device, _descriptor_pool: DescriptorPool, _flags: DescriptorPoolResetFlags, ) -> Result { panic!(concat!( "Unable to load ", stringify!(reset_descriptor_pool) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetDescriptorPool\0"); let val = _f(cname); if val.is_null() { reset_descriptor_pool } else { ::core::mem::transmute(val) } }, allocate_descriptor_sets: unsafe { unsafe extern "system" fn allocate_descriptor_sets( _device: crate::vk::Device, _p_allocate_info: *const DescriptorSetAllocateInfo<'_>, _p_descriptor_sets: *mut DescriptorSet, ) -> Result { panic!(concat!( 
"Unable to load ", stringify!(allocate_descriptor_sets) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAllocateDescriptorSets\0"); let val = _f(cname); if val.is_null() { allocate_descriptor_sets } else { ::core::mem::transmute(val) } }, free_descriptor_sets: unsafe { unsafe extern "system" fn free_descriptor_sets( _device: crate::vk::Device, _descriptor_pool: DescriptorPool, _descriptor_set_count: u32, _p_descriptor_sets: *const DescriptorSet, ) -> Result { panic!(concat!("Unable to load ", stringify!(free_descriptor_sets))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkFreeDescriptorSets\0"); let val = _f(cname); if val.is_null() { free_descriptor_sets } else { ::core::mem::transmute(val) } }, update_descriptor_sets: unsafe { unsafe extern "system" fn update_descriptor_sets( _device: crate::vk::Device, _descriptor_write_count: u32, _p_descriptor_writes: *const WriteDescriptorSet<'_>, _descriptor_copy_count: u32, _p_descriptor_copies: *const CopyDescriptorSet<'_>, ) { panic!(concat!( "Unable to load ", stringify!(update_descriptor_sets) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkUpdateDescriptorSets\0"); let val = _f(cname); if val.is_null() { update_descriptor_sets } else { ::core::mem::transmute(val) } }, create_framebuffer: unsafe { unsafe extern "system" fn create_framebuffer( _device: crate::vk::Device, _p_create_info: *const FramebufferCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_framebuffer: *mut Framebuffer, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_framebuffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateFramebuffer\0"); let val = _f(cname); if val.is_null() { create_framebuffer } else { ::core::mem::transmute(val) } }, destroy_framebuffer: unsafe { unsafe extern "system" fn destroy_framebuffer( _device: crate::vk::Device, _framebuffer: Framebuffer, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_framebuffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyFramebuffer\0"); let val = _f(cname); if val.is_null() { destroy_framebuffer } else { ::core::mem::transmute(val) } }, create_render_pass: unsafe { unsafe extern "system" fn create_render_pass( _device: crate::vk::Device, _p_create_info: *const RenderPassCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_render_pass: *mut RenderPass, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_render_pass))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateRenderPass\0"); let val = _f(cname); if val.is_null() { create_render_pass } else { ::core::mem::transmute(val) } }, destroy_render_pass: unsafe { unsafe extern "system" fn destroy_render_pass( _device: crate::vk::Device, _render_pass: RenderPass, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_render_pass))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyRenderPass\0"); let val = _f(cname); if val.is_null() { destroy_render_pass } else { ::core::mem::transmute(val) } }, get_render_area_granularity: unsafe { unsafe extern "system" fn get_render_area_granularity( _device: crate::vk::Device, _render_pass: RenderPass, _p_granularity: *mut Extent2D, ) { panic!(concat!( "Unable to load ", stringify!(get_render_area_granularity) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetRenderAreaGranularity\0"); let val = _f(cname); if val.is_null() { get_render_area_granularity } else { ::core::mem::transmute(val) } }, 
create_command_pool: unsafe { unsafe extern "system" fn create_command_pool( _device: crate::vk::Device, _p_create_info: *const CommandPoolCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_command_pool: *mut CommandPool, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_command_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateCommandPool\0"); let val = _f(cname); if val.is_null() { create_command_pool } else { ::core::mem::transmute(val) } }, destroy_command_pool: unsafe { unsafe extern "system" fn destroy_command_pool( _device: crate::vk::Device, _command_pool: CommandPool, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!("Unable to load ", stringify!(destroy_command_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyCommandPool\0"); let val = _f(cname); if val.is_null() { destroy_command_pool } else { ::core::mem::transmute(val) } }, reset_command_pool: unsafe { unsafe extern "system" fn reset_command_pool( _device: crate::vk::Device, _command_pool: CommandPool, _flags: CommandPoolResetFlags, ) -> Result { panic!(concat!("Unable to load ", stringify!(reset_command_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetCommandPool\0"); let val = _f(cname); if val.is_null() { reset_command_pool } else { ::core::mem::transmute(val) } }, allocate_command_buffers: unsafe { unsafe extern "system" fn allocate_command_buffers( _device: crate::vk::Device, _p_allocate_info: *const CommandBufferAllocateInfo<'_>, _p_command_buffers: *mut CommandBuffer, ) -> Result { panic!(concat!( "Unable to load ", stringify!(allocate_command_buffers) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkAllocateCommandBuffers\0"); let val = _f(cname); if val.is_null() { allocate_command_buffers } else { ::core::mem::transmute(val) } }, free_command_buffers: unsafe { unsafe extern "system" fn free_command_buffers( _device: crate::vk::Device, _command_pool: CommandPool, _command_buffer_count: u32, _p_command_buffers: *const CommandBuffer, ) { panic!(concat!("Unable to load ", stringify!(free_command_buffers))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkFreeCommandBuffers\0"); let val = _f(cname); if val.is_null() { free_command_buffers } else { ::core::mem::transmute(val) } }, begin_command_buffer: unsafe { unsafe extern "system" fn begin_command_buffer( _command_buffer: CommandBuffer, _p_begin_info: *const CommandBufferBeginInfo<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(begin_command_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBeginCommandBuffer\0"); let val = _f(cname); if val.is_null() { begin_command_buffer } else { ::core::mem::transmute(val) } }, end_command_buffer: unsafe { unsafe extern "system" fn end_command_buffer( _command_buffer: CommandBuffer, ) -> Result { panic!(concat!("Unable to load ", stringify!(end_command_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEndCommandBuffer\0"); let val = _f(cname); if val.is_null() { end_command_buffer } else { ::core::mem::transmute(val) } }, reset_command_buffer: unsafe { unsafe extern "system" fn reset_command_buffer( _command_buffer: CommandBuffer, _flags: CommandBufferResetFlags, ) -> Result { panic!(concat!("Unable to load ", stringify!(reset_command_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetCommandBuffer\0"); let val = _f(cname); if val.is_null() { reset_command_buffer } else { ::core::mem::transmute(val) } }, cmd_bind_pipeline: unsafe { unsafe extern "system" fn 
cmd_bind_pipeline( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _pipeline: Pipeline, ) { panic!(concat!("Unable to load ", stringify!(cmd_bind_pipeline))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindPipeline\0"); let val = _f(cname); if val.is_null() { cmd_bind_pipeline } else { ::core::mem::transmute(val) } }, cmd_set_viewport: unsafe { unsafe extern "system" fn cmd_set_viewport( _command_buffer: CommandBuffer, _first_viewport: u32, _viewport_count: u32, _p_viewports: *const Viewport, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_viewport))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewport\0"); let val = _f(cname); if val.is_null() { cmd_set_viewport } else { ::core::mem::transmute(val) } }, cmd_set_scissor: unsafe { unsafe extern "system" fn cmd_set_scissor( _command_buffer: CommandBuffer, _first_scissor: u32, _scissor_count: u32, _p_scissors: *const Rect2D, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_scissor))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetScissor\0"); let val = _f(cname); if val.is_null() { cmd_set_scissor } else { ::core::mem::transmute(val) } }, cmd_set_line_width: unsafe { unsafe extern "system" fn cmd_set_line_width( _command_buffer: CommandBuffer, _line_width: f32, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_line_width))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineWidth\0"); let val = _f(cname); if val.is_null() { cmd_set_line_width } else { ::core::mem::transmute(val) } }, cmd_set_depth_bias: unsafe { unsafe extern "system" fn cmd_set_depth_bias( _command_buffer: CommandBuffer, _depth_bias_constant_factor: f32, _depth_bias_clamp: f32, _depth_bias_slope_factor: f32, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_depth_bias))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBias\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bias } else { ::core::mem::transmute(val) } }, cmd_set_blend_constants: unsafe { unsafe extern "system" fn cmd_set_blend_constants( _command_buffer: CommandBuffer, _blend_constants: *const [f32; 4usize], ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_blend_constants) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetBlendConstants\0"); let val = _f(cname); if val.is_null() { cmd_set_blend_constants } else { ::core::mem::transmute(val) } }, cmd_set_depth_bounds: unsafe { unsafe extern "system" fn cmd_set_depth_bounds( _command_buffer: CommandBuffer, _min_depth_bounds: f32, _max_depth_bounds: f32, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_depth_bounds))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBounds\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bounds } else { ::core::mem::transmute(val) } }, cmd_set_stencil_compare_mask: unsafe { unsafe extern "system" fn cmd_set_stencil_compare_mask( _command_buffer: CommandBuffer, _face_mask: StencilFaceFlags, _compare_mask: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_compare_mask) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilCompareMask\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_compare_mask } else { ::core::mem::transmute(val) } }, cmd_set_stencil_write_mask: unsafe { unsafe extern "system" fn cmd_set_stencil_write_mask( _command_buffer: CommandBuffer, _face_mask: StencilFaceFlags, _write_mask: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_write_mask) )) } let cname 
= CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilWriteMask\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_write_mask } else { ::core::mem::transmute(val) } }, cmd_set_stencil_reference: unsafe { unsafe extern "system" fn cmd_set_stencil_reference( _command_buffer: CommandBuffer, _face_mask: StencilFaceFlags, _reference: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_reference) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilReference\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_reference } else { ::core::mem::transmute(val) } }, cmd_bind_descriptor_sets: unsafe { unsafe extern "system" fn cmd_bind_descriptor_sets( _command_buffer: CommandBuffer, _pipeline_bind_point: PipelineBindPoint, _layout: PipelineLayout, _first_set: u32, _descriptor_set_count: u32, _p_descriptor_sets: *const DescriptorSet, _dynamic_offset_count: u32, _p_dynamic_offsets: *const u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_descriptor_sets) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindDescriptorSets\0"); let val = _f(cname); if val.is_null() { cmd_bind_descriptor_sets } else { ::core::mem::transmute(val) } }, cmd_bind_index_buffer: unsafe { unsafe extern "system" fn cmd_bind_index_buffer( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _index_type: IndexType, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_index_buffer) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindIndexBuffer\0"); let val = _f(cname); if val.is_null() { cmd_bind_index_buffer } else { ::core::mem::transmute(val) } }, cmd_bind_vertex_buffers: unsafe { unsafe extern "system" fn cmd_bind_vertex_buffers( _command_buffer: CommandBuffer, _first_binding: u32, _binding_count: u32, _p_buffers: *const Buffer, _p_offsets: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_vertex_buffers) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindVertexBuffers\0"); let val = _f(cname); if val.is_null() { cmd_bind_vertex_buffers } else { ::core::mem::transmute(val) } }, cmd_draw: unsafe { unsafe extern "system" fn cmd_draw( _command_buffer: CommandBuffer, _vertex_count: u32, _instance_count: u32, _first_vertex: u32, _first_instance: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_draw))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDraw\0"); let val = _f(cname); if val.is_null() { cmd_draw } else { ::core::mem::transmute(val) } }, cmd_draw_indexed: unsafe { unsafe extern "system" fn cmd_draw_indexed( _command_buffer: CommandBuffer, _index_count: u32, _instance_count: u32, _first_index: u32, _vertex_offset: i32, _first_instance: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_draw_indexed))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndexed\0"); let val = _f(cname); if val.is_null() { cmd_draw_indexed } else { ::core::mem::transmute(val) } }, cmd_draw_indirect: unsafe { unsafe extern "system" fn cmd_draw_indirect( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _draw_count: u32, _stride: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_draw_indirect))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirect\0"); let val = _f(cname); if val.is_null() { cmd_draw_indirect } else { ::core::mem::transmute(val) } }, cmd_draw_indexed_indirect: unsafe { unsafe extern "system" fn cmd_draw_indexed_indirect( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, 
_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indexed_indirect) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndexedIndirect\0"); let val = _f(cname); if val.is_null() { cmd_draw_indexed_indirect } else { ::core::mem::transmute(val) } }, cmd_dispatch: unsafe { unsafe extern "system" fn cmd_dispatch( _command_buffer: CommandBuffer, _group_count_x: u32, _group_count_y: u32, _group_count_z: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_dispatch))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatch\0"); let val = _f(cname); if val.is_null() { cmd_dispatch } else { ::core::mem::transmute(val) } }, cmd_dispatch_indirect: unsafe { unsafe extern "system" fn cmd_dispatch_indirect( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_dispatch_indirect) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchIndirect\0"); let val = _f(cname); if val.is_null() { cmd_dispatch_indirect } else { ::core::mem::transmute(val) } }, cmd_copy_buffer: unsafe { unsafe extern "system" fn cmd_copy_buffer( _command_buffer: CommandBuffer, _src_buffer: Buffer, _dst_buffer: Buffer, _region_count: u32, _p_regions: *const BufferCopy, ) { panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBuffer\0"); let val = _f(cname); if val.is_null() { cmd_copy_buffer } else { ::core::mem::transmute(val) } }, cmd_copy_image: unsafe { unsafe extern "system" fn cmd_copy_image( _command_buffer: CommandBuffer, _src_image: Image, _src_image_layout: ImageLayout, _dst_image: Image, _dst_image_layout: ImageLayout, _region_count: u32, _p_regions: *const ImageCopy, ) { panic!(concat!("Unable to load ", stringify!(cmd_copy_image))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImage\0"); let val = _f(cname); if val.is_null() { cmd_copy_image } else { ::core::mem::transmute(val) } }, cmd_blit_image: unsafe { unsafe extern "system" fn cmd_blit_image( _command_buffer: CommandBuffer, _src_image: Image, _src_image_layout: ImageLayout, _dst_image: Image, _dst_image_layout: ImageLayout, _region_count: u32, _p_regions: *const ImageBlit, _filter: Filter, ) { panic!(concat!("Unable to load ", stringify!(cmd_blit_image))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBlitImage\0"); let val = _f(cname); if val.is_null() { cmd_blit_image } else { ::core::mem::transmute(val) } }, cmd_copy_buffer_to_image: unsafe { unsafe extern "system" fn cmd_copy_buffer_to_image( _command_buffer: CommandBuffer, _src_buffer: Buffer, _dst_image: Image, _dst_image_layout: ImageLayout, _region_count: u32, _p_regions: *const BufferImageCopy, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_buffer_to_image) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBufferToImage\0"); let val = _f(cname); if val.is_null() { cmd_copy_buffer_to_image } else { ::core::mem::transmute(val) } }, cmd_copy_image_to_buffer: unsafe { unsafe extern "system" fn cmd_copy_image_to_buffer( _command_buffer: CommandBuffer, _src_image: Image, _src_image_layout: ImageLayout, _dst_buffer: Buffer, _region_count: u32, _p_regions: *const BufferImageCopy, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_image_to_buffer) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImageToBuffer\0"); let val = _f(cname); if val.is_null() { cmd_copy_image_to_buffer } else { ::core::mem::transmute(val) } }, 
cmd_update_buffer: unsafe { unsafe extern "system" fn cmd_update_buffer( _command_buffer: CommandBuffer, _dst_buffer: Buffer, _dst_offset: DeviceSize, _data_size: DeviceSize, _p_data: *const c_void, ) { panic!(concat!("Unable to load ", stringify!(cmd_update_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdUpdateBuffer\0"); let val = _f(cname); if val.is_null() { cmd_update_buffer } else { ::core::mem::transmute(val) } }, cmd_fill_buffer: unsafe { unsafe extern "system" fn cmd_fill_buffer( _command_buffer: CommandBuffer, _dst_buffer: Buffer, _dst_offset: DeviceSize, _size: DeviceSize, _data: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_fill_buffer))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdFillBuffer\0"); let val = _f(cname); if val.is_null() { cmd_fill_buffer } else { ::core::mem::transmute(val) } }, cmd_clear_color_image: unsafe { unsafe extern "system" fn cmd_clear_color_image( _command_buffer: CommandBuffer, _image: Image, _image_layout: ImageLayout, _p_color: *const ClearColorValue, _range_count: u32, _p_ranges: *const ImageSubresourceRange, ) { panic!(concat!( "Unable to load ", stringify!(cmd_clear_color_image) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdClearColorImage\0"); let val = _f(cname); if val.is_null() { cmd_clear_color_image } else { ::core::mem::transmute(val) } }, cmd_clear_depth_stencil_image: unsafe { unsafe extern "system" fn cmd_clear_depth_stencil_image( _command_buffer: CommandBuffer, _image: Image, _image_layout: ImageLayout, _p_depth_stencil: *const ClearDepthStencilValue, _range_count: u32, _p_ranges: *const ImageSubresourceRange, ) { panic!(concat!( "Unable to load ", stringify!(cmd_clear_depth_stencil_image) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdClearDepthStencilImage\0"); let val = _f(cname); if val.is_null() { cmd_clear_depth_stencil_image } else { ::core::mem::transmute(val) } }, cmd_clear_attachments: unsafe { unsafe extern "system" fn cmd_clear_attachments( _command_buffer: CommandBuffer, _attachment_count: u32, _p_attachments: *const ClearAttachment, _rect_count: u32, _p_rects: *const ClearRect, ) { panic!(concat!( "Unable to load ", stringify!(cmd_clear_attachments) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdClearAttachments\0"); let val = _f(cname); if val.is_null() { cmd_clear_attachments } else { ::core::mem::transmute(val) } }, cmd_resolve_image: unsafe { unsafe extern "system" fn cmd_resolve_image( _command_buffer: CommandBuffer, _src_image: Image, _src_image_layout: ImageLayout, _dst_image: Image, _dst_image_layout: ImageLayout, _region_count: u32, _p_regions: *const ImageResolve, ) { panic!(concat!("Unable to load ", stringify!(cmd_resolve_image))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdResolveImage\0"); let val = _f(cname); if val.is_null() { cmd_resolve_image } else { ::core::mem::transmute(val) } }, cmd_set_event: unsafe { unsafe extern "system" fn cmd_set_event( _command_buffer: CommandBuffer, _event: Event, _stage_mask: PipelineStageFlags, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_event))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent\0"); let val = _f(cname); if val.is_null() { cmd_set_event } else { ::core::mem::transmute(val) } }, cmd_reset_event: unsafe { unsafe extern "system" fn cmd_reset_event( _command_buffer: CommandBuffer, _event: Event, _stage_mask: PipelineStageFlags, ) { panic!(concat!("Unable to load ", stringify!(cmd_reset_event))) } let cname = 
CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent\0"); let val = _f(cname); if val.is_null() { cmd_reset_event } else { ::core::mem::transmute(val) } }, cmd_wait_events: unsafe { unsafe extern "system" fn cmd_wait_events( _command_buffer: CommandBuffer, _event_count: u32, _p_events: *const Event, _src_stage_mask: PipelineStageFlags, _dst_stage_mask: PipelineStageFlags, _memory_barrier_count: u32, _p_memory_barriers: *const MemoryBarrier<'_>, _buffer_memory_barrier_count: u32, _p_buffer_memory_barriers: *const BufferMemoryBarrier<'_>, _image_memory_barrier_count: u32, _p_image_memory_barriers: *const ImageMemoryBarrier<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_wait_events))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents\0"); let val = _f(cname); if val.is_null() { cmd_wait_events } else { ::core::mem::transmute(val) } }, cmd_pipeline_barrier: unsafe { unsafe extern "system" fn cmd_pipeline_barrier( _command_buffer: CommandBuffer, _src_stage_mask: PipelineStageFlags, _dst_stage_mask: PipelineStageFlags, _dependency_flags: DependencyFlags, _memory_barrier_count: u32, _p_memory_barriers: *const MemoryBarrier<'_>, _buffer_memory_barrier_count: u32, _p_buffer_memory_barriers: *const BufferMemoryBarrier<'_>, _image_memory_barrier_count: u32, _p_image_memory_barriers: *const ImageMemoryBarrier<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_pipeline_barrier))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier\0"); let val = _f(cname); if val.is_null() { cmd_pipeline_barrier } else { ::core::mem::transmute(val) } }, cmd_begin_query: unsafe { unsafe extern "system" fn cmd_begin_query( _command_buffer: CommandBuffer, _query_pool: QueryPool, _query: u32, _flags: QueryControlFlags, ) { panic!(concat!("Unable to load ", stringify!(cmd_begin_query))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginQuery\0"); let val = _f(cname); if val.is_null() { cmd_begin_query } else { ::core::mem::transmute(val) } }, cmd_end_query: unsafe { unsafe extern "system" fn cmd_end_query( _command_buffer: CommandBuffer, _query_pool: QueryPool, _query: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_end_query))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndQuery\0"); let val = _f(cname); if val.is_null() { cmd_end_query } else { ::core::mem::transmute(val) } }, cmd_reset_query_pool: unsafe { unsafe extern "system" fn cmd_reset_query_pool( _command_buffer: CommandBuffer, _query_pool: QueryPool, _first_query: u32, _query_count: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_reset_query_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdResetQueryPool\0"); let val = _f(cname); if val.is_null() { cmd_reset_query_pool } else { ::core::mem::transmute(val) } }, cmd_write_timestamp: unsafe { unsafe extern "system" fn cmd_write_timestamp( _command_buffer: CommandBuffer, _pipeline_stage: PipelineStageFlags, _query_pool: QueryPool, _query: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_write_timestamp))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp\0"); let val = _f(cname); if val.is_null() { cmd_write_timestamp } else { ::core::mem::transmute(val) } }, cmd_copy_query_pool_results: unsafe { unsafe extern "system" fn cmd_copy_query_pool_results( _command_buffer: CommandBuffer, _query_pool: QueryPool, _first_query: u32, _query_count: u32, _dst_buffer: Buffer, _dst_offset: DeviceSize, _stride: DeviceSize, _flags: QueryResultFlags, ) { panic!(concat!( "Unable to load 
", stringify!(cmd_copy_query_pool_results) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyQueryPoolResults\0"); let val = _f(cname); if val.is_null() { cmd_copy_query_pool_results } else { ::core::mem::transmute(val) } }, cmd_push_constants: unsafe { unsafe extern "system" fn cmd_push_constants( _command_buffer: CommandBuffer, _layout: PipelineLayout, _stage_flags: ShaderStageFlags, _offset: u32, _size: u32, _p_values: *const c_void, ) { panic!(concat!("Unable to load ", stringify!(cmd_push_constants))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPushConstants\0"); let val = _f(cname); if val.is_null() { cmd_push_constants } else { ::core::mem::transmute(val) } }, cmd_begin_render_pass: unsafe { unsafe extern "system" fn cmd_begin_render_pass( _command_buffer: CommandBuffer, _p_render_pass_begin: *const RenderPassBeginInfo<'_>, _contents: SubpassContents, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_render_pass) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderPass\0"); let val = _f(cname); if val.is_null() { cmd_begin_render_pass } else { ::core::mem::transmute(val) } }, cmd_next_subpass: unsafe { unsafe extern "system" fn cmd_next_subpass( _command_buffer: CommandBuffer, _contents: SubpassContents, ) { panic!(concat!("Unable to load ", stringify!(cmd_next_subpass))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdNextSubpass\0"); let val = _f(cname); if val.is_null() { cmd_next_subpass } else { ::core::mem::transmute(val) } }, cmd_end_render_pass: unsafe { unsafe extern "system" fn cmd_end_render_pass(_command_buffer: CommandBuffer) { panic!(concat!("Unable to load ", stringify!(cmd_end_render_pass))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderPass\0"); let val = _f(cname); if val.is_null() { cmd_end_render_pass } else { ::core::mem::transmute(val) } }, cmd_execute_commands: unsafe { unsafe extern "system" fn cmd_execute_commands( _command_buffer: CommandBuffer, _command_buffer_count: u32, _p_command_buffers: *const CommandBuffer, ) { panic!(concat!("Unable to load ", stringify!(cmd_execute_commands))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdExecuteCommands\0"); let val = _f(cname); if val.is_null() { cmd_execute_commands } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1.1 entry point function pointers"] pub struct EntryFnV1_1 { pub enumerate_instance_version: PFN_vkEnumerateInstanceVersion, } unsafe impl Send for EntryFnV1_1 {} unsafe impl Sync for EntryFnV1_1 {} impl EntryFnV1_1 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { enumerate_instance_version: unsafe { unsafe extern "system" fn enumerate_instance_version( _p_api_version: *mut u32, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_instance_version) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEnumerateInstanceVersion\0"); let val = _f(cname); if val.is_null() { enumerate_instance_version } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1.1 instance-level function pointers"] pub struct InstanceFnV1_1 { pub enumerate_physical_device_groups: PFN_vkEnumeratePhysicalDeviceGroups, pub get_physical_device_features2: PFN_vkGetPhysicalDeviceFeatures2, pub get_physical_device_properties2: PFN_vkGetPhysicalDeviceProperties2, pub get_physical_device_format_properties2: PFN_vkGetPhysicalDeviceFormatProperties2, pub 
get_physical_device_image_format_properties2: PFN_vkGetPhysicalDeviceImageFormatProperties2, pub get_physical_device_queue_family_properties2: PFN_vkGetPhysicalDeviceQueueFamilyProperties2, pub get_physical_device_memory_properties2: PFN_vkGetPhysicalDeviceMemoryProperties2, pub get_physical_device_sparse_image_format_properties2: PFN_vkGetPhysicalDeviceSparseImageFormatProperties2, pub get_physical_device_external_buffer_properties: PFN_vkGetPhysicalDeviceExternalBufferProperties, pub get_physical_device_external_fence_properties: PFN_vkGetPhysicalDeviceExternalFenceProperties, pub get_physical_device_external_semaphore_properties: PFN_vkGetPhysicalDeviceExternalSemaphoreProperties, } unsafe impl Send for InstanceFnV1_1 {} unsafe impl Sync for InstanceFnV1_1 {} impl InstanceFnV1_1 { pub fn load<F: FnMut(&CStr) -> *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { enumerate_physical_device_groups: unsafe { unsafe extern "system" fn enumerate_physical_device_groups( _instance: crate::vk::Instance, _p_physical_device_group_count: *mut u32, _p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(enumerate_physical_device_groups) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkEnumeratePhysicalDeviceGroups\0"); let val = _f(cname); if val.is_null() { enumerate_physical_device_groups } else { ::core::mem::transmute(val) } }, get_physical_device_features2: unsafe { unsafe extern "system" fn get_physical_device_features2( _physical_device: PhysicalDevice, _p_features: *mut PhysicalDeviceFeatures2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_features2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceFeatures2\0"); let val = _f(cname); if val.is_null() { get_physical_device_features2 } else { ::core::mem::transmute(val) } }, get_physical_device_properties2: unsafe { unsafe extern "system" fn get_physical_device_properties2( _physical_device: PhysicalDevice, _p_properties: *mut PhysicalDeviceProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_properties2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceProperties2\0"); let val = _f(cname); if val.is_null() { get_physical_device_properties2 } else { ::core::mem::transmute(val) } }, get_physical_device_format_properties2: unsafe { unsafe extern "system" fn get_physical_device_format_properties2( _physical_device: PhysicalDevice, _format: Format, _p_format_properties: *mut FormatProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_format_properties2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceFormatProperties2\0"); let val = _f(cname); if val.is_null() { get_physical_device_format_properties2 } else { ::core::mem::transmute(val) } }, get_physical_device_image_format_properties2: unsafe { unsafe extern "system" fn get_physical_device_image_format_properties2( _physical_device: PhysicalDevice, _p_image_format_info: *const PhysicalDeviceImageFormatInfo2<'_>, _p_image_format_properties: *mut ImageFormatProperties2<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_image_format_properties2) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceImageFormatProperties2\0", ); let val = _f(cname); if val.is_null() { get_physical_device_image_format_properties2 } else { 
::core::mem::transmute(val) } }, get_physical_device_queue_family_properties2: unsafe { unsafe extern "system" fn get_physical_device_queue_family_properties2( _physical_device: PhysicalDevice, _p_queue_family_property_count: *mut u32, _p_queue_family_properties: *mut QueueFamilyProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_queue_family_properties2) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceQueueFamilyProperties2\0", ); let val = _f(cname); if val.is_null() { get_physical_device_queue_family_properties2 } else { ::core::mem::transmute(val) } }, get_physical_device_memory_properties2: unsafe { unsafe extern "system" fn get_physical_device_memory_properties2( _physical_device: PhysicalDevice, _p_memory_properties: *mut PhysicalDeviceMemoryProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_memory_properties2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceMemoryProperties2\0"); let val = _f(cname); if val.is_null() { get_physical_device_memory_properties2 } else { ::core::mem::transmute(val) } }, get_physical_device_sparse_image_format_properties2: unsafe { unsafe extern "system" fn get_physical_device_sparse_image_format_properties2( _physical_device: PhysicalDevice, _p_format_info: *const PhysicalDeviceSparseImageFormatInfo2<'_>, _p_property_count: *mut u32, _p_properties: *mut SparseImageFormatProperties2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_sparse_image_format_properties2) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceSparseImageFormatProperties2\0", ); let val = _f(cname); if val.is_null() { get_physical_device_sparse_image_format_properties2 } else { ::core::mem::transmute(val) } }, get_physical_device_external_buffer_properties: unsafe { unsafe extern "system" fn get_physical_device_external_buffer_properties( _physical_device: PhysicalDevice, _p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo<'_>, _p_external_buffer_properties: *mut ExternalBufferProperties<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_buffer_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceExternalBufferProperties\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_buffer_properties } else { ::core::mem::transmute(val) } }, get_physical_device_external_fence_properties: unsafe { unsafe extern "system" fn get_physical_device_external_fence_properties( _physical_device: PhysicalDevice, _p_external_fence_info: *const PhysicalDeviceExternalFenceInfo<'_>, _p_external_fence_properties: *mut ExternalFenceProperties<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_fence_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetPhysicalDeviceExternalFenceProperties\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_fence_properties } else { ::core::mem::transmute(val) } }, get_physical_device_external_semaphore_properties: unsafe { unsafe extern "system" fn get_physical_device_external_semaphore_properties( _physical_device: PhysicalDevice, _p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo<'_>, _p_external_semaphore_properties: *mut ExternalSemaphoreProperties<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_physical_device_external_semaphore_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked( 
b"vkGetPhysicalDeviceExternalSemaphoreProperties\0", ); let val = _f(cname); if val.is_null() { get_physical_device_external_semaphore_properties } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1.1 device-level function pointers"] pub struct DeviceFnV1_1 { pub bind_buffer_memory2: PFN_vkBindBufferMemory2, pub bind_image_memory2: PFN_vkBindImageMemory2, pub get_device_group_peer_memory_features: PFN_vkGetDeviceGroupPeerMemoryFeatures, pub cmd_set_device_mask: PFN_vkCmdSetDeviceMask, pub cmd_dispatch_base: PFN_vkCmdDispatchBase, pub get_image_memory_requirements2: PFN_vkGetImageMemoryRequirements2, pub get_buffer_memory_requirements2: PFN_vkGetBufferMemoryRequirements2, pub get_image_sparse_memory_requirements2: PFN_vkGetImageSparseMemoryRequirements2, pub trim_command_pool: PFN_vkTrimCommandPool, pub get_device_queue2: PFN_vkGetDeviceQueue2, pub create_sampler_ycbcr_conversion: PFN_vkCreateSamplerYcbcrConversion, pub destroy_sampler_ycbcr_conversion: PFN_vkDestroySamplerYcbcrConversion, pub create_descriptor_update_template: PFN_vkCreateDescriptorUpdateTemplate, pub destroy_descriptor_update_template: PFN_vkDestroyDescriptorUpdateTemplate, pub update_descriptor_set_with_template: PFN_vkUpdateDescriptorSetWithTemplate, pub get_descriptor_set_layout_support: PFN_vkGetDescriptorSetLayoutSupport, } unsafe impl Send for DeviceFnV1_1 {} unsafe impl Sync for DeviceFnV1_1 {} impl DeviceFnV1_1 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { bind_buffer_memory2: unsafe { unsafe extern "system" fn bind_buffer_memory2( _device: crate::vk::Device, _bind_info_count: u32, _p_bind_infos: *const BindBufferMemoryInfo<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(bind_buffer_memory2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBindBufferMemory2\0"); let val = _f(cname); if val.is_null() { bind_buffer_memory2 } else { ::core::mem::transmute(val) } }, bind_image_memory2: unsafe { unsafe extern "system" fn bind_image_memory2( _device: crate::vk::Device, _bind_info_count: u32, _p_bind_infos: *const BindImageMemoryInfo<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(bind_image_memory2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkBindImageMemory2\0"); let val = _f(cname); if val.is_null() { bind_image_memory2 } else { ::core::mem::transmute(val) } }, get_device_group_peer_memory_features: unsafe { unsafe extern "system" fn get_device_group_peer_memory_features( _device: crate::vk::Device, _heap_index: u32, _local_device_index: u32, _remote_device_index: u32, _p_peer_memory_features: *mut PeerMemoryFeatureFlags, ) { panic!(concat!( "Unable to load ", stringify!(get_device_group_peer_memory_features) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceGroupPeerMemoryFeatures\0"); let val = _f(cname); if val.is_null() { get_device_group_peer_memory_features } else { ::core::mem::transmute(val) } }, cmd_set_device_mask: unsafe { unsafe extern "system" fn cmd_set_device_mask( _command_buffer: CommandBuffer, _device_mask: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_device_mask))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDeviceMask\0"); let val = _f(cname); if val.is_null() { cmd_set_device_mask } else { ::core::mem::transmute(val) } }, cmd_dispatch_base: unsafe { unsafe extern "system" fn cmd_dispatch_base( _command_buffer: CommandBuffer, _base_group_x: u32, _base_group_y: 
u32, _base_group_z: u32, _group_count_x: u32, _group_count_y: u32, _group_count_z: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_dispatch_base))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchBase\0"); let val = _f(cname); if val.is_null() { cmd_dispatch_base } else { ::core::mem::transmute(val) } }, get_image_memory_requirements2: unsafe { unsafe extern "system" fn get_image_memory_requirements2( _device: crate::vk::Device, _p_info: *const ImageMemoryRequirementsInfo2<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_memory_requirements2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageMemoryRequirements2\0"); let val = _f(cname); if val.is_null() { get_image_memory_requirements2 } else { ::core::mem::transmute(val) } }, get_buffer_memory_requirements2: unsafe { unsafe extern "system" fn get_buffer_memory_requirements2( _device: crate::vk::Device, _p_info: *const BufferMemoryRequirementsInfo2<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_buffer_memory_requirements2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetBufferMemoryRequirements2\0"); let val = _f(cname); if val.is_null() { get_buffer_memory_requirements2 } else { ::core::mem::transmute(val) } }, get_image_sparse_memory_requirements2: unsafe { unsafe extern "system" fn get_image_sparse_memory_requirements2( _device: crate::vk::Device, _p_info: *const ImageSparseMemoryRequirementsInfo2<'_>, _p_sparse_memory_requirement_count: *mut u32, _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_image_sparse_memory_requirements2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetImageSparseMemoryRequirements2\0"); let val = _f(cname); if val.is_null() { get_image_sparse_memory_requirements2 } else { ::core::mem::transmute(val) } }, trim_command_pool: unsafe { unsafe extern "system" fn trim_command_pool( _device: crate::vk::Device, _command_pool: CommandPool, _flags: CommandPoolTrimFlags, ) { panic!(concat!("Unable to load ", stringify!(trim_command_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkTrimCommandPool\0"); let val = _f(cname); if val.is_null() { trim_command_pool } else { ::core::mem::transmute(val) } }, get_device_queue2: unsafe { unsafe extern "system" fn get_device_queue2( _device: crate::vk::Device, _p_queue_info: *const DeviceQueueInfo2<'_>, _p_queue: *mut Queue, ) { panic!(concat!("Unable to load ", stringify!(get_device_queue2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceQueue2\0"); let val = _f(cname); if val.is_null() { get_device_queue2 } else { ::core::mem::transmute(val) } }, create_sampler_ycbcr_conversion: unsafe { unsafe extern "system" fn create_sampler_ycbcr_conversion( _device: crate::vk::Device, _p_create_info: *const SamplerYcbcrConversionCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_ycbcr_conversion: *mut SamplerYcbcrConversion, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_sampler_ycbcr_conversion) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateSamplerYcbcrConversion\0"); let val = _f(cname); if val.is_null() { create_sampler_ycbcr_conversion } else { ::core::mem::transmute(val) } }, destroy_sampler_ycbcr_conversion: unsafe { unsafe extern "system" fn destroy_sampler_ycbcr_conversion( _device: crate::vk::Device, _ycbcr_conversion: 
SamplerYcbcrConversion, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_sampler_ycbcr_conversion) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroySamplerYcbcrConversion\0"); let val = _f(cname); if val.is_null() { destroy_sampler_ycbcr_conversion } else { ::core::mem::transmute(val) } }, create_descriptor_update_template: unsafe { unsafe extern "system" fn create_descriptor_update_template( _device: crate::vk::Device, _p_create_info: *const DescriptorUpdateTemplateCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_descriptor_update_template: *mut DescriptorUpdateTemplate, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_descriptor_update_template) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateDescriptorUpdateTemplate\0"); let val = _f(cname); if val.is_null() { create_descriptor_update_template } else { ::core::mem::transmute(val) } }, destroy_descriptor_update_template: unsafe { unsafe extern "system" fn destroy_descriptor_update_template( _device: crate::vk::Device, _descriptor_update_template: DescriptorUpdateTemplate, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", stringify!(destroy_descriptor_update_template) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyDescriptorUpdateTemplate\0"); let val = _f(cname); if val.is_null() { destroy_descriptor_update_template } else { ::core::mem::transmute(val) } }, update_descriptor_set_with_template: unsafe { unsafe extern "system" fn update_descriptor_set_with_template( _device: crate::vk::Device, _descriptor_set: DescriptorSet, _descriptor_update_template: DescriptorUpdateTemplate, _p_data: *const c_void, ) { panic!(concat!( "Unable to load ", stringify!(update_descriptor_set_with_template) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkUpdateDescriptorSetWithTemplate\0"); let val = _f(cname); if val.is_null() { update_descriptor_set_with_template } else { ::core::mem::transmute(val) } }, get_descriptor_set_layout_support: unsafe { unsafe extern "system" fn get_descriptor_set_layout_support( _device: crate::vk::Device, _p_create_info: *const DescriptorSetLayoutCreateInfo<'_>, _p_support: *mut DescriptorSetLayoutSupport<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_descriptor_set_layout_support) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDescriptorSetLayoutSupport\0"); let val = _f(cname); if val.is_null() { get_descriptor_set_layout_support } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1.2 entry point function pointers"] pub struct EntryFnV1_2; #[derive(Clone)] #[doc = "Raw Vulkan 1.2 instance-level function pointers"] pub struct InstanceFnV1_2; #[derive(Clone)] #[doc = "Raw Vulkan 1.2 device-level function pointers"] pub struct DeviceFnV1_2 { pub cmd_draw_indirect_count: PFN_vkCmdDrawIndirectCount, pub cmd_draw_indexed_indirect_count: PFN_vkCmdDrawIndexedIndirectCount, pub create_render_pass2: PFN_vkCreateRenderPass2, pub cmd_begin_render_pass2: PFN_vkCmdBeginRenderPass2, pub cmd_next_subpass2: PFN_vkCmdNextSubpass2, pub cmd_end_render_pass2: PFN_vkCmdEndRenderPass2, pub reset_query_pool: PFN_vkResetQueryPool, pub get_semaphore_counter_value: PFN_vkGetSemaphoreCounterValue, pub wait_semaphores: PFN_vkWaitSemaphores, pub signal_semaphore: PFN_vkSignalSemaphore, pub get_buffer_device_address: PFN_vkGetBufferDeviceAddress, pub get_buffer_opaque_capture_address: PFN_vkGetBufferOpaqueCaptureAddress, 
pub get_device_memory_opaque_capture_address: PFN_vkGetDeviceMemoryOpaqueCaptureAddress, } unsafe impl Send for DeviceFnV1_2 {} unsafe impl Sync for DeviceFnV1_2 {} impl DeviceFnV1_2 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { cmd_draw_indirect_count: unsafe { unsafe extern "system" fn cmd_draw_indirect_count( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indirect_count) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCount\0"); let val = _f(cname); if val.is_null() { cmd_draw_indirect_count } else { ::core::mem::transmute(val) } }, cmd_draw_indexed_indirect_count: unsafe { unsafe extern "system" fn cmd_draw_indexed_indirect_count( _command_buffer: CommandBuffer, _buffer: Buffer, _offset: DeviceSize, _count_buffer: Buffer, _count_buffer_offset: DeviceSize, _max_draw_count: u32, _stride: u32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_draw_indexed_indirect_count) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndexedIndirectCount\0"); let val = _f(cname); if val.is_null() { cmd_draw_indexed_indirect_count } else { ::core::mem::transmute(val) } }, create_render_pass2: unsafe { unsafe extern "system" fn create_render_pass2( _device: crate::vk::Device, _p_create_info: *const RenderPassCreateInfo2<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_render_pass: *mut RenderPass, ) -> Result { panic!(concat!("Unable to load ", stringify!(create_render_pass2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreateRenderPass2\0"); let val = _f(cname); if val.is_null() { create_render_pass2 } else { ::core::mem::transmute(val) } }, cmd_begin_render_pass2: unsafe { unsafe extern "system" fn cmd_begin_render_pass2( _command_buffer: CommandBuffer, _p_render_pass_begin: *const RenderPassBeginInfo<'_>, _p_subpass_begin_info: *const SubpassBeginInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_begin_render_pass2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderPass2\0"); let val = _f(cname); if val.is_null() { cmd_begin_render_pass2 } else { ::core::mem::transmute(val) } }, cmd_next_subpass2: unsafe { unsafe extern "system" fn cmd_next_subpass2( _command_buffer: CommandBuffer, _p_subpass_begin_info: *const SubpassBeginInfo<'_>, _p_subpass_end_info: *const SubpassEndInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_next_subpass2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdNextSubpass2\0"); let val = _f(cname); if val.is_null() { cmd_next_subpass2 } else { ::core::mem::transmute(val) } }, cmd_end_render_pass2: unsafe { unsafe extern "system" fn cmd_end_render_pass2( _command_buffer: CommandBuffer, _p_subpass_end_info: *const SubpassEndInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_end_render_pass2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderPass2\0"); let val = _f(cname); if val.is_null() { cmd_end_render_pass2 } else { ::core::mem::transmute(val) } }, reset_query_pool: unsafe { unsafe extern "system" fn reset_query_pool( _device: crate::vk::Device, _query_pool: QueryPool, _first_query: u32, _query_count: u32, ) { panic!(concat!("Unable to load ", stringify!(reset_query_pool))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkResetQueryPool\0"); let val = 
_f(cname); if val.is_null() { reset_query_pool } else { ::core::mem::transmute(val) } }, get_semaphore_counter_value: unsafe { unsafe extern "system" fn get_semaphore_counter_value( _device: crate::vk::Device, _semaphore: Semaphore, _p_value: *mut u64, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_semaphore_counter_value) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetSemaphoreCounterValue\0"); let val = _f(cname); if val.is_null() { get_semaphore_counter_value } else { ::core::mem::transmute(val) } }, wait_semaphores: unsafe { unsafe extern "system" fn wait_semaphores( _device: crate::vk::Device, _p_wait_info: *const SemaphoreWaitInfo<'_>, _timeout: u64, ) -> Result { panic!(concat!("Unable to load ", stringify!(wait_semaphores))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkWaitSemaphores\0"); let val = _f(cname); if val.is_null() { wait_semaphores } else { ::core::mem::transmute(val) } }, signal_semaphore: unsafe { unsafe extern "system" fn signal_semaphore( _device: crate::vk::Device, _p_signal_info: *const SemaphoreSignalInfo<'_>, ) -> Result { panic!(concat!("Unable to load ", stringify!(signal_semaphore))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSignalSemaphore\0"); let val = _f(cname); if val.is_null() { signal_semaphore } else { ::core::mem::transmute(val) } }, get_buffer_device_address: unsafe { unsafe extern "system" fn get_buffer_device_address( _device: crate::vk::Device, _p_info: *const BufferDeviceAddressInfo<'_>, ) -> DeviceAddress { panic!(concat!( "Unable to load ", stringify!(get_buffer_device_address) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetBufferDeviceAddress\0"); let val = _f(cname); if val.is_null() { get_buffer_device_address } else { ::core::mem::transmute(val) } }, get_buffer_opaque_capture_address: unsafe { unsafe extern "system" fn get_buffer_opaque_capture_address( _device: crate::vk::Device, _p_info: *const BufferDeviceAddressInfo<'_>, ) -> u64 { panic!(concat!( "Unable to load ", stringify!(get_buffer_opaque_capture_address) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetBufferOpaqueCaptureAddress\0"); let val = _f(cname); if val.is_null() { get_buffer_opaque_capture_address } else { ::core::mem::transmute(val) } }, get_device_memory_opaque_capture_address: unsafe { unsafe extern "system" fn get_device_memory_opaque_capture_address( _device: crate::vk::Device, _p_info: *const DeviceMemoryOpaqueCaptureAddressInfo<'_>, ) -> u64 { panic!(concat!( "Unable to load ", stringify!(get_device_memory_opaque_capture_address) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceMemoryOpaqueCaptureAddress\0"); let val = _f(cname); if val.is_null() { get_device_memory_opaque_capture_address } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1.3 entry point function pointers"] pub struct EntryFnV1_3; #[derive(Clone)] #[doc = "Raw Vulkan 1.3 instance-level function pointers"] pub struct InstanceFnV1_3 { pub get_physical_device_tool_properties: PFN_vkGetPhysicalDeviceToolProperties, } unsafe impl Send for InstanceFnV1_3 {} unsafe impl Sync for InstanceFnV1_3 {} impl InstanceFnV1_3 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { get_physical_device_tool_properties: unsafe { unsafe extern "system" fn get_physical_device_tool_properties( _physical_device: PhysicalDevice, _p_tool_count: *mut u32, _p_tool_properties: *mut 
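// vkGetPhysicalDeviceToolProperties follows the usual count-then-fill enumeration
// convention. A minimal sketch of calling it through this table once loaded,
// assuming placeholder names `fns` and `pdev` and ignoring the returned `Result`:
//
//     let mut count = 0u32;
//     unsafe { (fns.get_physical_device_tool_properties)(pdev, &mut count, ::core::ptr::null_mut()) };
//     let mut tools = vec![PhysicalDeviceToolProperties::default(); count as usize];
//     unsafe { (fns.get_physical_device_tool_properties)(pdev, &mut count, tools.as_mut_ptr()) };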
PhysicalDeviceToolProperties<'_>, ) -> Result { panic!(concat!( "Unable to load ", stringify!(get_physical_device_tool_properties) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPhysicalDeviceToolProperties\0"); let val = _f(cname); if val.is_null() { get_physical_device_tool_properties } else { ::core::mem::transmute(val) } }, } } } #[derive(Clone)] #[doc = "Raw Vulkan 1.3 device-level function pointers"] pub struct DeviceFnV1_3 { pub create_private_data_slot: PFN_vkCreatePrivateDataSlot, pub destroy_private_data_slot: PFN_vkDestroyPrivateDataSlot, pub set_private_data: PFN_vkSetPrivateData, pub get_private_data: PFN_vkGetPrivateData, pub cmd_set_event2: PFN_vkCmdSetEvent2, pub cmd_reset_event2: PFN_vkCmdResetEvent2, pub cmd_wait_events2: PFN_vkCmdWaitEvents2, pub cmd_pipeline_barrier2: PFN_vkCmdPipelineBarrier2, pub cmd_write_timestamp2: PFN_vkCmdWriteTimestamp2, pub queue_submit2: PFN_vkQueueSubmit2, pub cmd_copy_buffer2: PFN_vkCmdCopyBuffer2, pub cmd_copy_image2: PFN_vkCmdCopyImage2, pub cmd_copy_buffer_to_image2: PFN_vkCmdCopyBufferToImage2, pub cmd_copy_image_to_buffer2: PFN_vkCmdCopyImageToBuffer2, pub cmd_blit_image2: PFN_vkCmdBlitImage2, pub cmd_resolve_image2: PFN_vkCmdResolveImage2, pub cmd_begin_rendering: PFN_vkCmdBeginRendering, pub cmd_end_rendering: PFN_vkCmdEndRendering, pub cmd_set_cull_mode: PFN_vkCmdSetCullMode, pub cmd_set_front_face: PFN_vkCmdSetFrontFace, pub cmd_set_primitive_topology: PFN_vkCmdSetPrimitiveTopology, pub cmd_set_viewport_with_count: PFN_vkCmdSetViewportWithCount, pub cmd_set_scissor_with_count: PFN_vkCmdSetScissorWithCount, pub cmd_bind_vertex_buffers2: PFN_vkCmdBindVertexBuffers2, pub cmd_set_depth_test_enable: PFN_vkCmdSetDepthTestEnable, pub cmd_set_depth_write_enable: PFN_vkCmdSetDepthWriteEnable, pub cmd_set_depth_compare_op: PFN_vkCmdSetDepthCompareOp, pub cmd_set_depth_bounds_test_enable: PFN_vkCmdSetDepthBoundsTestEnable, pub cmd_set_stencil_test_enable: PFN_vkCmdSetStencilTestEnable, pub cmd_set_stencil_op: PFN_vkCmdSetStencilOp, pub cmd_set_rasterizer_discard_enable: PFN_vkCmdSetRasterizerDiscardEnable, pub cmd_set_depth_bias_enable: PFN_vkCmdSetDepthBiasEnable, pub cmd_set_primitive_restart_enable: PFN_vkCmdSetPrimitiveRestartEnable, pub get_device_buffer_memory_requirements: PFN_vkGetDeviceBufferMemoryRequirements, pub get_device_image_memory_requirements: PFN_vkGetDeviceImageMemoryRequirements, pub get_device_image_sparse_memory_requirements: PFN_vkGetDeviceImageSparseMemoryRequirements, } unsafe impl Send for DeviceFnV1_3 {} unsafe impl Sync for DeviceFnV1_3 {} impl DeviceFnV1_3 { pub fn load *const c_void>(mut f: F) -> Self { Self::load_erased(&mut f) } fn load_erased(_f: &mut dyn FnMut(&CStr) -> *const c_void) -> Self { Self { create_private_data_slot: unsafe { unsafe extern "system" fn create_private_data_slot( _device: crate::vk::Device, _p_create_info: *const PrivateDataSlotCreateInfo<'_>, _p_allocator: *const AllocationCallbacks<'_>, _p_private_data_slot: *mut PrivateDataSlot, ) -> Result { panic!(concat!( "Unable to load ", stringify!(create_private_data_slot) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCreatePrivateDataSlot\0"); let val = _f(cname); if val.is_null() { create_private_data_slot } else { ::core::mem::transmute(val) } }, destroy_private_data_slot: unsafe { unsafe extern "system" fn destroy_private_data_slot( _device: crate::vk::Device, _private_data_slot: PrivateDataSlot, _p_allocator: *const AllocationCallbacks<'_>, ) { panic!(concat!( "Unable to load ", 
stringify!(destroy_private_data_slot) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkDestroyPrivateDataSlot\0"); let val = _f(cname); if val.is_null() { destroy_private_data_slot } else { ::core::mem::transmute(val) } }, set_private_data: unsafe { unsafe extern "system" fn set_private_data( _device: crate::vk::Device, _object_type: ObjectType, _object_handle: u64, _private_data_slot: PrivateDataSlot, _data: u64, ) -> Result { panic!(concat!("Unable to load ", stringify!(set_private_data))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkSetPrivateData\0"); let val = _f(cname); if val.is_null() { set_private_data } else { ::core::mem::transmute(val) } }, get_private_data: unsafe { unsafe extern "system" fn get_private_data( _device: crate::vk::Device, _object_type: ObjectType, _object_handle: u64, _private_data_slot: PrivateDataSlot, _p_data: *mut u64, ) { panic!(concat!("Unable to load ", stringify!(get_private_data))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetPrivateData\0"); let val = _f(cname); if val.is_null() { get_private_data } else { ::core::mem::transmute(val) } }, cmd_set_event2: unsafe { unsafe extern "system" fn cmd_set_event2( _command_buffer: CommandBuffer, _event: Event, _p_dependency_info: *const DependencyInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_event2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent2\0"); let val = _f(cname); if val.is_null() { cmd_set_event2 } else { ::core::mem::transmute(val) } }, cmd_reset_event2: unsafe { unsafe extern "system" fn cmd_reset_event2( _command_buffer: CommandBuffer, _event: Event, _stage_mask: PipelineStageFlags2, ) { panic!(concat!("Unable to load ", stringify!(cmd_reset_event2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent2\0"); let val = _f(cname); if val.is_null() { cmd_reset_event2 } else { ::core::mem::transmute(val) } }, cmd_wait_events2: unsafe { unsafe extern "system" fn cmd_wait_events2( _command_buffer: CommandBuffer, _event_count: u32, _p_events: *const Event, _p_dependency_infos: *const DependencyInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_wait_events2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents2\0"); let val = _f(cname); if val.is_null() { cmd_wait_events2 } else { ::core::mem::transmute(val) } }, cmd_pipeline_barrier2: unsafe { unsafe extern "system" fn cmd_pipeline_barrier2( _command_buffer: CommandBuffer, _p_dependency_info: *const DependencyInfo<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_pipeline_barrier2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier2\0"); let val = _f(cname); if val.is_null() { cmd_pipeline_barrier2 } else { ::core::mem::transmute(val) } }, cmd_write_timestamp2: unsafe { unsafe extern "system" fn cmd_write_timestamp2( _command_buffer: CommandBuffer, _stage: PipelineStageFlags2, _query_pool: QueryPool, _query: u32, ) { panic!(concat!("Unable to load ", stringify!(cmd_write_timestamp2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp2\0"); let val = _f(cname); if val.is_null() { cmd_write_timestamp2 } else { ::core::mem::transmute(val) } }, queue_submit2: unsafe { unsafe extern "system" fn queue_submit2( _queue: Queue, _submit_count: u32, _p_submits: *const SubmitInfo2<'_>, _fence: Fence, ) -> Result { panic!(concat!("Unable to load ", stringify!(queue_submit2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit2\0"); let val = _f(cname); if val.is_null() { queue_submit2 } 
else { ::core::mem::transmute(val) } }, cmd_copy_buffer2: unsafe { unsafe extern "system" fn cmd_copy_buffer2( _command_buffer: CommandBuffer, _p_copy_buffer_info: *const CopyBufferInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBuffer2\0"); let val = _f(cname); if val.is_null() { cmd_copy_buffer2 } else { ::core::mem::transmute(val) } }, cmd_copy_image2: unsafe { unsafe extern "system" fn cmd_copy_image2( _command_buffer: CommandBuffer, _p_copy_image_info: *const CopyImageInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_copy_image2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImage2\0"); let val = _f(cname); if val.is_null() { cmd_copy_image2 } else { ::core::mem::transmute(val) } }, cmd_copy_buffer_to_image2: unsafe { unsafe extern "system" fn cmd_copy_buffer_to_image2( _command_buffer: CommandBuffer, _p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_buffer_to_image2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBufferToImage2\0"); let val = _f(cname); if val.is_null() { cmd_copy_buffer_to_image2 } else { ::core::mem::transmute(val) } }, cmd_copy_image_to_buffer2: unsafe { unsafe extern "system" fn cmd_copy_image_to_buffer2( _command_buffer: CommandBuffer, _p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(cmd_copy_image_to_buffer2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImageToBuffer2\0"); let val = _f(cname); if val.is_null() { cmd_copy_image_to_buffer2 } else { ::core::mem::transmute(val) } }, cmd_blit_image2: unsafe { unsafe extern "system" fn cmd_blit_image2( _command_buffer: CommandBuffer, _p_blit_image_info: *const BlitImageInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_blit_image2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBlitImage2\0"); let val = _f(cname); if val.is_null() { cmd_blit_image2 } else { ::core::mem::transmute(val) } }, cmd_resolve_image2: unsafe { unsafe extern "system" fn cmd_resolve_image2( _command_buffer: CommandBuffer, _p_resolve_image_info: *const ResolveImageInfo2<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_resolve_image2))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdResolveImage2\0"); let val = _f(cname); if val.is_null() { cmd_resolve_image2 } else { ::core::mem::transmute(val) } }, cmd_begin_rendering: unsafe { unsafe extern "system" fn cmd_begin_rendering( _command_buffer: CommandBuffer, _p_rendering_info: *const RenderingInfo<'_>, ) { panic!(concat!("Unable to load ", stringify!(cmd_begin_rendering))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRendering\0"); let val = _f(cname); if val.is_null() { cmd_begin_rendering } else { ::core::mem::transmute(val) } }, cmd_end_rendering: unsafe { unsafe extern "system" fn cmd_end_rendering(_command_buffer: CommandBuffer) { panic!(concat!("Unable to load ", stringify!(cmd_end_rendering))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRendering\0"); let val = _f(cname); if val.is_null() { cmd_end_rendering } else { ::core::mem::transmute(val) } }, cmd_set_cull_mode: unsafe { unsafe extern "system" fn cmd_set_cull_mode( _command_buffer: CommandBuffer, _cull_mode: CullModeFlags, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_cull_mode))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCullMode\0"); 
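// `cmd_begin_rendering`/`cmd_end_rendering`, loaded just above, are the Vulkan 1.3
// dynamic-rendering entry points. A minimal recording sketch through this raw
// table, assuming placeholder values `fns`, `cmd_buf` and `render_area`, with no
// attachments or error handling shown:
//
//     let info = RenderingInfo::default().render_area(render_area).layer_count(1);
//     unsafe {
//         (fns.cmd_begin_rendering)(cmd_buf, &info);
//         // ... record draws ...
//         (fns.cmd_end_rendering)(cmd_buf);
//     }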
let val = _f(cname); if val.is_null() { cmd_set_cull_mode } else { ::core::mem::transmute(val) } }, cmd_set_front_face: unsafe { unsafe extern "system" fn cmd_set_front_face( _command_buffer: CommandBuffer, _front_face: FrontFace, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_front_face))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetFrontFace\0"); let val = _f(cname); if val.is_null() { cmd_set_front_face } else { ::core::mem::transmute(val) } }, cmd_set_primitive_topology: unsafe { unsafe extern "system" fn cmd_set_primitive_topology( _command_buffer: CommandBuffer, _primitive_topology: PrimitiveTopology, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_primitive_topology) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPrimitiveTopology\0"); let val = _f(cname); if val.is_null() { cmd_set_primitive_topology } else { ::core::mem::transmute(val) } }, cmd_set_viewport_with_count: unsafe { unsafe extern "system" fn cmd_set_viewport_with_count( _command_buffer: CommandBuffer, _viewport_count: u32, _p_viewports: *const Viewport, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_viewport_with_count) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportWithCount\0"); let val = _f(cname); if val.is_null() { cmd_set_viewport_with_count } else { ::core::mem::transmute(val) } }, cmd_set_scissor_with_count: unsafe { unsafe extern "system" fn cmd_set_scissor_with_count( _command_buffer: CommandBuffer, _scissor_count: u32, _p_scissors: *const Rect2D, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_scissor_with_count) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetScissorWithCount\0"); let val = _f(cname); if val.is_null() { cmd_set_scissor_with_count } else { ::core::mem::transmute(val) } }, cmd_bind_vertex_buffers2: unsafe { unsafe extern "system" fn cmd_bind_vertex_buffers2( _command_buffer: CommandBuffer, _first_binding: u32, _binding_count: u32, _p_buffers: *const Buffer, _p_offsets: *const DeviceSize, _p_sizes: *const DeviceSize, _p_strides: *const DeviceSize, ) { panic!(concat!( "Unable to load ", stringify!(cmd_bind_vertex_buffers2) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdBindVertexBuffers2\0"); let val = _f(cname); if val.is_null() { cmd_bind_vertex_buffers2 } else { ::core::mem::transmute(val) } }, cmd_set_depth_test_enable: unsafe { unsafe extern "system" fn cmd_set_depth_test_enable( _command_buffer: CommandBuffer, _depth_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_test_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthTestEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_test_enable } else { ::core::mem::transmute(val) } }, cmd_set_depth_write_enable: unsafe { unsafe extern "system" fn cmd_set_depth_write_enable( _command_buffer: CommandBuffer, _depth_write_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_write_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthWriteEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_write_enable } else { ::core::mem::transmute(val) } }, cmd_set_depth_compare_op: unsafe { unsafe extern "system" fn cmd_set_depth_compare_op( _command_buffer: CommandBuffer, _depth_compare_op: CompareOp, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_compare_op) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthCompareOp\0"); let val = _f(cname); if val.is_null() { 
cmd_set_depth_compare_op } else { ::core::mem::transmute(val) } }, cmd_set_depth_bounds_test_enable: unsafe { unsafe extern "system" fn cmd_set_depth_bounds_test_enable( _command_buffer: CommandBuffer, _depth_bounds_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bounds_test_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBoundsTestEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bounds_test_enable } else { ::core::mem::transmute(val) } }, cmd_set_stencil_test_enable: unsafe { unsafe extern "system" fn cmd_set_stencil_test_enable( _command_buffer: CommandBuffer, _stencil_test_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_stencil_test_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilTestEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_test_enable } else { ::core::mem::transmute(val) } }, cmd_set_stencil_op: unsafe { unsafe extern "system" fn cmd_set_stencil_op( _command_buffer: CommandBuffer, _face_mask: StencilFaceFlags, _fail_op: StencilOp, _pass_op: StencilOp, _depth_fail_op: StencilOp, _compare_op: CompareOp, ) { panic!(concat!("Unable to load ", stringify!(cmd_set_stencil_op))) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilOp\0"); let val = _f(cname); if val.is_null() { cmd_set_stencil_op } else { ::core::mem::transmute(val) } }, cmd_set_rasterizer_discard_enable: unsafe { unsafe extern "system" fn cmd_set_rasterizer_discard_enable( _command_buffer: CommandBuffer, _rasterizer_discard_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_rasterizer_discard_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetRasterizerDiscardEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_rasterizer_discard_enable } else { ::core::mem::transmute(val) } }, cmd_set_depth_bias_enable: unsafe { unsafe extern "system" fn cmd_set_depth_bias_enable( _command_buffer: CommandBuffer, _depth_bias_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_depth_bias_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBiasEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_depth_bias_enable } else { ::core::mem::transmute(val) } }, cmd_set_primitive_restart_enable: unsafe { unsafe extern "system" fn cmd_set_primitive_restart_enable( _command_buffer: CommandBuffer, _primitive_restart_enable: Bool32, ) { panic!(concat!( "Unable to load ", stringify!(cmd_set_primitive_restart_enable) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPrimitiveRestartEnable\0"); let val = _f(cname); if val.is_null() { cmd_set_primitive_restart_enable } else { ::core::mem::transmute(val) } }, get_device_buffer_memory_requirements: unsafe { unsafe extern "system" fn get_device_buffer_memory_requirements( _device: crate::vk::Device, _p_info: *const DeviceBufferMemoryRequirements<'_>, _p_memory_requirements: *mut MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_buffer_memory_requirements) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceBufferMemoryRequirements\0"); let val = _f(cname); if val.is_null() { get_device_buffer_memory_requirements } else { ::core::mem::transmute(val) } }, get_device_image_memory_requirements: unsafe { unsafe extern "system" fn get_device_image_memory_requirements( _device: crate::vk::Device, _p_info: *const DeviceImageMemoryRequirements<'_>, _p_memory_requirements: *mut 
MemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_image_memory_requirements) )) } let cname = CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceImageMemoryRequirements\0"); let val = _f(cname); if val.is_null() { get_device_image_memory_requirements } else { ::core::mem::transmute(val) } }, get_device_image_sparse_memory_requirements: unsafe { unsafe extern "system" fn get_device_image_sparse_memory_requirements( _device: crate::vk::Device, _p_info: *const DeviceImageMemoryRequirements<'_>, _p_sparse_memory_requirement_count: *mut u32, _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>, ) { panic!(concat!( "Unable to load ", stringify!(get_device_image_sparse_memory_requirements) )) } let cname = CStr::from_bytes_with_nul_unchecked( b"vkGetDeviceImageSparseMemoryRequirements\0", ); let val = _f(cname); if val.is_null() { get_device_image_sparse_memory_requirements } else { ::core::mem::transmute(val) } }, } } } ash-0.38.0+1.3.281/src/util.rs000064400000000000000000000107711046102023000134150ustar 00000000000000use crate::vk; use core::ffi::c_void; use core::iter::Iterator; use core::marker::PhantomData; use core::mem::size_of; use core::slice; #[cfg(feature = "std")] use std::io; /// [`Align`] handles dynamic alignment. The is useful for dynamic uniform buffers where /// the alignment might be different. For example a 4x4 f32 matrix has a size of 64 bytes /// but the min alignment for a dynamic uniform buffer might be 256 bytes. A slice of `&[Mat4x4]` /// has a memory layout of `[[64 bytes], [64 bytes], [64 bytes]]`, but it might need to have a memory /// layout of `[[256 bytes], [256 bytes], [256 bytes]]`. /// [`Align::copy_from_slice`] will copy a slice of `&[T]` directly into the host memory without /// an additional allocation and with the correct alignment. #[derive(Debug, Clone)] pub struct Align { ptr: *mut c_void, elem_size: vk::DeviceSize, size: vk::DeviceSize, _m: PhantomData, } #[derive(Debug)] pub struct AlignIter<'a, T> { align: &'a mut Align, current: vk::DeviceSize, } impl Align { pub fn copy_from_slice(&mut self, slice: &[T]) { if self.elem_size == size_of::() as u64 { unsafe { let mapped_slice = slice::from_raw_parts_mut(self.ptr.cast(), slice.len()); mapped_slice.copy_from_slice(slice); } } else { for (i, val) in self.iter_mut().enumerate().take(slice.len()) { *val = slice[i]; } } } } fn calc_padding(adr: vk::DeviceSize, align: vk::DeviceSize) -> vk::DeviceSize { (align - adr % align) % align } impl Align { pub unsafe fn new(ptr: *mut c_void, alignment: vk::DeviceSize, size: vk::DeviceSize) -> Self { let padding = calc_padding(size_of::() as vk::DeviceSize, alignment); let elem_size = size_of::() as vk::DeviceSize + padding; assert!(calc_padding(size, alignment) == 0, "size must be aligned"); Self { ptr, elem_size, size, _m: PhantomData, } } pub fn iter_mut(&mut self) -> AlignIter<'_, T> { AlignIter { current: 0, align: self, } } } impl<'a, T: Copy + 'a> Iterator for AlignIter<'a, T> { type Item = &'a mut T; fn next(&mut self) -> Option { if self.current == self.align.size { return None; } unsafe { // Need to cast to *mut u8 because () has size 0 let ptr = (self.align.ptr.cast::()) .offset(self.current as isize) .cast(); self.current += self.align.elem_size; Some(&mut *ptr) } } } /// Decode SPIR-V from bytes. /// /// This function handles SPIR-V of arbitrary endianness gracefully, and returns correctly aligned /// storage. 
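// A minimal usage sketch for the `Align` helper defined above, assuming `ptr`
// comes from vkMapMemory, `alignment` is the device's minimum dynamic uniform
// buffer offset alignment, `size` is the mapped range, and `Mat4`/`matrices`
// stand in for any `Copy` element type and its data:
//
//     let mut align = unsafe { Align::<Mat4>::new(ptr, alignment, size) };
//     align.copy_from_slice(&matrices);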
/// /// # Examples /// ```no_run /// // Decode SPIR-V from a file /// let mut file = std::fs::File::open("/path/to/shader.spv").unwrap(); /// let words = ash::util::read_spv(&mut file).unwrap(); /// ``` /// ``` /// // Decode SPIR-V from memory /// const SPIRV: &[u8] = &[ /// // ... /// # 0x03, 0x02, 0x23, 0x07, /// ]; /// let words = ash::util::read_spv(&mut std::io::Cursor::new(&SPIRV[..])).unwrap(); /// ``` #[cfg(feature = "std")] pub fn read_spv(x: &mut R) -> io::Result> { // TODO use stream_len() once it is stabilized and remove the subsequent rewind() call let size = x.seek(io::SeekFrom::End(0))?; x.rewind()?; if size % 4 != 0 { return Err(io::Error::new( io::ErrorKind::InvalidData, "input length not divisible by 4", )); } if size > usize::MAX as u64 { return Err(io::Error::new(io::ErrorKind::InvalidData, "input too long")); } let words = (size / 4) as usize; // https://github.com/ash-rs/ash/issues/354: // Zero-initialize the result to prevent read_exact from possibly // reading uninitialized memory. let mut result = vec![0u32; words]; x.read_exact(unsafe { slice::from_raw_parts_mut(result.as_mut_ptr().cast::(), words * 4) })?; const MAGIC_NUMBER: u32 = 0x0723_0203; if !result.is_empty() && result[0] == MAGIC_NUMBER.swap_bytes() { for word in &mut result { *word = word.swap_bytes(); } } if result.is_empty() || result[0] != MAGIC_NUMBER { return Err(io::Error::new( io::ErrorKind::InvalidData, "input missing SPIR-V magic number", )); } Ok(result) } ash-0.38.0+1.3.281/src/vk/aliases.rs000064400000000000000000000462401046102023000145010ustar 00000000000000use crate::vk::bitflags::*; use crate::vk::definitions::*; use crate::vk::enums::*; pub type GeometryFlagsNV = GeometryFlagsKHR; pub type GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR; pub type BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR; pub type PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags; pub type DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags; pub type PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags; pub type SemaphoreWaitFlagsKHR = SemaphoreWaitFlags; pub type AccessFlags2KHR = AccessFlags2; pub type PipelineStageFlags2KHR = PipelineStageFlags2; pub type FormatFeatureFlags2KHR = FormatFeatureFlags2; pub type RenderingFlagsKHR = RenderingFlags; pub type PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; pub type MemoryAllocateFlagsKHR = MemoryAllocateFlags; pub type CommandPoolTrimFlagsKHR = CommandPoolTrimFlags; pub type ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags; pub type ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags; pub type ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags; pub type ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags; pub type SemaphoreImportFlagsKHR = SemaphoreImportFlags; pub type ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags; pub type ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags; pub type FenceImportFlagsKHR = FenceImportFlags; pub type DescriptorBindingFlagsEXT = DescriptorBindingFlags; pub type ResolveModeFlagsKHR = ResolveModeFlags; pub type ToolPurposeFlagsEXT = ToolPurposeFlags; pub type SubmitFlagsKHR = SubmitFlags; pub type DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; pub type SamplerYcbcrConversionKHR = SamplerYcbcrConversion; pub type PrivateDataSlotEXT = PrivateDataSlot; pub type DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; pub type PointClippingBehaviorKHR = 
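// The aliases in this module keep older extension spellings (`...KHR`, `...EXT`,
// `...NV`) pointing at the types they were promoted or renamed to, so both names
// denote the same type and can be used interchangeably, e.g.:
//
//     let features: PhysicalDeviceFeatures2KHR<'_> = PhysicalDeviceFeatures2::default();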
PointClippingBehavior; pub type QueueGlobalPriorityEXT = QueueGlobalPriorityKHR; pub type TimeDomainEXT = TimeDomainKHR; pub type SemaphoreTypeKHR = SemaphoreType; pub type CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR; pub type AccelerationStructureTypeNV = AccelerationStructureTypeKHR; pub type GeometryTypeNV = GeometryTypeKHR; pub type RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR; pub type LineRasterizationModeEXT = LineRasterizationModeKHR; pub type ScopeNV = ScopeKHR; pub type ComponentTypeNV = ComponentTypeKHR; pub type TessellationDomainOriginKHR = TessellationDomainOrigin; pub type SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion; pub type SamplerYcbcrRangeKHR = SamplerYcbcrRange; pub type ChromaLocationKHR = ChromaLocation; pub type SamplerReductionModeEXT = SamplerReductionMode; pub type ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence; pub type DriverIdKHR = DriverId; pub type DevicePrivateDataCreateInfoEXT<'a> = DevicePrivateDataCreateInfo<'a>; pub type PrivateDataSlotCreateInfoEXT<'a> = PrivateDataSlotCreateInfo<'a>; pub type PhysicalDevicePrivateDataFeaturesEXT<'a> = PhysicalDevicePrivateDataFeatures<'a>; pub type PhysicalDeviceFeatures2KHR<'a> = PhysicalDeviceFeatures2<'a>; pub type PhysicalDeviceProperties2KHR<'a> = PhysicalDeviceProperties2<'a>; pub type FormatProperties2KHR<'a> = FormatProperties2<'a>; pub type ImageFormatProperties2KHR<'a> = ImageFormatProperties2<'a>; pub type PhysicalDeviceImageFormatInfo2KHR<'a> = PhysicalDeviceImageFormatInfo2<'a>; pub type QueueFamilyProperties2KHR<'a> = QueueFamilyProperties2<'a>; pub type PhysicalDeviceMemoryProperties2KHR<'a> = PhysicalDeviceMemoryProperties2<'a>; pub type SparseImageFormatProperties2KHR<'a> = SparseImageFormatProperties2<'a>; pub type PhysicalDeviceSparseImageFormatInfo2KHR<'a> = PhysicalDeviceSparseImageFormatInfo2<'a>; pub type ConformanceVersionKHR = ConformanceVersion; pub type PhysicalDeviceDriverPropertiesKHR<'a> = PhysicalDeviceDriverProperties<'a>; pub type PhysicalDeviceVariablePointersFeaturesKHR<'a> = PhysicalDeviceVariablePointersFeatures<'a>; pub type PhysicalDeviceVariablePointerFeaturesKHR<'a> = PhysicalDeviceVariablePointersFeatures<'a>; pub type PhysicalDeviceVariablePointerFeatures<'a> = PhysicalDeviceVariablePointersFeatures<'a>; pub type ExternalMemoryPropertiesKHR = ExternalMemoryProperties; pub type PhysicalDeviceExternalImageFormatInfoKHR<'a> = PhysicalDeviceExternalImageFormatInfo<'a>; pub type ExternalImageFormatPropertiesKHR<'a> = ExternalImageFormatProperties<'a>; pub type PhysicalDeviceExternalBufferInfoKHR<'a> = PhysicalDeviceExternalBufferInfo<'a>; pub type ExternalBufferPropertiesKHR<'a> = ExternalBufferProperties<'a>; pub type PhysicalDeviceIDPropertiesKHR<'a> = PhysicalDeviceIDProperties<'a>; pub type ExternalMemoryImageCreateInfoKHR<'a> = ExternalMemoryImageCreateInfo<'a>; pub type ExternalMemoryBufferCreateInfoKHR<'a> = ExternalMemoryBufferCreateInfo<'a>; pub type ExportMemoryAllocateInfoKHR<'a> = ExportMemoryAllocateInfo<'a>; pub type PhysicalDeviceExternalSemaphoreInfoKHR<'a> = PhysicalDeviceExternalSemaphoreInfo<'a>; pub type ExternalSemaphorePropertiesKHR<'a> = ExternalSemaphoreProperties<'a>; pub type ExportSemaphoreCreateInfoKHR<'a> = ExportSemaphoreCreateInfo<'a>; pub type PhysicalDeviceExternalFenceInfoKHR<'a> = PhysicalDeviceExternalFenceInfo<'a>; pub type ExternalFencePropertiesKHR<'a> = ExternalFenceProperties<'a>; pub type ExportFenceCreateInfoKHR<'a> = ExportFenceCreateInfo<'a>; pub type 
PhysicalDeviceMultiviewFeaturesKHR<'a> = PhysicalDeviceMultiviewFeatures<'a>; pub type PhysicalDeviceMultiviewPropertiesKHR<'a> = PhysicalDeviceMultiviewProperties<'a>; pub type RenderPassMultiviewCreateInfoKHR<'a> = RenderPassMultiviewCreateInfo<'a>; pub type PhysicalDeviceGroupPropertiesKHR<'a> = PhysicalDeviceGroupProperties<'a>; pub type MemoryAllocateFlagsInfoKHR<'a> = MemoryAllocateFlagsInfo<'a>; pub type BindBufferMemoryInfoKHR<'a> = BindBufferMemoryInfo<'a>; pub type BindBufferMemoryDeviceGroupInfoKHR<'a> = BindBufferMemoryDeviceGroupInfo<'a>; pub type BindImageMemoryInfoKHR<'a> = BindImageMemoryInfo<'a>; pub type BindImageMemoryDeviceGroupInfoKHR<'a> = BindImageMemoryDeviceGroupInfo<'a>; pub type DeviceGroupRenderPassBeginInfoKHR<'a> = DeviceGroupRenderPassBeginInfo<'a>; pub type DeviceGroupCommandBufferBeginInfoKHR<'a> = DeviceGroupCommandBufferBeginInfo<'a>; pub type DeviceGroupSubmitInfoKHR<'a> = DeviceGroupSubmitInfo<'a>; pub type DeviceGroupBindSparseInfoKHR<'a> = DeviceGroupBindSparseInfo<'a>; pub type DeviceGroupDeviceCreateInfoKHR<'a> = DeviceGroupDeviceCreateInfo<'a>; pub type DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; pub type DescriptorUpdateTemplateCreateInfoKHR<'a> = DescriptorUpdateTemplateCreateInfo<'a>; pub type InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; pub type RenderPassInputAttachmentAspectCreateInfoKHR<'a> = RenderPassInputAttachmentAspectCreateInfo<'a>; pub type PhysicalDevice16BitStorageFeaturesKHR<'a> = PhysicalDevice16BitStorageFeatures<'a>; pub type PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR<'a> = PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'a>; pub type BufferMemoryRequirementsInfo2KHR<'a> = BufferMemoryRequirementsInfo2<'a>; pub type DeviceBufferMemoryRequirementsKHR<'a> = DeviceBufferMemoryRequirements<'a>; pub type ImageMemoryRequirementsInfo2KHR<'a> = ImageMemoryRequirementsInfo2<'a>; pub type ImageSparseMemoryRequirementsInfo2KHR<'a> = ImageSparseMemoryRequirementsInfo2<'a>; pub type DeviceImageMemoryRequirementsKHR<'a> = DeviceImageMemoryRequirements<'a>; pub type MemoryRequirements2KHR<'a> = MemoryRequirements2<'a>; pub type SparseImageMemoryRequirements2KHR<'a> = SparseImageMemoryRequirements2<'a>; pub type PhysicalDevicePointClippingPropertiesKHR<'a> = PhysicalDevicePointClippingProperties<'a>; pub type MemoryDedicatedRequirementsKHR<'a> = MemoryDedicatedRequirements<'a>; pub type MemoryDedicatedAllocateInfoKHR<'a> = MemoryDedicatedAllocateInfo<'a>; pub type ImageViewUsageCreateInfoKHR<'a> = ImageViewUsageCreateInfo<'a>; pub type PipelineTessellationDomainOriginStateCreateInfoKHR<'a> = PipelineTessellationDomainOriginStateCreateInfo<'a>; pub type SamplerYcbcrConversionInfoKHR<'a> = SamplerYcbcrConversionInfo<'a>; pub type SamplerYcbcrConversionCreateInfoKHR<'a> = SamplerYcbcrConversionCreateInfo<'a>; pub type BindImagePlaneMemoryInfoKHR<'a> = BindImagePlaneMemoryInfo<'a>; pub type ImagePlaneMemoryRequirementsInfoKHR<'a> = ImagePlaneMemoryRequirementsInfo<'a>; pub type PhysicalDeviceSamplerYcbcrConversionFeaturesKHR<'a> = PhysicalDeviceSamplerYcbcrConversionFeatures<'a>; pub type SamplerYcbcrConversionImageFormatPropertiesKHR<'a> = SamplerYcbcrConversionImageFormatProperties<'a>; pub type PhysicalDeviceSamplerFilterMinmaxPropertiesEXT<'a> = PhysicalDeviceSamplerFilterMinmaxProperties<'a>; pub type SamplerReductionModeCreateInfoEXT<'a> = SamplerReductionModeCreateInfo<'a>; pub type PhysicalDeviceInlineUniformBlockFeaturesEXT<'a> = PhysicalDeviceInlineUniformBlockFeatures<'a>; pub type 
PhysicalDeviceInlineUniformBlockPropertiesEXT<'a> = PhysicalDeviceInlineUniformBlockProperties<'a>; pub type WriteDescriptorSetInlineUniformBlockEXT<'a> = WriteDescriptorSetInlineUniformBlock<'a>; pub type DescriptorPoolInlineUniformBlockCreateInfoEXT<'a> = DescriptorPoolInlineUniformBlockCreateInfo<'a>; pub type ImageFormatListCreateInfoKHR<'a> = ImageFormatListCreateInfo<'a>; pub type PhysicalDeviceMaintenance3PropertiesKHR<'a> = PhysicalDeviceMaintenance3Properties<'a>; pub type PhysicalDeviceMaintenance4FeaturesKHR<'a> = PhysicalDeviceMaintenance4Features<'a>; pub type PhysicalDeviceMaintenance4PropertiesKHR<'a> = PhysicalDeviceMaintenance4Properties<'a>; pub type DescriptorSetLayoutSupportKHR<'a> = DescriptorSetLayoutSupport<'a>; pub type PhysicalDeviceShaderDrawParameterFeatures<'a> = PhysicalDeviceShaderDrawParametersFeatures<'a>; pub type PhysicalDeviceShaderFloat16Int8FeaturesKHR<'a> = PhysicalDeviceShaderFloat16Int8Features<'a>; pub type PhysicalDeviceFloat16Int8FeaturesKHR<'a> = PhysicalDeviceShaderFloat16Int8Features<'a>; pub type PhysicalDeviceFloatControlsPropertiesKHR<'a> = PhysicalDeviceFloatControlsProperties<'a>; pub type PhysicalDeviceHostQueryResetFeaturesEXT<'a> = PhysicalDeviceHostQueryResetFeatures<'a>; pub type DeviceQueueGlobalPriorityCreateInfoEXT<'a> = DeviceQueueGlobalPriorityCreateInfoKHR<'a>; pub type PhysicalDeviceGlobalPriorityQueryFeaturesEXT<'a> = PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'a>; pub type QueueFamilyGlobalPriorityPropertiesEXT<'a> = QueueFamilyGlobalPriorityPropertiesKHR<'a>; pub type CalibratedTimestampInfoEXT<'a> = CalibratedTimestampInfoKHR<'a>; pub type PhysicalDeviceDescriptorIndexingFeaturesEXT<'a> = PhysicalDeviceDescriptorIndexingFeatures<'a>; pub type PhysicalDeviceDescriptorIndexingPropertiesEXT<'a> = PhysicalDeviceDescriptorIndexingProperties<'a>; pub type DescriptorSetLayoutBindingFlagsCreateInfoEXT<'a> = DescriptorSetLayoutBindingFlagsCreateInfo<'a>; pub type DescriptorSetVariableDescriptorCountAllocateInfoEXT<'a> = DescriptorSetVariableDescriptorCountAllocateInfo<'a>; pub type DescriptorSetVariableDescriptorCountLayoutSupportEXT<'a> = DescriptorSetVariableDescriptorCountLayoutSupport<'a>; pub type AttachmentDescription2KHR<'a> = AttachmentDescription2<'a>; pub type AttachmentReference2KHR<'a> = AttachmentReference2<'a>; pub type SubpassDescription2KHR<'a> = SubpassDescription2<'a>; pub type SubpassDependency2KHR<'a> = SubpassDependency2<'a>; pub type RenderPassCreateInfo2KHR<'a> = RenderPassCreateInfo2<'a>; pub type SubpassBeginInfoKHR<'a> = SubpassBeginInfo<'a>; pub type SubpassEndInfoKHR<'a> = SubpassEndInfo<'a>; pub type PhysicalDeviceTimelineSemaphoreFeaturesKHR<'a> = PhysicalDeviceTimelineSemaphoreFeatures<'a>; pub type PhysicalDeviceTimelineSemaphorePropertiesKHR<'a> = PhysicalDeviceTimelineSemaphoreProperties<'a>; pub type SemaphoreTypeCreateInfoKHR<'a> = SemaphoreTypeCreateInfo<'a>; pub type TimelineSemaphoreSubmitInfoKHR<'a> = TimelineSemaphoreSubmitInfo<'a>; pub type SemaphoreWaitInfoKHR<'a> = SemaphoreWaitInfo<'a>; pub type SemaphoreSignalInfoKHR<'a> = SemaphoreSignalInfo<'a>; pub type VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescriptionKHR; pub type PipelineVertexInputDivisorStateCreateInfoEXT<'a> = PipelineVertexInputDivisorStateCreateInfoKHR<'a>; pub type PhysicalDevice8BitStorageFeaturesKHR<'a> = PhysicalDevice8BitStorageFeatures<'a>; pub type PhysicalDeviceVulkanMemoryModelFeaturesKHR<'a> = PhysicalDeviceVulkanMemoryModelFeatures<'a>; pub type 
PhysicalDeviceShaderAtomicInt64FeaturesKHR<'a> = PhysicalDeviceShaderAtomicInt64Features<'a>; pub type PhysicalDeviceVertexAttributeDivisorFeaturesEXT<'a> = PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'a>; pub type PhysicalDeviceDepthStencilResolvePropertiesKHR<'a> = PhysicalDeviceDepthStencilResolveProperties<'a>; pub type SubpassDescriptionDepthStencilResolveKHR<'a> = SubpassDescriptionDepthStencilResolve<'a>; pub type PhysicalDeviceFragmentShaderBarycentricFeaturesNV<'a> = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'a>; pub type ImageStencilUsageCreateInfoEXT<'a> = ImageStencilUsageCreateInfo<'a>; pub type PhysicalDeviceScalarBlockLayoutFeaturesEXT<'a> = PhysicalDeviceScalarBlockLayoutFeatures<'a>; pub type PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR<'a> = PhysicalDeviceUniformBufferStandardLayoutFeatures<'a>; pub type PhysicalDeviceBufferDeviceAddressFeaturesKHR<'a> = PhysicalDeviceBufferDeviceAddressFeatures<'a>; pub type PhysicalDeviceBufferAddressFeaturesEXT<'a> = PhysicalDeviceBufferDeviceAddressFeaturesEXT<'a>; pub type BufferDeviceAddressInfoKHR<'a> = BufferDeviceAddressInfo<'a>; pub type BufferDeviceAddressInfoEXT<'a> = BufferDeviceAddressInfo<'a>; pub type BufferOpaqueCaptureAddressCreateInfoKHR<'a> = BufferOpaqueCaptureAddressCreateInfo<'a>; pub type PhysicalDeviceImagelessFramebufferFeaturesKHR<'a> = PhysicalDeviceImagelessFramebufferFeatures<'a>; pub type FramebufferAttachmentsCreateInfoKHR<'a> = FramebufferAttachmentsCreateInfo<'a>; pub type FramebufferAttachmentImageInfoKHR<'a> = FramebufferAttachmentImageInfo<'a>; pub type RenderPassAttachmentBeginInfoKHR<'a> = RenderPassAttachmentBeginInfo<'a>; pub type PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT<'a> = PhysicalDeviceTextureCompressionASTCHDRFeatures<'a>; pub type PipelineCreationFeedbackEXT = PipelineCreationFeedback; pub type PipelineCreationFeedbackCreateInfoEXT<'a> = PipelineCreationFeedbackCreateInfo<'a>; pub type QueryPoolCreateInfoINTEL<'a> = QueryPoolPerformanceQueryCreateInfoINTEL<'a>; pub type PhysicalDeviceIndexTypeUint8FeaturesEXT<'a> = PhysicalDeviceIndexTypeUint8FeaturesKHR<'a>; pub type PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR<'a> = PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'a>; pub type AttachmentReferenceStencilLayoutKHR<'a> = AttachmentReferenceStencilLayout<'a>; pub type AttachmentDescriptionStencilLayoutKHR<'a> = AttachmentDescriptionStencilLayout<'a>; pub type PipelineInfoEXT<'a> = PipelineInfoKHR<'a>; pub type PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT<'a> = PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'a>; pub type PhysicalDeviceTexelBufferAlignmentPropertiesEXT<'a> = PhysicalDeviceTexelBufferAlignmentProperties<'a>; pub type PhysicalDeviceSubgroupSizeControlFeaturesEXT<'a> = PhysicalDeviceSubgroupSizeControlFeatures<'a>; pub type PhysicalDeviceSubgroupSizeControlPropertiesEXT<'a> = PhysicalDeviceSubgroupSizeControlProperties<'a>; pub type PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT<'a> = PipelineShaderStageRequiredSubgroupSizeCreateInfo<'a>; pub type ShaderRequiredSubgroupSizeCreateInfoEXT<'a> = PipelineShaderStageRequiredSubgroupSizeCreateInfo<'a>; pub type MemoryOpaqueCaptureAddressAllocateInfoKHR<'a> = MemoryOpaqueCaptureAddressAllocateInfo<'a>; pub type DeviceMemoryOpaqueCaptureAddressInfoKHR<'a> = DeviceMemoryOpaqueCaptureAddressInfo<'a>; pub type PhysicalDeviceLineRasterizationFeaturesEXT<'a> = PhysicalDeviceLineRasterizationFeaturesKHR<'a>; pub type PhysicalDeviceLineRasterizationPropertiesEXT<'a> = 
PhysicalDeviceLineRasterizationPropertiesKHR<'a>; pub type PipelineRasterizationLineStateCreateInfoEXT<'a> = PipelineRasterizationLineStateCreateInfoKHR<'a>; pub type PhysicalDevicePipelineCreationCacheControlFeaturesEXT<'a> = PhysicalDevicePipelineCreationCacheControlFeatures<'a>; pub type PhysicalDeviceToolPropertiesEXT<'a> = PhysicalDeviceToolProperties<'a>; pub type AabbPositionsNV = AabbPositionsKHR; pub type TransformMatrixNV = TransformMatrixKHR; pub type AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; pub type PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR<'a> = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'a>; pub type PhysicalDeviceImageRobustnessFeaturesEXT<'a> = PhysicalDeviceImageRobustnessFeatures<'a>; pub type BufferCopy2KHR<'a> = BufferCopy2<'a>; pub type ImageCopy2KHR<'a> = ImageCopy2<'a>; pub type ImageBlit2KHR<'a> = ImageBlit2<'a>; pub type BufferImageCopy2KHR<'a> = BufferImageCopy2<'a>; pub type ImageResolve2KHR<'a> = ImageResolve2<'a>; pub type CopyBufferInfo2KHR<'a> = CopyBufferInfo2<'a>; pub type CopyImageInfo2KHR<'a> = CopyImageInfo2<'a>; pub type BlitImageInfo2KHR<'a> = BlitImageInfo2<'a>; pub type CopyBufferToImageInfo2KHR<'a> = CopyBufferToImageInfo2<'a>; pub type CopyImageToBufferInfo2KHR<'a> = CopyImageToBufferInfo2<'a>; pub type ResolveImageInfo2KHR<'a> = ResolveImageInfo2<'a>; pub type PhysicalDeviceShaderTerminateInvocationFeaturesKHR<'a> = PhysicalDeviceShaderTerminateInvocationFeatures<'a>; pub type PhysicalDeviceMutableDescriptorTypeFeaturesVALVE<'a> = PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'a>; pub type MutableDescriptorTypeListVALVE<'a> = MutableDescriptorTypeListEXT<'a>; pub type MutableDescriptorTypeCreateInfoVALVE<'a> = MutableDescriptorTypeCreateInfoEXT<'a>; pub type MemoryBarrier2KHR<'a> = MemoryBarrier2<'a>; pub type ImageMemoryBarrier2KHR<'a> = ImageMemoryBarrier2<'a>; pub type BufferMemoryBarrier2KHR<'a> = BufferMemoryBarrier2<'a>; pub type DependencyInfoKHR<'a> = DependencyInfo<'a>; pub type SemaphoreSubmitInfoKHR<'a> = SemaphoreSubmitInfo<'a>; pub type CommandBufferSubmitInfoKHR<'a> = CommandBufferSubmitInfo<'a>; pub type SubmitInfo2KHR<'a> = SubmitInfo2<'a>; pub type PhysicalDeviceSynchronization2FeaturesKHR<'a> = PhysicalDeviceSynchronization2Features<'a>; pub type PhysicalDeviceShaderIntegerDotProductFeaturesKHR<'a> = PhysicalDeviceShaderIntegerDotProductFeatures<'a>; pub type PhysicalDeviceShaderIntegerDotProductPropertiesKHR<'a> = PhysicalDeviceShaderIntegerDotProductProperties<'a>; pub type FormatProperties3KHR<'a> = FormatProperties3<'a>; pub type PipelineRenderingCreateInfoKHR<'a> = PipelineRenderingCreateInfo<'a>; pub type RenderingInfoKHR<'a> = RenderingInfo<'a>; pub type RenderingAttachmentInfoKHR<'a> = RenderingAttachmentInfo<'a>; pub type PhysicalDeviceDynamicRenderingFeaturesKHR<'a> = PhysicalDeviceDynamicRenderingFeatures<'a>; pub type CommandBufferInheritanceRenderingInfoKHR<'a> = CommandBufferInheritanceRenderingInfo<'a>; pub type AttachmentSampleCountInfoNV<'a> = AttachmentSampleCountInfoAMD<'a>; pub type PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM<'a> = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'a>; pub type ImageSubresource2EXT<'a> = ImageSubresource2KHR<'a>; pub type SubresourceLayout2EXT<'a> = SubresourceLayout2KHR<'a>; ash-0.38.0+1.3.281/src/vk/bitflags.rs000064400000000000000000002637761046102023000146720ustar 00000000000000use crate::vk::definitions::*; #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = 
""] pub struct PipelineCacheCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCacheCreateFlags, Flags); impl PipelineCacheCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct QueueFlags(pub(crate) Flags); vk_bitflags_wrapped!(QueueFlags, Flags); impl QueueFlags { #[doc = "Queue supports graphics operations"] pub const GRAPHICS: Self = Self(0b1); #[doc = "Queue supports compute operations"] pub const COMPUTE: Self = Self(0b10); #[doc = "Queue supports transfer operations"] pub const TRANSFER: Self = Self(0b100); #[doc = "Queue supports sparse resource memory management operations"] pub const SPARSE_BINDING: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CullModeFlags(pub(crate) Flags); vk_bitflags_wrapped!(CullModeFlags, Flags); impl CullModeFlags { pub const NONE: Self = Self(0); pub const FRONT: Self = Self(0b1); pub const BACK: Self = Self(0b10); pub const FRONT_AND_BACK: Self = Self(0x0000_0003); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct RenderPassCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(RenderPassCreateFlags, Flags); impl RenderPassCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DeviceQueueCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(DeviceQueueCreateFlags, Flags); impl DeviceQueueCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryPropertyFlags(pub(crate) Flags); vk_bitflags_wrapped!(MemoryPropertyFlags, Flags); impl MemoryPropertyFlags { #[doc = "If otherwise stated, then allocate memory on device"] pub const DEVICE_LOCAL: Self = Self(0b1); #[doc = "Memory is mappable by host"] pub const HOST_VISIBLE: Self = Self(0b10); #[doc = "Memory will have i/o coherency. 
If not set, application may need to use vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges to flush/invalidate host cache"] pub const HOST_COHERENT: Self = Self(0b100); #[doc = "Memory will be cached by the host"] pub const HOST_CACHED: Self = Self(0b1000); #[doc = "Memory may be allocated by the driver when it is required"] pub const LAZILY_ALLOCATED: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryHeapFlags(pub(crate) Flags); vk_bitflags_wrapped!(MemoryHeapFlags, Flags); impl MemoryHeapFlags { #[doc = "If set, heap represents device memory"] pub const DEVICE_LOCAL: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AccessFlags(pub(crate) Flags); vk_bitflags_wrapped!(AccessFlags, Flags); impl AccessFlags { #[doc = "Controls coherency of indirect command reads"] pub const INDIRECT_COMMAND_READ: Self = Self(0b1); #[doc = "Controls coherency of index reads"] pub const INDEX_READ: Self = Self(0b10); #[doc = "Controls coherency of vertex attribute reads"] pub const VERTEX_ATTRIBUTE_READ: Self = Self(0b100); #[doc = "Controls coherency of uniform buffer reads"] pub const UNIFORM_READ: Self = Self(0b1000); #[doc = "Controls coherency of input attachment reads"] pub const INPUT_ATTACHMENT_READ: Self = Self(0b1_0000); #[doc = "Controls coherency of shader reads"] pub const SHADER_READ: Self = Self(0b10_0000); #[doc = "Controls coherency of shader writes"] pub const SHADER_WRITE: Self = Self(0b100_0000); #[doc = "Controls coherency of color attachment reads"] pub const COLOR_ATTACHMENT_READ: Self = Self(0b1000_0000); #[doc = "Controls coherency of color attachment writes"] pub const COLOR_ATTACHMENT_WRITE: Self = Self(0b1_0000_0000); #[doc = "Controls coherency of depth/stencil attachment reads"] pub const DEPTH_STENCIL_ATTACHMENT_READ: Self = Self(0b10_0000_0000); #[doc = "Controls coherency of depth/stencil attachment writes"] pub const DEPTH_STENCIL_ATTACHMENT_WRITE: Self = Self(0b100_0000_0000); #[doc = "Controls coherency of transfer reads"] pub const TRANSFER_READ: Self = Self(0b1000_0000_0000); #[doc = "Controls coherency of transfer writes"] pub const TRANSFER_WRITE: Self = Self(0b1_0000_0000_0000); #[doc = "Controls coherency of host reads"] pub const HOST_READ: Self = Self(0b10_0000_0000_0000); #[doc = "Controls coherency of host writes"] pub const HOST_WRITE: Self = Self(0b100_0000_0000_0000); #[doc = "Controls coherency of memory reads"] pub const MEMORY_READ: Self = Self(0b1000_0000_0000_0000); #[doc = "Controls coherency of memory writes"] pub const MEMORY_WRITE: Self = Self(0b1_0000_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct BufferUsageFlags(pub(crate) Flags); vk_bitflags_wrapped!(BufferUsageFlags, Flags); impl BufferUsageFlags { #[doc = "Can be used as a source of transfer operations"] pub const TRANSFER_SRC: Self = Self(0b1); #[doc = "Can be used as a destination of transfer operations"] pub const TRANSFER_DST: Self = Self(0b10); #[doc = "Can be used as TBO"] pub const UNIFORM_TEXEL_BUFFER: Self = Self(0b100); #[doc = "Can be used as IBO"] pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1000); #[doc = "Can be used as UBO"] pub const UNIFORM_BUFFER: Self = Self(0b1_0000); #[doc = "Can be used as SSBO"] pub const STORAGE_BUFFER: Self = Self(0b10_0000); #[doc = "Can be used as source of fixed-function index fetch (index buffer)"] pub 
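// The `vk_bitflags_wrapped!` invocations in this module derive the usual bit
// operations for these wrappers, so flags combine with `|` and can be queried
// with `contains`:
//
//     let usage = BufferUsageFlags::VERTEX_BUFFER | BufferUsageFlags::TRANSFER_DST;
//     assert!(usage.contains(BufferUsageFlags::TRANSFER_DST));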
const INDEX_BUFFER: Self = Self(0b100_0000); #[doc = "Can be used as source of fixed-function vertex fetch (VBO)"] pub const VERTEX_BUFFER: Self = Self(0b1000_0000); #[doc = "Can be the source of indirect parameters (e.g. indirect buffer, parameter buffer)"] pub const INDIRECT_BUFFER: Self = Self(0b1_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct BufferUsageFlags2KHR(pub(crate) Flags64); vk_bitflags_wrapped!(BufferUsageFlags2KHR, Flags64); impl BufferUsageFlags2KHR { pub const TRANSFER_SRC: Self = Self(0b1); pub const TRANSFER_DST: Self = Self(0b10); pub const UNIFORM_TEXEL_BUFFER: Self = Self(0b100); pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1000); pub const UNIFORM_BUFFER: Self = Self(0b1_0000); pub const STORAGE_BUFFER: Self = Self(0b10_0000); pub const INDEX_BUFFER: Self = Self(0b100_0000); pub const VERTEX_BUFFER: Self = Self(0b1000_0000); pub const INDIRECT_BUFFER: Self = Self(0b1_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct BufferCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(BufferCreateFlags, Flags); impl BufferCreateFlags { #[doc = "Buffer should support sparse backing"] pub const SPARSE_BINDING: Self = Self(0b1); #[doc = "Buffer should support sparse backing with partial residency"] pub const SPARSE_RESIDENCY: Self = Self(0b10); #[doc = "Buffer should support constant data access to physical memory ranges mapped into multiple locations of sparse buffers"] pub const SPARSE_ALIASED: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ShaderStageFlags(pub(crate) Flags); vk_bitflags_wrapped!(ShaderStageFlags, Flags); impl ShaderStageFlags { pub const VERTEX: Self = Self(0b1); pub const TESSELLATION_CONTROL: Self = Self(0b10); pub const TESSELLATION_EVALUATION: Self = Self(0b100); pub const GEOMETRY: Self = Self(0b1000); pub const FRAGMENT: Self = Self(0b1_0000); pub const COMPUTE: Self = Self(0b10_0000); pub const ALL_GRAPHICS: Self = Self(0x0000_001F); pub const ALL: Self = Self(0x7FFF_FFFF); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageUsageFlags(pub(crate) Flags); vk_bitflags_wrapped!(ImageUsageFlags, Flags); impl ImageUsageFlags { #[doc = "Can be used as a source of transfer operations"] pub const TRANSFER_SRC: Self = Self(0b1); #[doc = "Can be used as a destination of transfer operations"] pub const TRANSFER_DST: Self = Self(0b10); #[doc = "Can be sampled from (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"] pub const SAMPLED: Self = Self(0b100); #[doc = "Can be used as storage image (STORAGE_IMAGE descriptor type)"] pub const STORAGE: Self = Self(0b1000); #[doc = "Can be used as framebuffer color attachment"] pub const COLOR_ATTACHMENT: Self = Self(0b1_0000); #[doc = "Can be used as framebuffer depth/stencil attachment"] pub const DEPTH_STENCIL_ATTACHMENT: Self = Self(0b10_0000); #[doc = "Image data not needed outside of rendering"] pub const TRANSIENT_ATTACHMENT: Self = Self(0b100_0000); #[doc = "Can be used as framebuffer input attachment"] pub const INPUT_ATTACHMENT: Self = Self(0b1000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(ImageCreateFlags, Flags); impl ImageCreateFlags { #[doc = "Image should support sparse backing"] pub const 
SPARSE_BINDING: Self = Self(0b1); #[doc = "Image should support sparse backing with partial residency"] pub const SPARSE_RESIDENCY: Self = Self(0b10); #[doc = "Image should support constant data access to physical memory ranges mapped into multiple locations of sparse images"] pub const SPARSE_ALIASED: Self = Self(0b100); #[doc = "Allows image views to have different format than the base image"] pub const MUTABLE_FORMAT: Self = Self(0b1000); #[doc = "Allows creating image views with cube type from the created image"] pub const CUBE_COMPATIBLE: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageViewCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(ImageViewCreateFlags, Flags); impl ImageViewCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SamplerCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(SamplerCreateFlags, Flags); impl SamplerCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCreateFlags, Flags); impl PipelineCreateFlags { pub const DISABLE_OPTIMIZATION: Self = Self(0b1); pub const ALLOW_DERIVATIVES: Self = Self(0b10); pub const DERIVATIVE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCreateFlags2KHR(pub(crate) Flags64); vk_bitflags_wrapped!(PipelineCreateFlags2KHR, Flags64); impl PipelineCreateFlags2KHR { pub const DISABLE_OPTIMIZATION: Self = Self(0b1); pub const ALLOW_DERIVATIVES: Self = Self(0b10); pub const DERIVATIVE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineShaderStageCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineShaderStageCreateFlags, Flags); impl PipelineShaderStageCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ColorComponentFlags(pub(crate) Flags); vk_bitflags_wrapped!(ColorComponentFlags, Flags); impl ColorComponentFlags { pub const R: Self = Self(0b1); pub const G: Self = Self(0b10); pub const B: Self = Self(0b100); pub const A: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct FenceCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(FenceCreateFlags, Flags); impl FenceCreateFlags { pub const SIGNALED: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SemaphoreCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(SemaphoreCreateFlags, Flags); impl SemaphoreCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct FormatFeatureFlags(pub(crate) Flags); vk_bitflags_wrapped!(FormatFeatureFlags, Flags); impl FormatFeatureFlags { #[doc = "Format can be used for sampled images (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"] pub const SAMPLED_IMAGE: Self = Self(0b1); #[doc = "Format can be used for storage images (STORAGE_IMAGE descriptor type)"] pub const STORAGE_IMAGE: Self = Self(0b10); #[doc = "Format supports atomic operations in case it is used for storage images"] pub const STORAGE_IMAGE_ATOMIC: Self = Self(0b100); #[doc = "Format can be used for uniform texel buffers (TBOs)"] pub const 
UNIFORM_TEXEL_BUFFER: Self = Self(0b1000); #[doc = "Format can be used for storage texel buffers (IBOs)"] pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1_0000); #[doc = "Format supports atomic operations in case it is used for storage texel buffers"] pub const STORAGE_TEXEL_BUFFER_ATOMIC: Self = Self(0b10_0000); #[doc = "Format can be used for vertex buffers (VBOs)"] pub const VERTEX_BUFFER: Self = Self(0b100_0000); #[doc = "Format can be used for color attachment images"] pub const COLOR_ATTACHMENT: Self = Self(0b1000_0000); #[doc = "Format supports blending in case it is used for color attachment images"] pub const COLOR_ATTACHMENT_BLEND: Self = Self(0b1_0000_0000); #[doc = "Format can be used for depth/stencil attachment images"] pub const DEPTH_STENCIL_ATTACHMENT: Self = Self(0b10_0000_0000); #[doc = "Format can be used as the source image of blits with vkCmdBlitImage"] pub const BLIT_SRC: Self = Self(0b100_0000_0000); #[doc = "Format can be used as the destination image of blits with vkCmdBlitImage"] pub const BLIT_DST: Self = Self(0b1000_0000_0000); #[doc = "Format can be filtered with VK_FILTER_LINEAR when being sampled"] pub const SAMPLED_IMAGE_FILTER_LINEAR: Self = Self(0b1_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct QueryControlFlags(pub(crate) Flags); vk_bitflags_wrapped!(QueryControlFlags, Flags); impl QueryControlFlags { #[doc = "Require precise results to be collected by the query"] pub const PRECISE: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct QueryResultFlags(pub(crate) Flags); vk_bitflags_wrapped!(QueryResultFlags, Flags); impl QueryResultFlags { #[doc = "Results of the queries are written to the destination buffer as 64-bit values"] pub const TYPE_64: Self = Self(0b1); #[doc = "Results of the queries are waited on before proceeding with the result copy"] pub const WAIT: Self = Self(0b10); #[doc = "Besides the results of the query, the availability of the results is also written"] pub const WITH_AVAILABILITY: Self = Self(0b100); #[doc = "Copy the partial results of the query even if the final results are not available"] pub const PARTIAL: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CommandBufferUsageFlags(pub(crate) Flags); vk_bitflags_wrapped!(CommandBufferUsageFlags, Flags); impl CommandBufferUsageFlags { pub const ONE_TIME_SUBMIT: Self = Self(0b1); pub const RENDER_PASS_CONTINUE: Self = Self(0b10); #[doc = "Command buffer may be submitted/executed more than once simultaneously"] pub const SIMULTANEOUS_USE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct QueryPipelineStatisticFlags(pub(crate) Flags); vk_bitflags_wrapped!(QueryPipelineStatisticFlags, Flags); impl QueryPipelineStatisticFlags { #[doc = "Optional"] pub const INPUT_ASSEMBLY_VERTICES: Self = Self(0b1); #[doc = "Optional"] pub const INPUT_ASSEMBLY_PRIMITIVES: Self = Self(0b10); #[doc = "Optional"] pub const VERTEX_SHADER_INVOCATIONS: Self = Self(0b100); #[doc = "Optional"] pub const GEOMETRY_SHADER_INVOCATIONS: Self = Self(0b1000); #[doc = "Optional"] pub const GEOMETRY_SHADER_PRIMITIVES: Self = Self(0b1_0000); #[doc = "Optional"] pub const CLIPPING_INVOCATIONS: Self = Self(0b10_0000); #[doc = "Optional"] pub const CLIPPING_PRIMITIVES: Self = Self(0b100_0000); #[doc = "Optional"] pub const 
FRAGMENT_SHADER_INVOCATIONS: Self = Self(0b1000_0000); #[doc = "Optional"] pub const TESSELLATION_CONTROL_SHADER_PATCHES: Self = Self(0b1_0000_0000); #[doc = "Optional"] pub const TESSELLATION_EVALUATION_SHADER_INVOCATIONS: Self = Self(0b10_0000_0000); #[doc = "Optional"] pub const COMPUTE_SHADER_INVOCATIONS: Self = Self(0b100_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryMapFlags(pub(crate) Flags); vk_bitflags_wrapped!(MemoryMapFlags, Flags); impl MemoryMapFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageAspectFlags(pub(crate) Flags); vk_bitflags_wrapped!(ImageAspectFlags, Flags); impl ImageAspectFlags { pub const COLOR: Self = Self(0b1); pub const DEPTH: Self = Self(0b10); pub const STENCIL: Self = Self(0b100); pub const METADATA: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SparseImageFormatFlags(pub(crate) Flags); vk_bitflags_wrapped!(SparseImageFormatFlags, Flags); impl SparseImageFormatFlags { #[doc = "Image uses a single mip tail region for all array layers"] pub const SINGLE_MIPTAIL: Self = Self(0b1); #[doc = "Image requires mip level dimensions to be an integer multiple of the sparse image block dimensions for non-tail mip levels."] pub const ALIGNED_MIP_SIZE: Self = Self(0b10); #[doc = "Image uses a non-standard sparse image block dimensions"] pub const NONSTANDARD_BLOCK_SIZE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SparseMemoryBindFlags(pub(crate) Flags); vk_bitflags_wrapped!(SparseMemoryBindFlags, Flags); impl SparseMemoryBindFlags { #[doc = "Operation binds resource metadata to memory"] pub const METADATA: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineStageFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineStageFlags, Flags); impl PipelineStageFlags { #[doc = "Before subsequent commands are processed"] pub const TOP_OF_PIPE: Self = Self(0b1); #[doc = "Draw/DispatchIndirect command fetch"] pub const DRAW_INDIRECT: Self = Self(0b10); #[doc = "Vertex/index fetch"] pub const VERTEX_INPUT: Self = Self(0b100); #[doc = "Vertex shading"] pub const VERTEX_SHADER: Self = Self(0b1000); #[doc = "Tessellation control shading"] pub const TESSELLATION_CONTROL_SHADER: Self = Self(0b1_0000); #[doc = "Tessellation evaluation shading"] pub const TESSELLATION_EVALUATION_SHADER: Self = Self(0b10_0000); #[doc = "Geometry shading"] pub const GEOMETRY_SHADER: Self = Self(0b100_0000); #[doc = "Fragment shading"] pub const FRAGMENT_SHADER: Self = Self(0b1000_0000); #[doc = "Early fragment (depth and stencil) tests"] pub const EARLY_FRAGMENT_TESTS: Self = Self(0b1_0000_0000); #[doc = "Late fragment (depth and stencil) tests"] pub const LATE_FRAGMENT_TESTS: Self = Self(0b10_0000_0000); #[doc = "Color attachment writes"] pub const COLOR_ATTACHMENT_OUTPUT: Self = Self(0b100_0000_0000); #[doc = "Compute shading"] pub const COMPUTE_SHADER: Self = Self(0b1000_0000_0000); #[doc = "Transfer/copy operations"] pub const TRANSFER: Self = Self(0b1_0000_0000_0000); #[doc = "After previous commands have completed"] pub const BOTTOM_OF_PIPE: Self = Self(0b10_0000_0000_0000); #[doc = "Indicates host (CPU) is a source/sink of the dependency"] pub const HOST: Self = Self(0b100_0000_0000_0000); #[doc = "All stages of the 
graphics pipeline"] pub const ALL_GRAPHICS: Self = Self(0b1000_0000_0000_0000); #[doc = "All stages supported on the queue"] pub const ALL_COMMANDS: Self = Self(0b1_0000_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CommandPoolCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(CommandPoolCreateFlags, Flags); impl CommandPoolCreateFlags { #[doc = "Command buffers have a short lifetime"] pub const TRANSIENT: Self = Self(0b1); #[doc = "Command buffers may release their memory individually"] pub const RESET_COMMAND_BUFFER: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CommandPoolResetFlags(pub(crate) Flags); vk_bitflags_wrapped!(CommandPoolResetFlags, Flags); impl CommandPoolResetFlags { #[doc = "Release resources owned by the pool"] pub const RELEASE_RESOURCES: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CommandBufferResetFlags(pub(crate) Flags); vk_bitflags_wrapped!(CommandBufferResetFlags, Flags); impl CommandBufferResetFlags { #[doc = "Release resources owned by the buffer"] pub const RELEASE_RESOURCES: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SampleCountFlags(pub(crate) Flags); vk_bitflags_wrapped!(SampleCountFlags, Flags); impl SampleCountFlags { #[doc = "Sample count 1 supported"] pub const TYPE_1: Self = Self(0b1); #[doc = "Sample count 2 supported"] pub const TYPE_2: Self = Self(0b10); #[doc = "Sample count 4 supported"] pub const TYPE_4: Self = Self(0b100); #[doc = "Sample count 8 supported"] pub const TYPE_8: Self = Self(0b1000); #[doc = "Sample count 16 supported"] pub const TYPE_16: Self = Self(0b1_0000); #[doc = "Sample count 32 supported"] pub const TYPE_32: Self = Self(0b10_0000); #[doc = "Sample count 64 supported"] pub const TYPE_64: Self = Self(0b100_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AttachmentDescriptionFlags(pub(crate) Flags); vk_bitflags_wrapped!(AttachmentDescriptionFlags, Flags); impl AttachmentDescriptionFlags { #[doc = "The attachment may alias physical memory of another attachment in the same render pass"] pub const MAY_ALIAS: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct StencilFaceFlags(pub(crate) Flags); vk_bitflags_wrapped!(StencilFaceFlags, Flags); impl StencilFaceFlags { #[doc = "Front face"] pub const FRONT: Self = Self(0b1); #[doc = "Back face"] pub const BACK: Self = Self(0b10); #[doc = "Front and back faces"] pub const FRONT_AND_BACK: Self = Self(0x0000_0003); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DescriptorPoolCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(DescriptorPoolCreateFlags, Flags); impl DescriptorPoolCreateFlags { #[doc = "Descriptor sets may be freed individually"] pub const FREE_DESCRIPTOR_SET: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DependencyFlags(pub(crate) Flags); vk_bitflags_wrapped!(DependencyFlags, Flags); impl DependencyFlags { #[doc = "Dependency is per pixel region "] pub const BY_REGION: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub 
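// Hedged usage sketch (assumed example, not part of the generated bindings):
// `SemaphoreWaitFlags::ANY` below corresponds to VK_SEMAPHORE_WAIT_ANY_BIT and makes
// a timeline-semaphore wait return as soon as any listed semaphore reaches its value,
// instead of waiting for all of them. Setter names are assumed from ash's usual
// builder pattern, and `semaphores`/`values` are hypothetical local slices:
//
//     let wait_info = SemaphoreWaitInfo::default()
//         .flags(SemaphoreWaitFlags::ANY)
//         .semaphores(&semaphores)
//         .values(&values);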
struct SemaphoreWaitFlags(pub(crate) Flags); vk_bitflags_wrapped!(SemaphoreWaitFlags, Flags); impl SemaphoreWaitFlags { pub const ANY: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DisplayPlaneAlphaFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(DisplayPlaneAlphaFlagsKHR, Flags); impl DisplayPlaneAlphaFlagsKHR { pub const OPAQUE: Self = Self(0b1); pub const GLOBAL: Self = Self(0b10); pub const PER_PIXEL: Self = Self(0b100); pub const PER_PIXEL_PREMULTIPLIED: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CompositeAlphaFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(CompositeAlphaFlagsKHR, Flags); impl CompositeAlphaFlagsKHR { pub const OPAQUE: Self = Self(0b1); pub const PRE_MULTIPLIED: Self = Self(0b10); pub const POST_MULTIPLIED: Self = Self(0b100); pub const INHERIT: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SurfaceTransformFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(SurfaceTransformFlagsKHR, Flags); impl SurfaceTransformFlagsKHR { pub const IDENTITY: Self = Self(0b1); pub const ROTATE_90: Self = Self(0b10); pub const ROTATE_180: Self = Self(0b100); pub const ROTATE_270: Self = Self(0b1000); pub const HORIZONTAL_MIRROR: Self = Self(0b1_0000); pub const HORIZONTAL_MIRROR_ROTATE_90: Self = Self(0b10_0000); pub const HORIZONTAL_MIRROR_ROTATE_180: Self = Self(0b100_0000); pub const HORIZONTAL_MIRROR_ROTATE_270: Self = Self(0b1000_0000); pub const INHERIT: Self = Self(0b1_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SwapchainImageUsageFlagsANDROID(pub(crate) Flags); vk_bitflags_wrapped!(SwapchainImageUsageFlagsANDROID, Flags); impl SwapchainImageUsageFlagsANDROID { pub const SHARED: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DebugReportFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DebugReportFlagsEXT, Flags); impl DebugReportFlagsEXT { pub const INFORMATION: Self = Self(0b1); pub const WARNING: Self = Self(0b10); pub const PERFORMANCE_WARNING: Self = Self(0b100); pub const ERROR: Self = Self(0b1000); pub const DEBUG: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalMemoryHandleTypeFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(ExternalMemoryHandleTypeFlagsNV, Flags); impl ExternalMemoryHandleTypeFlagsNV { pub const OPAQUE_WIN32: Self = Self(0b1); pub const OPAQUE_WIN32_KMT: Self = Self(0b10); pub const D3D11_IMAGE: Self = Self(0b100); pub const D3D11_IMAGE_KMT: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalMemoryFeatureFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(ExternalMemoryFeatureFlagsNV, Flags); impl ExternalMemoryFeatureFlagsNV { pub const DEDICATED_ONLY: Self = Self(0b1); pub const EXPORTABLE: Self = Self(0b10); pub const IMPORTABLE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SubgroupFeatureFlags(pub(crate) Flags); vk_bitflags_wrapped!(SubgroupFeatureFlags, Flags); impl SubgroupFeatureFlags { #[doc = "Basic subgroup operations"] pub const BASIC: Self = Self(0b1); #[doc = "Vote subgroup operations"] pub const VOTE: Self 
= Self(0b10); #[doc = "Arithmetic subgroup operations"] pub const ARITHMETIC: Self = Self(0b100); #[doc = "Ballot subgroup operations"] pub const BALLOT: Self = Self(0b1000); #[doc = "Shuffle subgroup operations"] pub const SHUFFLE: Self = Self(0b1_0000); #[doc = "Shuffle relative subgroup operations"] pub const SHUFFLE_RELATIVE: Self = Self(0b10_0000); #[doc = "Clustered subgroup operations"] pub const CLUSTERED: Self = Self(0b100_0000); #[doc = "Quad subgroup operations"] pub const QUAD: Self = Self(0b1000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct IndirectCommandsLayoutUsageFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(IndirectCommandsLayoutUsageFlagsNV, Flags); impl IndirectCommandsLayoutUsageFlagsNV { pub const EXPLICIT_PREPROCESS: Self = Self(0b1); pub const INDEXED_SEQUENCES: Self = Self(0b10); pub const UNORDERED_SEQUENCES: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct IndirectStateFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(IndirectStateFlagsNV, Flags); impl IndirectStateFlagsNV { pub const FLAG_FRONTFACE: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PrivateDataSlotCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PrivateDataSlotCreateFlags, Flags); impl PrivateDataSlotCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DescriptorSetLayoutCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(DescriptorSetLayoutCreateFlags, Flags); impl DescriptorSetLayoutCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalMemoryHandleTypeFlags(pub(crate) Flags); vk_bitflags_wrapped!(ExternalMemoryHandleTypeFlags, Flags); impl ExternalMemoryHandleTypeFlags { pub const OPAQUE_FD: Self = Self(0b1); pub const OPAQUE_WIN32: Self = Self(0b10); pub const OPAQUE_WIN32_KMT: Self = Self(0b100); pub const D3D11_TEXTURE: Self = Self(0b1000); pub const D3D11_TEXTURE_KMT: Self = Self(0b1_0000); pub const D3D12_HEAP: Self = Self(0b10_0000); pub const D3D12_RESOURCE: Self = Self(0b100_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalMemoryFeatureFlags(pub(crate) Flags); vk_bitflags_wrapped!(ExternalMemoryFeatureFlags, Flags); impl ExternalMemoryFeatureFlags { pub const DEDICATED_ONLY: Self = Self(0b1); pub const EXPORTABLE: Self = Self(0b10); pub const IMPORTABLE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalSemaphoreHandleTypeFlags(pub(crate) Flags); vk_bitflags_wrapped!(ExternalSemaphoreHandleTypeFlags, Flags); impl ExternalSemaphoreHandleTypeFlags { pub const OPAQUE_FD: Self = Self(0b1); pub const OPAQUE_WIN32: Self = Self(0b10); pub const OPAQUE_WIN32_KMT: Self = Self(0b100); pub const D3D12_FENCE: Self = Self(0b1000); pub const D3D11_FENCE: Self = Self::D3D12_FENCE; pub const SYNC_FD: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalSemaphoreFeatureFlags(pub(crate) Flags); vk_bitflags_wrapped!(ExternalSemaphoreFeatureFlags, Flags); impl ExternalSemaphoreFeatureFlags { pub const EXPORTABLE: Self = Self(0b1); pub const IMPORTABLE: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, 
Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SemaphoreImportFlags(pub(crate) Flags); vk_bitflags_wrapped!(SemaphoreImportFlags, Flags); impl SemaphoreImportFlags { pub const TEMPORARY: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalFenceHandleTypeFlags(pub(crate) Flags); vk_bitflags_wrapped!(ExternalFenceHandleTypeFlags, Flags); impl ExternalFenceHandleTypeFlags { pub const OPAQUE_FD: Self = Self(0b1); pub const OPAQUE_WIN32: Self = Self(0b10); pub const OPAQUE_WIN32_KMT: Self = Self(0b100); pub const SYNC_FD: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExternalFenceFeatureFlags(pub(crate) Flags); vk_bitflags_wrapped!(ExternalFenceFeatureFlags, Flags); impl ExternalFenceFeatureFlags { pub const EXPORTABLE: Self = Self(0b1); pub const IMPORTABLE: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct FenceImportFlags(pub(crate) Flags); vk_bitflags_wrapped!(FenceImportFlags, Flags); impl FenceImportFlags { pub const TEMPORARY: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SurfaceCounterFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(SurfaceCounterFlagsEXT, Flags); impl SurfaceCounterFlagsEXT { pub const VBLANK: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PeerMemoryFeatureFlags(pub(crate) Flags); vk_bitflags_wrapped!(PeerMemoryFeatureFlags, Flags); impl PeerMemoryFeatureFlags { #[doc = "Can read with vkCmdCopy commands"] pub const COPY_SRC: Self = Self(0b1); #[doc = "Can write with vkCmdCopy commands"] pub const COPY_DST: Self = Self(0b10); #[doc = "Can read with any access type/command"] pub const GENERIC_SRC: Self = Self(0b100); #[doc = "Can write with and access type/command"] pub const GENERIC_DST: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryAllocateFlags(pub(crate) Flags); vk_bitflags_wrapped!(MemoryAllocateFlags, Flags); impl MemoryAllocateFlags { #[doc = "Force allocation on specific devices"] pub const DEVICE_MASK: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DeviceGroupPresentModeFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(DeviceGroupPresentModeFlagsKHR, Flags); impl DeviceGroupPresentModeFlagsKHR { #[doc = "Present from local memory"] pub const LOCAL: Self = Self(0b1); #[doc = "Present from remote memory"] pub const REMOTE: Self = Self(0b10); #[doc = "Present sum of local and/or remote memory"] pub const SUM: Self = Self(0b100); #[doc = "Each physical device presents from local memory"] pub const LOCAL_MULTI_DEVICE: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SwapchainCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(SwapchainCreateFlagsKHR, Flags); impl SwapchainCreateFlagsKHR {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SubpassDescriptionFlags(pub(crate) Flags); vk_bitflags_wrapped!(SubpassDescriptionFlags, Flags); impl SubpassDescriptionFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub 
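// Hedged usage sketch (assumed example, not part of the generated bindings): the
// debug-utils severity and type flags defined next are normally OR-ed together when
// filling DebugUtilsMessengerCreateInfoEXT; setter names follow ash's usual builder
// pattern and `debug_callback` is a hypothetical user-supplied extern "system" fn:
//
//     let info = DebugUtilsMessengerCreateInfoEXT::default()
//         .message_severity(
//             DebugUtilsMessageSeverityFlagsEXT::WARNING
//                 | DebugUtilsMessageSeverityFlagsEXT::ERROR,
//         )
//         .message_type(
//             DebugUtilsMessageTypeFlagsEXT::VALIDATION
//                 | DebugUtilsMessageTypeFlagsEXT::PERFORMANCE,
//         )
//         .pfn_user_callback(Some(debug_callback));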
struct DebugUtilsMessageSeverityFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DebugUtilsMessageSeverityFlagsEXT, Flags); impl DebugUtilsMessageSeverityFlagsEXT { pub const VERBOSE: Self = Self(0b1); pub const INFO: Self = Self(0b1_0000); pub const WARNING: Self = Self(0b1_0000_0000); pub const ERROR: Self = Self(0b1_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DebugUtilsMessageTypeFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DebugUtilsMessageTypeFlagsEXT, Flags); impl DebugUtilsMessageTypeFlagsEXT { pub const GENERAL: Self = Self(0b1); pub const VALIDATION: Self = Self(0b10); pub const PERFORMANCE: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DescriptorBindingFlags(pub(crate) Flags); vk_bitflags_wrapped!(DescriptorBindingFlags, Flags); impl DescriptorBindingFlags { pub const UPDATE_AFTER_BIND: Self = Self(0b1); pub const UPDATE_UNUSED_WHILE_PENDING: Self = Self(0b10); pub const PARTIALLY_BOUND: Self = Self(0b100); pub const VARIABLE_DESCRIPTOR_COUNT: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ConditionalRenderingFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(ConditionalRenderingFlagsEXT, Flags); impl ConditionalRenderingFlagsEXT { pub const INVERTED: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ResolveModeFlags(pub(crate) Flags); vk_bitflags_wrapped!(ResolveModeFlags, Flags); impl ResolveModeFlags { pub const NONE: Self = Self(0); pub const SAMPLE_ZERO: Self = Self(0b1); pub const AVERAGE: Self = Self(0b10); pub const MIN: Self = Self(0b100); pub const MAX: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct GeometryInstanceFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(GeometryInstanceFlagsKHR, Flags); impl GeometryInstanceFlagsKHR { pub const TRIANGLE_FACING_CULL_DISABLE: Self = Self(0b1); pub const TRIANGLE_FLIP_FACING: Self = Self(0b10); pub const FORCE_OPAQUE: Self = Self(0b100); pub const FORCE_NO_OPAQUE: Self = Self(0b1000); pub const TRIANGLE_FRONT_COUNTERCLOCKWISE: Self = Self::TRIANGLE_FLIP_FACING; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct GeometryFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(GeometryFlagsKHR, Flags); impl GeometryFlagsKHR { pub const OPAQUE: Self = Self(0b1); pub const NO_DUPLICATE_ANY_HIT_INVOCATION: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct BuildAccelerationStructureFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(BuildAccelerationStructureFlagsKHR, Flags); impl BuildAccelerationStructureFlagsKHR { pub const ALLOW_UPDATE: Self = Self(0b1); pub const ALLOW_COMPACTION: Self = Self(0b10); pub const PREFER_FAST_TRACE: Self = Self(0b100); pub const PREFER_FAST_BUILD: Self = Self(0b1000); pub const LOW_MEMORY: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AccelerationStructureCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(AccelerationStructureCreateFlagsKHR, Flags); impl AccelerationStructureCreateFlagsKHR { pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, 
Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct FramebufferCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(FramebufferCreateFlags, Flags); impl FramebufferCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DeviceDiagnosticsConfigFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(DeviceDiagnosticsConfigFlagsNV, Flags); impl DeviceDiagnosticsConfigFlagsNV { pub const ENABLE_SHADER_DEBUG_INFO: Self = Self(0b1); pub const ENABLE_RESOURCE_TRACKING: Self = Self(0b10); pub const ENABLE_AUTOMATIC_CHECKPOINTS: Self = Self(0b100); pub const ENABLE_SHADER_ERROR_REPORTING: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCreationFeedbackFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCreationFeedbackFlags, Flags); impl PipelineCreationFeedbackFlags { pub const VALID: Self = Self(0b1); pub const VALID_EXT: Self = Self::VALID; pub const APPLICATION_PIPELINE_CACHE_HIT: Self = Self(0b10); pub const APPLICATION_PIPELINE_CACHE_HIT_EXT: Self = Self::APPLICATION_PIPELINE_CACHE_HIT; pub const BASE_PIPELINE_ACCELERATION: Self = Self(0b100); pub const BASE_PIPELINE_ACCELERATION_EXT: Self = Self::BASE_PIPELINE_ACCELERATION; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryDecompressionMethodFlagsNV(pub(crate) Flags64); vk_bitflags_wrapped!(MemoryDecompressionMethodFlagsNV, Flags64); impl MemoryDecompressionMethodFlagsNV { pub const GDEFLATE_1_0: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PerformanceCounterDescriptionFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(PerformanceCounterDescriptionFlagsKHR, Flags); impl PerformanceCounterDescriptionFlagsKHR { pub const PERFORMANCE_IMPACTING: Self = Self(0b1); pub const CONCURRENTLY_IMPACTED: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AcquireProfilingLockFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(AcquireProfilingLockFlagsKHR, Flags); impl AcquireProfilingLockFlagsKHR {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ShaderCorePropertiesFlagsAMD(pub(crate) Flags); vk_bitflags_wrapped!(ShaderCorePropertiesFlagsAMD, Flags); impl ShaderCorePropertiesFlagsAMD {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ShaderModuleCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(ShaderModuleCreateFlags, Flags); impl ShaderModuleCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCompilerControlFlagsAMD(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCompilerControlFlagsAMD, Flags); impl PipelineCompilerControlFlagsAMD {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ToolPurposeFlags(pub(crate) Flags); vk_bitflags_wrapped!(ToolPurposeFlags, Flags); impl ToolPurposeFlags { pub const VALIDATION: Self = Self(0b1); pub const VALIDATION_EXT: Self = Self::VALIDATION; pub const PROFILING: Self = Self(0b10); pub const PROFILING_EXT: Self = Self::PROFILING; pub const TRACING: Self = Self(0b100); pub const TRACING_EXT: Self = Self::TRACING; pub const ADDITIONAL_FEATURES: Self = Self(0b1000); pub const ADDITIONAL_FEATURES_EXT: Self = 
Self::ADDITIONAL_FEATURES; pub const MODIFYING_FEATURES: Self = Self(0b1_0000); pub const MODIFYING_FEATURES_EXT: Self = Self::MODIFYING_FEATURES; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AccessFlags2(pub(crate) Flags64); vk_bitflags_wrapped!(AccessFlags2, Flags64); impl AccessFlags2 { pub const NONE: Self = Self(0); pub const NONE_KHR: Self = Self::NONE; pub const INDIRECT_COMMAND_READ: Self = Self(0b1); pub const INDIRECT_COMMAND_READ_KHR: Self = Self::INDIRECT_COMMAND_READ; pub const INDEX_READ: Self = Self(0b10); pub const INDEX_READ_KHR: Self = Self::INDEX_READ; pub const VERTEX_ATTRIBUTE_READ: Self = Self(0b100); pub const VERTEX_ATTRIBUTE_READ_KHR: Self = Self::VERTEX_ATTRIBUTE_READ; pub const UNIFORM_READ: Self = Self(0b1000); pub const UNIFORM_READ_KHR: Self = Self::UNIFORM_READ; pub const INPUT_ATTACHMENT_READ: Self = Self(0b1_0000); pub const INPUT_ATTACHMENT_READ_KHR: Self = Self::INPUT_ATTACHMENT_READ; pub const SHADER_READ: Self = Self(0b10_0000); pub const SHADER_READ_KHR: Self = Self::SHADER_READ; pub const SHADER_WRITE: Self = Self(0b100_0000); pub const SHADER_WRITE_KHR: Self = Self::SHADER_WRITE; pub const COLOR_ATTACHMENT_READ: Self = Self(0b1000_0000); pub const COLOR_ATTACHMENT_READ_KHR: Self = Self::COLOR_ATTACHMENT_READ; pub const COLOR_ATTACHMENT_WRITE: Self = Self(0b1_0000_0000); pub const COLOR_ATTACHMENT_WRITE_KHR: Self = Self::COLOR_ATTACHMENT_WRITE; pub const DEPTH_STENCIL_ATTACHMENT_READ: Self = Self(0b10_0000_0000); pub const DEPTH_STENCIL_ATTACHMENT_READ_KHR: Self = Self::DEPTH_STENCIL_ATTACHMENT_READ; pub const DEPTH_STENCIL_ATTACHMENT_WRITE: Self = Self(0b100_0000_0000); pub const DEPTH_STENCIL_ATTACHMENT_WRITE_KHR: Self = Self::DEPTH_STENCIL_ATTACHMENT_WRITE; pub const TRANSFER_READ: Self = Self(0b1000_0000_0000); pub const TRANSFER_READ_KHR: Self = Self::TRANSFER_READ; pub const TRANSFER_WRITE: Self = Self(0b1_0000_0000_0000); pub const TRANSFER_WRITE_KHR: Self = Self::TRANSFER_WRITE; pub const HOST_READ: Self = Self(0b10_0000_0000_0000); pub const HOST_READ_KHR: Self = Self::HOST_READ; pub const HOST_WRITE: Self = Self(0b100_0000_0000_0000); pub const HOST_WRITE_KHR: Self = Self::HOST_WRITE; pub const MEMORY_READ: Self = Self(0b1000_0000_0000_0000); pub const MEMORY_READ_KHR: Self = Self::MEMORY_READ; pub const MEMORY_WRITE: Self = Self(0b1_0000_0000_0000_0000); pub const MEMORY_WRITE_KHR: Self = Self::MEMORY_WRITE; pub const SHADER_SAMPLED_READ: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000); pub const SHADER_SAMPLED_READ_KHR: Self = Self::SHADER_SAMPLED_READ; pub const SHADER_STORAGE_READ: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000); pub const SHADER_STORAGE_READ_KHR: Self = Self::SHADER_STORAGE_READ; pub const SHADER_STORAGE_WRITE: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000); pub const SHADER_STORAGE_WRITE_KHR: Self = Self::SHADER_STORAGE_WRITE; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineStageFlags2(pub(crate) Flags64); vk_bitflags_wrapped!(PipelineStageFlags2, Flags64); impl PipelineStageFlags2 { pub const NONE: Self = Self(0); pub const NONE_KHR: Self = Self::NONE; pub const TOP_OF_PIPE: Self = Self(0b1); pub const TOP_OF_PIPE_KHR: Self = Self::TOP_OF_PIPE; pub const DRAW_INDIRECT: Self = Self(0b10); pub const DRAW_INDIRECT_KHR: Self = Self::DRAW_INDIRECT; pub const VERTEX_INPUT: Self = Self(0b100); pub const VERTEX_INPUT_KHR: Self = Self::VERTEX_INPUT; pub const 
VERTEX_SHADER: Self = Self(0b1000); pub const VERTEX_SHADER_KHR: Self = Self::VERTEX_SHADER; pub const TESSELLATION_CONTROL_SHADER: Self = Self(0b1_0000); pub const TESSELLATION_CONTROL_SHADER_KHR: Self = Self::TESSELLATION_CONTROL_SHADER; pub const TESSELLATION_EVALUATION_SHADER: Self = Self(0b10_0000); pub const TESSELLATION_EVALUATION_SHADER_KHR: Self = Self::TESSELLATION_EVALUATION_SHADER; pub const GEOMETRY_SHADER: Self = Self(0b100_0000); pub const GEOMETRY_SHADER_KHR: Self = Self::GEOMETRY_SHADER; pub const FRAGMENT_SHADER: Self = Self(0b1000_0000); pub const FRAGMENT_SHADER_KHR: Self = Self::FRAGMENT_SHADER; pub const EARLY_FRAGMENT_TESTS: Self = Self(0b1_0000_0000); pub const EARLY_FRAGMENT_TESTS_KHR: Self = Self::EARLY_FRAGMENT_TESTS; pub const LATE_FRAGMENT_TESTS: Self = Self(0b10_0000_0000); pub const LATE_FRAGMENT_TESTS_KHR: Self = Self::LATE_FRAGMENT_TESTS; pub const COLOR_ATTACHMENT_OUTPUT: Self = Self(0b100_0000_0000); pub const COLOR_ATTACHMENT_OUTPUT_KHR: Self = Self::COLOR_ATTACHMENT_OUTPUT; pub const COMPUTE_SHADER: Self = Self(0b1000_0000_0000); pub const COMPUTE_SHADER_KHR: Self = Self::COMPUTE_SHADER; pub const ALL_TRANSFER: Self = Self(0b1_0000_0000_0000); pub const ALL_TRANSFER_KHR: Self = Self::ALL_TRANSFER; pub const TRANSFER: Self = Self::ALL_TRANSFER_KHR; pub const TRANSFER_KHR: Self = Self::ALL_TRANSFER; pub const BOTTOM_OF_PIPE: Self = Self(0b10_0000_0000_0000); pub const BOTTOM_OF_PIPE_KHR: Self = Self::BOTTOM_OF_PIPE; pub const HOST: Self = Self(0b100_0000_0000_0000); pub const HOST_KHR: Self = Self::HOST; pub const ALL_GRAPHICS: Self = Self(0b1000_0000_0000_0000); pub const ALL_GRAPHICS_KHR: Self = Self::ALL_GRAPHICS; pub const ALL_COMMANDS: Self = Self(0b1_0000_0000_0000_0000); pub const ALL_COMMANDS_KHR: Self = Self::ALL_COMMANDS; pub const COPY: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000); pub const COPY_KHR: Self = Self::COPY; pub const RESOLVE: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000); pub const RESOLVE_KHR: Self = Self::RESOLVE; pub const BLIT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000); pub const BLIT_KHR: Self = Self::BLIT; pub const CLEAR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000); pub const CLEAR_KHR: Self = Self::CLEAR; pub const INDEX_INPUT: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const INDEX_INPUT_KHR: Self = Self::INDEX_INPUT; pub const VERTEX_ATTRIBUTE_INPUT: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const VERTEX_ATTRIBUTE_INPUT_KHR: Self = Self::VERTEX_ATTRIBUTE_INPUT; pub const PRE_RASTERIZATION_SHADERS: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const PRE_RASTERIZATION_SHADERS_KHR: Self = Self::PRE_RASTERIZATION_SHADERS; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct SubmitFlags(pub(crate) Flags); vk_bitflags_wrapped!(SubmitFlags, Flags); impl SubmitFlags { pub const PROTECTED: Self = Self(0b1); pub const PROTECTED_KHR: Self = Self::PROTECTED; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct EventCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(EventCreateFlags, Flags); impl EventCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineLayoutCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineLayoutCreateFlags, Flags); impl PipelineLayoutCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, 
PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineColorBlendStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineColorBlendStateCreateFlags, Flags); impl PipelineColorBlendStateCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineDepthStencilStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineDepthStencilStateCreateFlags, Flags); impl PipelineDepthStencilStateCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct GraphicsPipelineLibraryFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(GraphicsPipelineLibraryFlagsEXT, Flags); impl GraphicsPipelineLibraryFlagsEXT { pub const VERTEX_INPUT_INTERFACE: Self = Self(0b1); pub const PRE_RASTERIZATION_SHADERS: Self = Self(0b10); pub const FRAGMENT_SHADER: Self = Self(0b100); pub const FRAGMENT_OUTPUT_INTERFACE: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DeviceAddressBindingFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DeviceAddressBindingFlagsEXT, Flags); impl DeviceAddressBindingFlagsEXT { pub const INTERNAL_OBJECT: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct FrameBoundaryFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(FrameBoundaryFlagsEXT, Flags); impl FrameBoundaryFlagsEXT { pub const FRAME_END: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PresentScalingFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(PresentScalingFlagsEXT, Flags); impl PresentScalingFlagsEXT { pub const ONE_TO_ONE: Self = Self(0b1); pub const ASPECT_RATIO_STRETCH: Self = Self(0b10); pub const STRETCH: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PresentGravityFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(PresentGravityFlagsEXT, Flags); impl PresentGravityFlagsEXT { pub const MIN: Self = Self(0b1); pub const MAX: Self = Self(0b10); pub const CENTERED: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PhysicalDeviceSchedulingControlsFlagsARM(pub(crate) Flags64); vk_bitflags_wrapped!(PhysicalDeviceSchedulingControlsFlagsARM, Flags64); impl PhysicalDeviceSchedulingControlsFlagsARM { pub const SHADER_CORE_COUNT: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoCodecOperationFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoCodecOperationFlagsKHR, Flags); impl VideoCodecOperationFlagsKHR { pub const NONE: Self = Self(0); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoChromaSubsamplingFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoChromaSubsamplingFlagsKHR, Flags); impl VideoChromaSubsamplingFlagsKHR { pub const INVALID: Self = Self(0); pub const MONOCHROME: Self = Self(0b1); pub const TYPE_420: Self = Self(0b10); pub const TYPE_422: Self = Self(0b100); pub const TYPE_444: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoComponentBitDepthFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoComponentBitDepthFlagsKHR, Flags); impl VideoComponentBitDepthFlagsKHR { 
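// Hedged sketch (assumed example, not part of the generated bindings): like the
// other video capability masks above, these bit-depth flags are reported by the
// implementation and then tested by the application; `supported_depths` is an
// assumed local of this type, not an ash API:
//
//     let wants_10_bit =
//         supported_depths.contains(VideoComponentBitDepthFlagsKHR::TYPE_10);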
pub const INVALID: Self = Self(0); pub const TYPE_8: Self = Self(0b1); pub const TYPE_10: Self = Self(0b100); pub const TYPE_12: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoCapabilityFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoCapabilityFlagsKHR, Flags); impl VideoCapabilityFlagsKHR { pub const PROTECTED_CONTENT: Self = Self(0b1); pub const SEPARATE_REFERENCE_IMAGES: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoSessionCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoSessionCreateFlagsKHR, Flags); impl VideoSessionCreateFlagsKHR { pub const PROTECTED_CONTENT: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoDecodeH264PictureLayoutFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoDecodeH264PictureLayoutFlagsKHR, Flags); impl VideoDecodeH264PictureLayoutFlagsKHR { pub const PROGRESSIVE: Self = Self(0); pub const INTERLACED_INTERLEAVED_LINES: Self = Self(0b1); pub const INTERLACED_SEPARATE_PLANES: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoCodingControlFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoCodingControlFlagsKHR, Flags); impl VideoCodingControlFlagsKHR { pub const RESET: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoDecodeUsageFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoDecodeUsageFlagsKHR, Flags); impl VideoDecodeUsageFlagsKHR { pub const DEFAULT: Self = Self(0); pub const TRANSCODING: Self = Self(0b1); pub const OFFLINE: Self = Self(0b10); pub const STREAMING: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoDecodeCapabilityFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoDecodeCapabilityFlagsKHR, Flags); impl VideoDecodeCapabilityFlagsKHR { pub const DPB_AND_OUTPUT_COINCIDE: Self = Self(0b1); pub const DPB_AND_OUTPUT_DISTINCT: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeFlagsKHR, Flags); impl VideoEncodeFlagsKHR {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeUsageFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeUsageFlagsKHR, Flags); impl VideoEncodeUsageFlagsKHR { pub const DEFAULT: Self = Self(0); pub const TRANSCODING: Self = Self(0b1); pub const STREAMING: Self = Self(0b10); pub const RECORDING: Self = Self(0b100); pub const CONFERENCING: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeContentFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeContentFlagsKHR, Flags); impl VideoEncodeContentFlagsKHR { pub const DEFAULT: Self = Self(0); pub const CAMERA: Self = Self(0b1); pub const DESKTOP: Self = Self(0b10); pub const RENDERED: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeCapabilityFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeCapabilityFlagsKHR, Flags); impl VideoEncodeCapabilityFlagsKHR { pub 
const PRECEDING_EXTERNALLY_ENCODED_BYTES: Self = Self(0b1); pub const INSUFFICIENTSTREAM_BUFFER_RANGE_DETECTION: Self = Self(0b10); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeFeedbackFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeFeedbackFlagsKHR, Flags); impl VideoEncodeFeedbackFlagsKHR { pub const BITSTREAM_BUFFER_OFFSET: Self = Self(0b1); pub const BITSTREAM_BYTES_WRITTEN: Self = Self(0b10); pub const BITSTREAM_HAS_OVERRIDES: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeRateControlModeFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeRateControlModeFlagsKHR, Flags); impl VideoEncodeRateControlModeFlagsKHR { pub const DEFAULT: Self = Self(0); pub const DISABLED: Self = Self(0b1); pub const CBR: Self = Self(0b10); pub const VBR: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH264CapabilityFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH264CapabilityFlagsKHR, Flags); impl VideoEncodeH264CapabilityFlagsKHR { pub const HRD_COMPLIANCE: Self = Self(0b1); pub const PREDICTION_WEIGHT_TABLE_GENERATED: Self = Self(0b10); pub const ROW_UNALIGNED_SLICE: Self = Self(0b100); pub const DIFFERENT_SLICE_TYPE: Self = Self(0b1000); pub const B_FRAME_IN_L0_LIST: Self = Self(0b1_0000); pub const B_FRAME_IN_L1_LIST: Self = Self(0b10_0000); pub const PER_PICTURE_TYPE_MIN_MAX_QP: Self = Self(0b100_0000); pub const PER_SLICE_CONSTANT_QP: Self = Self(0b1000_0000); pub const GENERATE_PREFIX_NALU: Self = Self(0b1_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH264StdFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH264StdFlagsKHR, Flags); impl VideoEncodeH264StdFlagsKHR { pub const SEPARATE_COLOR_PLANE_FLAG_SET: Self = Self(0b1); pub const QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET: Self = Self(0b10); pub const SCALING_MATRIX_PRESENT_FLAG_SET: Self = Self(0b100); pub const CHROMA_QP_INDEX_OFFSET: Self = Self(0b1000); pub const SECOND_CHROMA_QP_INDEX_OFFSET: Self = Self(0b1_0000); pub const PIC_INIT_QP_MINUS26: Self = Self(0b10_0000); pub const WEIGHTED_PRED_FLAG_SET: Self = Self(0b100_0000); pub const WEIGHTED_BIPRED_IDC_EXPLICIT: Self = Self(0b1000_0000); pub const WEIGHTED_BIPRED_IDC_IMPLICIT: Self = Self(0b1_0000_0000); pub const TRANSFORM_8X8_MODE_FLAG_SET: Self = Self(0b10_0000_0000); pub const DIRECT_SPATIAL_MV_PRED_FLAG_UNSET: Self = Self(0b100_0000_0000); pub const ENTROPY_CODING_MODE_FLAG_UNSET: Self = Self(0b1000_0000_0000); pub const ENTROPY_CODING_MODE_FLAG_SET: Self = Self(0b1_0000_0000_0000); pub const DIRECT_8X8_INFERENCE_FLAG_UNSET: Self = Self(0b10_0000_0000_0000); pub const CONSTRAINED_INTRA_PRED_FLAG_SET: Self = Self(0b100_0000_0000_0000); pub const DEBLOCKING_FILTER_DISABLED: Self = Self(0b1000_0000_0000_0000); pub const DEBLOCKING_FILTER_ENABLED: Self = Self(0b1_0000_0000_0000_0000); pub const DEBLOCKING_FILTER_PARTIAL: Self = Self(0b10_0000_0000_0000_0000); pub const SLICE_QP_DELTA: Self = Self(0b1000_0000_0000_0000_0000); pub const DIFFERENT_SLICE_QP_DELTA: Self = Self(0b1_0000_0000_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH264RateControlFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH264RateControlFlagsKHR, 
Flags); impl VideoEncodeH264RateControlFlagsKHR { pub const ATTEMPT_HRD_COMPLIANCE: Self = Self(0b1); pub const REGULAR_GOP: Self = Self(0b10); pub const REFERENCE_PATTERN_FLAT: Self = Self(0b100); pub const REFERENCE_PATTERN_DYADIC: Self = Self(0b1000); pub const TEMPORAL_LAYER_PATTERN_DYADIC: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct HostImageCopyFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(HostImageCopyFlagsEXT, Flags); impl HostImageCopyFlagsEXT { pub const MEMCPY: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageFormatConstraintsFlagsFUCHSIA(pub(crate) Flags); vk_bitflags_wrapped!(ImageFormatConstraintsFlagsFUCHSIA, Flags); impl ImageFormatConstraintsFlagsFUCHSIA {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageConstraintsInfoFlagsFUCHSIA(pub(crate) Flags); vk_bitflags_wrapped!(ImageConstraintsInfoFlagsFUCHSIA, Flags); impl ImageConstraintsInfoFlagsFUCHSIA { pub const CPU_READ_RARELY: Self = Self(0b1); pub const CPU_READ_OFTEN: Self = Self(0b10); pub const CPU_WRITE_RARELY: Self = Self(0b100); pub const CPU_WRITE_OFTEN: Self = Self(0b1000); pub const PROTECTED_OPTIONAL: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct FormatFeatureFlags2(pub(crate) Flags64); vk_bitflags_wrapped!(FormatFeatureFlags2, Flags64); impl FormatFeatureFlags2 { pub const SAMPLED_IMAGE: Self = Self(0b1); pub const SAMPLED_IMAGE_KHR: Self = Self::SAMPLED_IMAGE; pub const STORAGE_IMAGE: Self = Self(0b10); pub const STORAGE_IMAGE_KHR: Self = Self::STORAGE_IMAGE; pub const STORAGE_IMAGE_ATOMIC: Self = Self(0b100); pub const STORAGE_IMAGE_ATOMIC_KHR: Self = Self::STORAGE_IMAGE_ATOMIC; pub const UNIFORM_TEXEL_BUFFER: Self = Self(0b1000); pub const UNIFORM_TEXEL_BUFFER_KHR: Self = Self::UNIFORM_TEXEL_BUFFER; pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1_0000); pub const STORAGE_TEXEL_BUFFER_KHR: Self = Self::STORAGE_TEXEL_BUFFER; pub const STORAGE_TEXEL_BUFFER_ATOMIC: Self = Self(0b10_0000); pub const STORAGE_TEXEL_BUFFER_ATOMIC_KHR: Self = Self::STORAGE_TEXEL_BUFFER_ATOMIC; pub const VERTEX_BUFFER: Self = Self(0b100_0000); pub const VERTEX_BUFFER_KHR: Self = Self::VERTEX_BUFFER; pub const COLOR_ATTACHMENT: Self = Self(0b1000_0000); pub const COLOR_ATTACHMENT_KHR: Self = Self::COLOR_ATTACHMENT; pub const COLOR_ATTACHMENT_BLEND: Self = Self(0b1_0000_0000); pub const COLOR_ATTACHMENT_BLEND_KHR: Self = Self::COLOR_ATTACHMENT_BLEND; pub const DEPTH_STENCIL_ATTACHMENT: Self = Self(0b10_0000_0000); pub const DEPTH_STENCIL_ATTACHMENT_KHR: Self = Self::DEPTH_STENCIL_ATTACHMENT; pub const BLIT_SRC: Self = Self(0b100_0000_0000); pub const BLIT_SRC_KHR: Self = Self::BLIT_SRC; pub const BLIT_DST: Self = Self(0b1000_0000_0000); pub const BLIT_DST_KHR: Self = Self::BLIT_DST; pub const SAMPLED_IMAGE_FILTER_LINEAR: Self = Self(0b1_0000_0000_0000); pub const SAMPLED_IMAGE_FILTER_LINEAR_KHR: Self = Self::SAMPLED_IMAGE_FILTER_LINEAR; pub const SAMPLED_IMAGE_FILTER_CUBIC: Self = Self(0b10_0000_0000_0000); pub const SAMPLED_IMAGE_FILTER_CUBIC_EXT: Self = Self::SAMPLED_IMAGE_FILTER_CUBIC; pub const TRANSFER_SRC: Self = Self(0b100_0000_0000_0000); pub const TRANSFER_SRC_KHR: Self = Self::TRANSFER_SRC; pub const TRANSFER_DST: Self = Self(0b1000_0000_0000_0000); pub const TRANSFER_DST_KHR: Self = Self::TRANSFER_DST; pub const 
SAMPLED_IMAGE_FILTER_MINMAX: Self = Self(0b1_0000_0000_0000_0000); pub const SAMPLED_IMAGE_FILTER_MINMAX_KHR: Self = Self::SAMPLED_IMAGE_FILTER_MINMAX; pub const MIDPOINT_CHROMA_SAMPLES: Self = Self(0b10_0000_0000_0000_0000); pub const MIDPOINT_CHROMA_SAMPLES_KHR: Self = Self::MIDPOINT_CHROMA_SAMPLES; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER: Self = Self(0b100_0000_0000_0000_0000); pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER: Self = Self(0b1000_0000_0000_0000_0000); pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT: Self = Self(0b1_0000_0000_0000_0000_0000); pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE: Self = Self(0b10_0000_0000_0000_0000_0000); pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE; pub const DISJOINT: Self = Self(0b100_0000_0000_0000_0000_0000); pub const DISJOINT_KHR: Self = Self::DISJOINT; pub const COSITED_CHROMA_SAMPLES: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const COSITED_CHROMA_SAMPLES_KHR: Self = Self::COSITED_CHROMA_SAMPLES; pub const STORAGE_READ_WITHOUT_FORMAT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000); pub const STORAGE_READ_WITHOUT_FORMAT_KHR: Self = Self::STORAGE_READ_WITHOUT_FORMAT; pub const STORAGE_WRITE_WITHOUT_FORMAT: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000); pub const STORAGE_WRITE_WITHOUT_FORMAT_KHR: Self = Self::STORAGE_WRITE_WITHOUT_FORMAT; pub const SAMPLED_IMAGE_DEPTH_COMPARISON: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000); pub const SAMPLED_IMAGE_DEPTH_COMPARISON_KHR: Self = Self::SAMPLED_IMAGE_DEPTH_COMPARISON; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct RenderingFlags(pub(crate) Flags); vk_bitflags_wrapped!(RenderingFlags, Flags); impl RenderingFlags { pub const CONTENTS_SECONDARY_COMMAND_BUFFERS: Self = Self(0b1); pub const CONTENTS_SECONDARY_COMMAND_BUFFERS_KHR: Self = Self::CONTENTS_SECONDARY_COMMAND_BUFFERS; pub const SUSPENDING: Self = Self(0b10); pub const SUSPENDING_KHR: Self = Self::SUSPENDING; pub const RESUMING: Self = Self(0b100); pub const RESUMING_KHR: Self = Self::RESUMING; } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH265CapabilityFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH265CapabilityFlagsKHR, Flags); impl VideoEncodeH265CapabilityFlagsKHR { pub const HRD_COMPLIANCE: Self = Self(0b1); pub const PREDICTION_WEIGHT_TABLE_GENERATED: Self = Self(0b10); pub const ROW_UNALIGNED_SLICE_SEGMENT: Self = Self(0b100); pub const DIFFERENT_SLICE_SEGMENT_TYPE: Self = Self(0b1000); pub const B_FRAME_IN_L0_LIST: Self = Self(0b1_0000); pub const B_FRAME_IN_L1_LIST: Self = Self(0b10_0000); pub const PER_PICTURE_TYPE_MIN_MAX_QP: Self = Self(0b100_0000); pub const PER_SLICE_SEGMENT_CONSTANT_QP: Self = Self(0b1000_0000); pub const MULTIPLE_TILES_PER_SLICE_SEGMENT: Self = Self(0b1_0000_0000); pub const 
MULTIPLE_SLICE_SEGMENTS_PER_TILE: Self = Self(0b10_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH265StdFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH265StdFlagsKHR, Flags); impl VideoEncodeH265StdFlagsKHR { pub const SEPARATE_COLOR_PLANE_FLAG_SET: Self = Self(0b1); pub const SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET: Self = Self(0b10); pub const SCALING_LIST_DATA_PRESENT_FLAG_SET: Self = Self(0b100); pub const PCM_ENABLED_FLAG_SET: Self = Self(0b1000); pub const SPS_TEMPORAL_MVP_ENABLED_FLAG_SET: Self = Self(0b1_0000); pub const INIT_QP_MINUS26: Self = Self(0b10_0000); pub const WEIGHTED_PRED_FLAG_SET: Self = Self(0b100_0000); pub const WEIGHTED_BIPRED_FLAG_SET: Self = Self(0b1000_0000); pub const LOG2_PARALLEL_MERGE_LEVEL_MINUS2: Self = Self(0b1_0000_0000); pub const SIGN_DATA_HIDING_ENABLED_FLAG_SET: Self = Self(0b10_0000_0000); pub const TRANSFORM_SKIP_ENABLED_FLAG_SET: Self = Self(0b100_0000_0000); pub const TRANSFORM_SKIP_ENABLED_FLAG_UNSET: Self = Self(0b1000_0000_0000); pub const PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET: Self = Self(0b1_0000_0000_0000); pub const TRANSQUANT_BYPASS_ENABLED_FLAG_SET: Self = Self(0b10_0000_0000_0000); pub const CONSTRAINED_INTRA_PRED_FLAG_SET: Self = Self(0b100_0000_0000_0000); pub const ENTROPY_CODING_SYNC_ENABLED_FLAG_SET: Self = Self(0b1000_0000_0000_0000); pub const DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET: Self = Self(0b1_0000_0000_0000_0000); pub const DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET: Self = Self(0b10_0000_0000_0000_0000); pub const DEPENDENT_SLICE_SEGMENT_FLAG_SET: Self = Self(0b100_0000_0000_0000_0000); pub const SLICE_QP_DELTA: Self = Self(0b1000_0000_0000_0000_0000); pub const DIFFERENT_SLICE_QP_DELTA: Self = Self(0b1_0000_0000_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH265RateControlFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH265RateControlFlagsKHR, Flags); impl VideoEncodeH265RateControlFlagsKHR { pub const ATTEMPT_HRD_COMPLIANCE: Self = Self(0b1); pub const REGULAR_GOP: Self = Self(0b10); pub const REFERENCE_PATTERN_FLAT: Self = Self(0b100); pub const REFERENCE_PATTERN_DYADIC: Self = Self(0b1000); pub const TEMPORAL_SUB_LAYER_PATTERN_DYADIC: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH265CtbSizeFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH265CtbSizeFlagsKHR, Flags); impl VideoEncodeH265CtbSizeFlagsKHR { pub const TYPE_16: Self = Self(0b1); pub const TYPE_32: Self = Self(0b10); pub const TYPE_64: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeH265TransformBlockSizeFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeH265TransformBlockSizeFlagsKHR, Flags); impl VideoEncodeH265TransformBlockSizeFlagsKHR { pub const TYPE_4: Self = Self(0b1); pub const TYPE_8: Self = Self(0b10); pub const TYPE_16: Self = Self(0b100); pub const TYPE_32: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ExportMetalObjectTypeFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(ExportMetalObjectTypeFlagsEXT, Flags); impl ExportMetalObjectTypeFlagsEXT { pub const METAL_DEVICE: Self = Self(0b1); pub const METAL_COMMAND_QUEUE: Self = Self(0b10); pub const 
METAL_BUFFER: Self = Self(0b100); pub const METAL_TEXTURE: Self = Self(0b1000); pub const METAL_IOSURFACE: Self = Self(0b1_0000); pub const METAL_SHARED_EVENT: Self = Self(0b10_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct InstanceCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(InstanceCreateFlags, Flags); impl InstanceCreateFlags {} #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageCompressionFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(ImageCompressionFlagsEXT, Flags); impl ImageCompressionFlagsEXT { pub const DEFAULT: Self = Self(0); pub const FIXED_RATE_DEFAULT: Self = Self(0b1); pub const FIXED_RATE_EXPLICIT: Self = Self(0b10); pub const DISABLED: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImageCompressionFixedRateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(ImageCompressionFixedRateFlagsEXT, Flags); impl ImageCompressionFixedRateFlagsEXT { pub const NONE: Self = Self(0); pub const TYPE_1BPC: Self = Self(0b1); pub const TYPE_2BPC: Self = Self(0b10); pub const TYPE_3BPC: Self = Self(0b100); pub const TYPE_4BPC: Self = Self(0b1000); pub const TYPE_5BPC: Self = Self(0b1_0000); pub const TYPE_6BPC: Self = Self(0b10_0000); pub const TYPE_7BPC: Self = Self(0b100_0000); pub const TYPE_8BPC: Self = Self(0b1000_0000); pub const TYPE_9BPC: Self = Self(0b1_0000_0000); pub const TYPE_10BPC: Self = Self(0b10_0000_0000); pub const TYPE_11BPC: Self = Self(0b100_0000_0000); pub const TYPE_12BPC: Self = Self(0b1000_0000_0000); pub const TYPE_13BPC: Self = Self(0b1_0000_0000_0000); pub const TYPE_14BPC: Self = Self(0b10_0000_0000_0000); pub const TYPE_15BPC: Self = Self(0b100_0000_0000_0000); pub const TYPE_16BPC: Self = Self(0b1000_0000_0000_0000); pub const TYPE_17BPC: Self = Self(0b1_0000_0000_0000_0000); pub const TYPE_18BPC: Self = Self(0b10_0000_0000_0000_0000); pub const TYPE_19BPC: Self = Self(0b100_0000_0000_0000_0000); pub const TYPE_20BPC: Self = Self(0b1000_0000_0000_0000_0000); pub const TYPE_21BPC: Self = Self(0b1_0000_0000_0000_0000_0000); pub const TYPE_22BPC: Self = Self(0b10_0000_0000_0000_0000_0000); pub const TYPE_23BPC: Self = Self(0b100_0000_0000_0000_0000_0000); pub const TYPE_24BPC: Self = Self(0b1000_0000_0000_0000_0000_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct OpticalFlowGridSizeFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(OpticalFlowGridSizeFlagsNV, Flags); impl OpticalFlowGridSizeFlagsNV { pub const UNKNOWN: Self = Self(0); pub const TYPE_1X1: Self = Self(0b1); pub const TYPE_2X2: Self = Self(0b10); pub const TYPE_4X4: Self = Self(0b100); pub const TYPE_8X8: Self = Self(0b1000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct OpticalFlowUsageFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(OpticalFlowUsageFlagsNV, Flags); impl OpticalFlowUsageFlagsNV { pub const UNKNOWN: Self = Self(0); pub const INPUT: Self = Self(0b1); pub const OUTPUT: Self = Self(0b10); pub const HINT: Self = Self(0b100); pub const COST: Self = Self(0b1000); pub const GLOBAL_FLOW: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct OpticalFlowSessionCreateFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(OpticalFlowSessionCreateFlagsNV, Flags); impl 
OpticalFlowSessionCreateFlagsNV { pub const ENABLE_HINT: Self = Self(0b1); pub const ENABLE_COST: Self = Self(0b10); pub const ENABLE_GLOBAL_FLOW: Self = Self(0b100); pub const ALLOW_REGIONS: Self = Self(0b1000); pub const BOTH_DIRECTIONS: Self = Self(0b1_0000); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct OpticalFlowExecuteFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(OpticalFlowExecuteFlagsNV, Flags); impl OpticalFlowExecuteFlagsNV { pub const DISABLE_TEMPORAL_HINTS: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct BuildMicromapFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(BuildMicromapFlagsEXT, Flags); impl BuildMicromapFlagsEXT { pub const PREFER_FAST_TRACE: Self = Self(0b1); pub const PREFER_FAST_BUILD: Self = Self(0b10); pub const ALLOW_COMPACTION: Self = Self(0b100); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MicromapCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(MicromapCreateFlagsEXT, Flags); impl MicromapCreateFlagsEXT { pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ShaderCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(ShaderCreateFlagsEXT, Flags); impl ShaderCreateFlagsEXT { pub const LINK_STAGE: Self = Self(0b1); } #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MemoryUnmapFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(MemoryUnmapFlagsKHR, Flags); impl MemoryUnmapFlagsKHR {} ash-0.38.0+1.3.281/src/vk/const_debugs.rs000064400000000000000000012542061046102023000155430ustar 00000000000000use crate::prelude::debug_flags; use crate::vk::bitflags::*; use crate::vk::definitions::*; use crate::vk::enums::*; use core::fmt; impl fmt::Debug for AccelerationStructureBuildTypeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::HOST => Some("HOST"), Self::DEVICE => Some("DEVICE"), Self::HOST_OR_DEVICE => Some("HOST_OR_DEVICE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for AccelerationStructureCompatibilityKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::COMPATIBLE => Some("COMPATIBLE"), Self::INCOMPATIBLE => Some("INCOMPATIBLE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for AccelerationStructureCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( AccelerationStructureCreateFlagsKHR::DEVICE_ADDRESS_CAPTURE_REPLAY.0, "DEVICE_ADDRESS_CAPTURE_REPLAY", ), ( AccelerationStructureCreateFlagsKHR::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0, "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT", ), ( AccelerationStructureCreateFlagsKHR::MOTION_NV.0, "MOTION_NV", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AccelerationStructureMemoryRequirementsTypeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::OBJECT => Some("OBJECT"), Self::BUILD_SCRATCH => Some("BUILD_SCRATCH"), Self::UPDATE_SCRATCH => Some("UPDATE_SCRATCH"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for AccelerationStructureMotionInfoFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AccelerationStructureMotionInstanceFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AccelerationStructureMotionInstanceTypeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::STATIC => Some("STATIC"), Self::MATRIX_MOTION => Some("MATRIX_MOTION"), Self::SRT_MOTION => Some("SRT_MOTION"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for AccelerationStructureTypeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TOP_LEVEL => Some("TOP_LEVEL"), Self::BOTTOM_LEVEL => Some("BOTTOM_LEVEL"), Self::GENERIC => Some("GENERIC"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for AccessFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( AccessFlags::INDIRECT_COMMAND_READ.0, "INDIRECT_COMMAND_READ", ), (AccessFlags::INDEX_READ.0, "INDEX_READ"), ( AccessFlags::VERTEX_ATTRIBUTE_READ.0, "VERTEX_ATTRIBUTE_READ", ), (AccessFlags::UNIFORM_READ.0, "UNIFORM_READ"), ( AccessFlags::INPUT_ATTACHMENT_READ.0, "INPUT_ATTACHMENT_READ", ), (AccessFlags::SHADER_READ.0, "SHADER_READ"), (AccessFlags::SHADER_WRITE.0, "SHADER_WRITE"), ( AccessFlags::COLOR_ATTACHMENT_READ.0, "COLOR_ATTACHMENT_READ", ), ( AccessFlags::COLOR_ATTACHMENT_WRITE.0, "COLOR_ATTACHMENT_WRITE", ), ( AccessFlags::DEPTH_STENCIL_ATTACHMENT_READ.0, "DEPTH_STENCIL_ATTACHMENT_READ", ), ( AccessFlags::DEPTH_STENCIL_ATTACHMENT_WRITE.0, "DEPTH_STENCIL_ATTACHMENT_WRITE", ), (AccessFlags::TRANSFER_READ.0, "TRANSFER_READ"), (AccessFlags::TRANSFER_WRITE.0, "TRANSFER_WRITE"), (AccessFlags::HOST_READ.0, "HOST_READ"), (AccessFlags::HOST_WRITE.0, "HOST_WRITE"), (AccessFlags::MEMORY_READ.0, "MEMORY_READ"), (AccessFlags::MEMORY_WRITE.0, "MEMORY_WRITE"), ( AccessFlags::TRANSFORM_FEEDBACK_WRITE_EXT.0, "TRANSFORM_FEEDBACK_WRITE_EXT", ), ( AccessFlags::TRANSFORM_FEEDBACK_COUNTER_READ_EXT.0, "TRANSFORM_FEEDBACK_COUNTER_READ_EXT", ), ( AccessFlags::TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT.0, "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT", ), ( AccessFlags::CONDITIONAL_RENDERING_READ_EXT.0, "CONDITIONAL_RENDERING_READ_EXT", ), ( AccessFlags::COLOR_ATTACHMENT_READ_NONCOHERENT_EXT.0, "COLOR_ATTACHMENT_READ_NONCOHERENT_EXT", ), ( AccessFlags::ACCELERATION_STRUCTURE_READ_KHR.0, "ACCELERATION_STRUCTURE_READ_KHR", ), ( AccessFlags::ACCELERATION_STRUCTURE_WRITE_KHR.0, "ACCELERATION_STRUCTURE_WRITE_KHR", ), ( AccessFlags::FRAGMENT_DENSITY_MAP_READ_EXT.0, "FRAGMENT_DENSITY_MAP_READ_EXT", ), ( AccessFlags::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR.0, "FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR", ), ( AccessFlags::COMMAND_PREPROCESS_READ_NV.0, "COMMAND_PREPROCESS_READ_NV", ), ( AccessFlags::COMMAND_PREPROCESS_WRITE_NV.0, "COMMAND_PREPROCESS_WRITE_NV", ), (AccessFlags::NONE.0, "NONE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AccessFlags2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags64, &str)] = &[ (AccessFlags2::NONE.0, "NONE"), ( AccessFlags2::INDIRECT_COMMAND_READ.0, "INDIRECT_COMMAND_READ", ), (AccessFlags2::INDEX_READ.0, "INDEX_READ"), ( AccessFlags2::VERTEX_ATTRIBUTE_READ.0, "VERTEX_ATTRIBUTE_READ", ), (AccessFlags2::UNIFORM_READ.0, "UNIFORM_READ"), ( AccessFlags2::INPUT_ATTACHMENT_READ.0, 
"INPUT_ATTACHMENT_READ", ), (AccessFlags2::SHADER_READ.0, "SHADER_READ"), (AccessFlags2::SHADER_WRITE.0, "SHADER_WRITE"), ( AccessFlags2::COLOR_ATTACHMENT_READ.0, "COLOR_ATTACHMENT_READ", ), ( AccessFlags2::COLOR_ATTACHMENT_WRITE.0, "COLOR_ATTACHMENT_WRITE", ), ( AccessFlags2::DEPTH_STENCIL_ATTACHMENT_READ.0, "DEPTH_STENCIL_ATTACHMENT_READ", ), ( AccessFlags2::DEPTH_STENCIL_ATTACHMENT_WRITE.0, "DEPTH_STENCIL_ATTACHMENT_WRITE", ), (AccessFlags2::TRANSFER_READ.0, "TRANSFER_READ"), (AccessFlags2::TRANSFER_WRITE.0, "TRANSFER_WRITE"), (AccessFlags2::HOST_READ.0, "HOST_READ"), (AccessFlags2::HOST_WRITE.0, "HOST_WRITE"), (AccessFlags2::MEMORY_READ.0, "MEMORY_READ"), (AccessFlags2::MEMORY_WRITE.0, "MEMORY_WRITE"), (AccessFlags2::SHADER_SAMPLED_READ.0, "SHADER_SAMPLED_READ"), (AccessFlags2::SHADER_STORAGE_READ.0, "SHADER_STORAGE_READ"), (AccessFlags2::SHADER_STORAGE_WRITE.0, "SHADER_STORAGE_WRITE"), ( AccessFlags2::VIDEO_DECODE_READ_KHR.0, "VIDEO_DECODE_READ_KHR", ), ( AccessFlags2::VIDEO_DECODE_WRITE_KHR.0, "VIDEO_DECODE_WRITE_KHR", ), ( AccessFlags2::VIDEO_ENCODE_READ_KHR.0, "VIDEO_ENCODE_READ_KHR", ), ( AccessFlags2::VIDEO_ENCODE_WRITE_KHR.0, "VIDEO_ENCODE_WRITE_KHR", ), ( AccessFlags2::TRANSFORM_FEEDBACK_WRITE_EXT.0, "TRANSFORM_FEEDBACK_WRITE_EXT", ), ( AccessFlags2::TRANSFORM_FEEDBACK_COUNTER_READ_EXT.0, "TRANSFORM_FEEDBACK_COUNTER_READ_EXT", ), ( AccessFlags2::TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT.0, "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT", ), ( AccessFlags2::CONDITIONAL_RENDERING_READ_EXT.0, "CONDITIONAL_RENDERING_READ_EXT", ), ( AccessFlags2::COMMAND_PREPROCESS_READ_NV.0, "COMMAND_PREPROCESS_READ_NV", ), ( AccessFlags2::COMMAND_PREPROCESS_WRITE_NV.0, "COMMAND_PREPROCESS_WRITE_NV", ), ( AccessFlags2::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR.0, "FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR", ), ( AccessFlags2::ACCELERATION_STRUCTURE_READ_KHR.0, "ACCELERATION_STRUCTURE_READ_KHR", ), ( AccessFlags2::ACCELERATION_STRUCTURE_WRITE_KHR.0, "ACCELERATION_STRUCTURE_WRITE_KHR", ), ( AccessFlags2::FRAGMENT_DENSITY_MAP_READ_EXT.0, "FRAGMENT_DENSITY_MAP_READ_EXT", ), ( AccessFlags2::COLOR_ATTACHMENT_READ_NONCOHERENT_EXT.0, "COLOR_ATTACHMENT_READ_NONCOHERENT_EXT", ), ( AccessFlags2::DESCRIPTOR_BUFFER_READ_EXT.0, "DESCRIPTOR_BUFFER_READ_EXT", ), ( AccessFlags2::INVOCATION_MASK_READ_HUAWEI.0, "INVOCATION_MASK_READ_HUAWEI", ), ( AccessFlags2::SHADER_BINDING_TABLE_READ_KHR.0, "SHADER_BINDING_TABLE_READ_KHR", ), (AccessFlags2::MICROMAP_READ_EXT.0, "MICROMAP_READ_EXT"), (AccessFlags2::MICROMAP_WRITE_EXT.0, "MICROMAP_WRITE_EXT"), (AccessFlags2::OPTICAL_FLOW_READ_NV.0, "OPTICAL_FLOW_READ_NV"), ( AccessFlags2::OPTICAL_FLOW_WRITE_NV.0, "OPTICAL_FLOW_WRITE_NV", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AcquireProfilingLockFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AndroidSurfaceCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AttachmentDescriptionFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(AttachmentDescriptionFlags::MAY_ALIAS.0, "MAY_ALIAS")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for AttachmentLoadOp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::LOAD => Some("LOAD"), Self::CLEAR => Some("CLEAR"), Self::DONT_CARE => Some("DONT_CARE"), Self::NONE_KHR => 
Some("NONE_KHR"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for AttachmentStoreOp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::STORE => Some("STORE"), Self::DONT_CARE => Some("DONT_CARE"), Self::NONE => Some("NONE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BlendFactor { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ZERO => Some("ZERO"), Self::ONE => Some("ONE"), Self::SRC_COLOR => Some("SRC_COLOR"), Self::ONE_MINUS_SRC_COLOR => Some("ONE_MINUS_SRC_COLOR"), Self::DST_COLOR => Some("DST_COLOR"), Self::ONE_MINUS_DST_COLOR => Some("ONE_MINUS_DST_COLOR"), Self::SRC_ALPHA => Some("SRC_ALPHA"), Self::ONE_MINUS_SRC_ALPHA => Some("ONE_MINUS_SRC_ALPHA"), Self::DST_ALPHA => Some("DST_ALPHA"), Self::ONE_MINUS_DST_ALPHA => Some("ONE_MINUS_DST_ALPHA"), Self::CONSTANT_COLOR => Some("CONSTANT_COLOR"), Self::ONE_MINUS_CONSTANT_COLOR => Some("ONE_MINUS_CONSTANT_COLOR"), Self::CONSTANT_ALPHA => Some("CONSTANT_ALPHA"), Self::ONE_MINUS_CONSTANT_ALPHA => Some("ONE_MINUS_CONSTANT_ALPHA"), Self::SRC_ALPHA_SATURATE => Some("SRC_ALPHA_SATURATE"), Self::SRC1_COLOR => Some("SRC1_COLOR"), Self::ONE_MINUS_SRC1_COLOR => Some("ONE_MINUS_SRC1_COLOR"), Self::SRC1_ALPHA => Some("SRC1_ALPHA"), Self::ONE_MINUS_SRC1_ALPHA => Some("ONE_MINUS_SRC1_ALPHA"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BlendOp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ADD => Some("ADD"), Self::SUBTRACT => Some("SUBTRACT"), Self::REVERSE_SUBTRACT => Some("REVERSE_SUBTRACT"), Self::MIN => Some("MIN"), Self::MAX => Some("MAX"), Self::ZERO_EXT => Some("ZERO_EXT"), Self::SRC_EXT => Some("SRC_EXT"), Self::DST_EXT => Some("DST_EXT"), Self::SRC_OVER_EXT => Some("SRC_OVER_EXT"), Self::DST_OVER_EXT => Some("DST_OVER_EXT"), Self::SRC_IN_EXT => Some("SRC_IN_EXT"), Self::DST_IN_EXT => Some("DST_IN_EXT"), Self::SRC_OUT_EXT => Some("SRC_OUT_EXT"), Self::DST_OUT_EXT => Some("DST_OUT_EXT"), Self::SRC_ATOP_EXT => Some("SRC_ATOP_EXT"), Self::DST_ATOP_EXT => Some("DST_ATOP_EXT"), Self::XOR_EXT => Some("XOR_EXT"), Self::MULTIPLY_EXT => Some("MULTIPLY_EXT"), Self::SCREEN_EXT => Some("SCREEN_EXT"), Self::OVERLAY_EXT => Some("OVERLAY_EXT"), Self::DARKEN_EXT => Some("DARKEN_EXT"), Self::LIGHTEN_EXT => Some("LIGHTEN_EXT"), Self::COLORDODGE_EXT => Some("COLORDODGE_EXT"), Self::COLORBURN_EXT => Some("COLORBURN_EXT"), Self::HARDLIGHT_EXT => Some("HARDLIGHT_EXT"), Self::SOFTLIGHT_EXT => Some("SOFTLIGHT_EXT"), Self::DIFFERENCE_EXT => Some("DIFFERENCE_EXT"), Self::EXCLUSION_EXT => Some("EXCLUSION_EXT"), Self::INVERT_EXT => Some("INVERT_EXT"), Self::INVERT_RGB_EXT => Some("INVERT_RGB_EXT"), Self::LINEARDODGE_EXT => Some("LINEARDODGE_EXT"), Self::LINEARBURN_EXT => Some("LINEARBURN_EXT"), Self::VIVIDLIGHT_EXT => Some("VIVIDLIGHT_EXT"), Self::LINEARLIGHT_EXT => Some("LINEARLIGHT_EXT"), Self::PINLIGHT_EXT => Some("PINLIGHT_EXT"), Self::HARDMIX_EXT => Some("HARDMIX_EXT"), Self::HSL_HUE_EXT => Some("HSL_HUE_EXT"), Self::HSL_SATURATION_EXT => Some("HSL_SATURATION_EXT"), Self::HSL_COLOR_EXT => Some("HSL_COLOR_EXT"), Self::HSL_LUMINOSITY_EXT => Some("HSL_LUMINOSITY_EXT"), Self::PLUS_EXT => Some("PLUS_EXT"), Self::PLUS_CLAMPED_EXT => Some("PLUS_CLAMPED_EXT"), Self::PLUS_CLAMPED_ALPHA_EXT => Some("PLUS_CLAMPED_ALPHA_EXT"), Self::PLUS_DARKER_EXT => Some("PLUS_DARKER_EXT"), 
Self::MINUS_EXT => Some("MINUS_EXT"), Self::MINUS_CLAMPED_EXT => Some("MINUS_CLAMPED_EXT"), Self::CONTRAST_EXT => Some("CONTRAST_EXT"), Self::INVERT_OVG_EXT => Some("INVERT_OVG_EXT"), Self::RED_EXT => Some("RED_EXT"), Self::GREEN_EXT => Some("GREEN_EXT"), Self::BLUE_EXT => Some("BLUE_EXT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BlendOverlapEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UNCORRELATED => Some("UNCORRELATED"), Self::DISJOINT => Some("DISJOINT"), Self::CONJOINT => Some("CONJOINT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BlockMatchWindowCompareModeQCOM { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::MIN => Some("MIN"), Self::MAX => Some("MAX"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BorderColor { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FLOAT_TRANSPARENT_BLACK => Some("FLOAT_TRANSPARENT_BLACK"), Self::INT_TRANSPARENT_BLACK => Some("INT_TRANSPARENT_BLACK"), Self::FLOAT_OPAQUE_BLACK => Some("FLOAT_OPAQUE_BLACK"), Self::INT_OPAQUE_BLACK => Some("INT_OPAQUE_BLACK"), Self::FLOAT_OPAQUE_WHITE => Some("FLOAT_OPAQUE_WHITE"), Self::INT_OPAQUE_WHITE => Some("INT_OPAQUE_WHITE"), Self::FLOAT_CUSTOM_EXT => Some("FLOAT_CUSTOM_EXT"), Self::INT_CUSTOM_EXT => Some("INT_CUSTOM_EXT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BufferCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (BufferCreateFlags::SPARSE_BINDING.0, "SPARSE_BINDING"), (BufferCreateFlags::SPARSE_RESIDENCY.0, "SPARSE_RESIDENCY"), (BufferCreateFlags::SPARSE_ALIASED.0, "SPARSE_ALIASED"), ( BufferCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0, "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT", ), ( BufferCreateFlags::VIDEO_PROFILE_INDEPENDENT_KHR.0, "VIDEO_PROFILE_INDEPENDENT_KHR", ), (BufferCreateFlags::PROTECTED.0, "PROTECTED"), ( BufferCreateFlags::DEVICE_ADDRESS_CAPTURE_REPLAY.0, "DEVICE_ADDRESS_CAPTURE_REPLAY", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for BufferUsageFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (BufferUsageFlags::TRANSFER_SRC.0, "TRANSFER_SRC"), (BufferUsageFlags::TRANSFER_DST.0, "TRANSFER_DST"), ( BufferUsageFlags::UNIFORM_TEXEL_BUFFER.0, "UNIFORM_TEXEL_BUFFER", ), ( BufferUsageFlags::STORAGE_TEXEL_BUFFER.0, "STORAGE_TEXEL_BUFFER", ), (BufferUsageFlags::UNIFORM_BUFFER.0, "UNIFORM_BUFFER"), (BufferUsageFlags::STORAGE_BUFFER.0, "STORAGE_BUFFER"), (BufferUsageFlags::INDEX_BUFFER.0, "INDEX_BUFFER"), (BufferUsageFlags::VERTEX_BUFFER.0, "VERTEX_BUFFER"), (BufferUsageFlags::INDIRECT_BUFFER.0, "INDIRECT_BUFFER"), ( BufferUsageFlags::VIDEO_DECODE_SRC_KHR.0, "VIDEO_DECODE_SRC_KHR", ), ( BufferUsageFlags::VIDEO_DECODE_DST_KHR.0, "VIDEO_DECODE_DST_KHR", ), ( BufferUsageFlags::TRANSFORM_FEEDBACK_BUFFER_EXT.0, "TRANSFORM_FEEDBACK_BUFFER_EXT", ), ( BufferUsageFlags::TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT.0, "TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT", ), ( BufferUsageFlags::CONDITIONAL_RENDERING_EXT.0, "CONDITIONAL_RENDERING_EXT", ), ( BufferUsageFlags::EXECUTION_GRAPH_SCRATCH_AMDX.0, "EXECUTION_GRAPH_SCRATCH_AMDX", ), ( BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR.0, 
"ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR", ), ( BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR.0, "ACCELERATION_STRUCTURE_STORAGE_KHR", ), ( BufferUsageFlags::SHADER_BINDING_TABLE_KHR.0, "SHADER_BINDING_TABLE_KHR", ), ( BufferUsageFlags::VIDEO_ENCODE_DST_KHR.0, "VIDEO_ENCODE_DST_KHR", ), ( BufferUsageFlags::VIDEO_ENCODE_SRC_KHR.0, "VIDEO_ENCODE_SRC_KHR", ), ( BufferUsageFlags::SAMPLER_DESCRIPTOR_BUFFER_EXT.0, "SAMPLER_DESCRIPTOR_BUFFER_EXT", ), ( BufferUsageFlags::RESOURCE_DESCRIPTOR_BUFFER_EXT.0, "RESOURCE_DESCRIPTOR_BUFFER_EXT", ), ( BufferUsageFlags::PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT.0, "PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT", ), ( BufferUsageFlags::MICROMAP_BUILD_INPUT_READ_ONLY_EXT.0, "MICROMAP_BUILD_INPUT_READ_ONLY_EXT", ), ( BufferUsageFlags::MICROMAP_STORAGE_EXT.0, "MICROMAP_STORAGE_EXT", ), ( BufferUsageFlags::SHADER_DEVICE_ADDRESS.0, "SHADER_DEVICE_ADDRESS", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for BufferUsageFlags2KHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags64, &str)] = &[ (BufferUsageFlags2KHR::TRANSFER_SRC.0, "TRANSFER_SRC"), (BufferUsageFlags2KHR::TRANSFER_DST.0, "TRANSFER_DST"), ( BufferUsageFlags2KHR::UNIFORM_TEXEL_BUFFER.0, "UNIFORM_TEXEL_BUFFER", ), ( BufferUsageFlags2KHR::STORAGE_TEXEL_BUFFER.0, "STORAGE_TEXEL_BUFFER", ), (BufferUsageFlags2KHR::UNIFORM_BUFFER.0, "UNIFORM_BUFFER"), (BufferUsageFlags2KHR::STORAGE_BUFFER.0, "STORAGE_BUFFER"), (BufferUsageFlags2KHR::INDEX_BUFFER.0, "INDEX_BUFFER"), (BufferUsageFlags2KHR::VERTEX_BUFFER.0, "VERTEX_BUFFER"), (BufferUsageFlags2KHR::INDIRECT_BUFFER.0, "INDIRECT_BUFFER"), ( BufferUsageFlags2KHR::EXECUTION_GRAPH_SCRATCH_AMDX.0, "EXECUTION_GRAPH_SCRATCH_AMDX", ), ( BufferUsageFlags2KHR::CONDITIONAL_RENDERING_EXT.0, "CONDITIONAL_RENDERING_EXT", ), ( BufferUsageFlags2KHR::SHADER_BINDING_TABLE.0, "SHADER_BINDING_TABLE", ), ( BufferUsageFlags2KHR::TRANSFORM_FEEDBACK_BUFFER_EXT.0, "TRANSFORM_FEEDBACK_BUFFER_EXT", ), ( BufferUsageFlags2KHR::TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT.0, "TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT", ), (BufferUsageFlags2KHR::VIDEO_DECODE_SRC.0, "VIDEO_DECODE_SRC"), (BufferUsageFlags2KHR::VIDEO_DECODE_DST.0, "VIDEO_DECODE_DST"), (BufferUsageFlags2KHR::VIDEO_ENCODE_DST.0, "VIDEO_ENCODE_DST"), (BufferUsageFlags2KHR::VIDEO_ENCODE_SRC.0, "VIDEO_ENCODE_SRC"), ( BufferUsageFlags2KHR::SHADER_DEVICE_ADDRESS.0, "SHADER_DEVICE_ADDRESS", ), ( BufferUsageFlags2KHR::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY.0, "ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY", ), ( BufferUsageFlags2KHR::ACCELERATION_STRUCTURE_STORAGE.0, "ACCELERATION_STRUCTURE_STORAGE", ), ( BufferUsageFlags2KHR::SAMPLER_DESCRIPTOR_BUFFER_EXT.0, "SAMPLER_DESCRIPTOR_BUFFER_EXT", ), ( BufferUsageFlags2KHR::RESOURCE_DESCRIPTOR_BUFFER_EXT.0, "RESOURCE_DESCRIPTOR_BUFFER_EXT", ), ( BufferUsageFlags2KHR::PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT.0, "PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT", ), ( BufferUsageFlags2KHR::MICROMAP_BUILD_INPUT_READ_ONLY_EXT.0, "MICROMAP_BUILD_INPUT_READ_ONLY_EXT", ), ( BufferUsageFlags2KHR::MICROMAP_STORAGE_EXT.0, "MICROMAP_STORAGE_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for BufferViewCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for BuildAccelerationStructureFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( BuildAccelerationStructureFlagsKHR::ALLOW_UPDATE.0, "ALLOW_UPDATE", ), 
( BuildAccelerationStructureFlagsKHR::ALLOW_COMPACTION.0, "ALLOW_COMPACTION", ), ( BuildAccelerationStructureFlagsKHR::PREFER_FAST_TRACE.0, "PREFER_FAST_TRACE", ), ( BuildAccelerationStructureFlagsKHR::PREFER_FAST_BUILD.0, "PREFER_FAST_BUILD", ), ( BuildAccelerationStructureFlagsKHR::LOW_MEMORY.0, "LOW_MEMORY", ), (BuildAccelerationStructureFlagsKHR::MOTION_NV.0, "MOTION_NV"), ( BuildAccelerationStructureFlagsKHR::ALLOW_OPACITY_MICROMAP_UPDATE_EXT.0, "ALLOW_OPACITY_MICROMAP_UPDATE_EXT", ), ( BuildAccelerationStructureFlagsKHR::ALLOW_DISABLE_OPACITY_MICROMAPS_EXT.0, "ALLOW_DISABLE_OPACITY_MICROMAPS_EXT", ), ( BuildAccelerationStructureFlagsKHR::ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT.0, "ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT", ), ( BuildAccelerationStructureFlagsKHR::ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV.0, "ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV", ), ( BuildAccelerationStructureFlagsKHR::ALLOW_DATA_ACCESS.0, "ALLOW_DATA_ACCESS", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for BuildAccelerationStructureModeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BUILD => Some("BUILD"), Self::UPDATE => Some("UPDATE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for BuildMicromapFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( BuildMicromapFlagsEXT::PREFER_FAST_TRACE.0, "PREFER_FAST_TRACE", ), ( BuildMicromapFlagsEXT::PREFER_FAST_BUILD.0, "PREFER_FAST_BUILD", ), ( BuildMicromapFlagsEXT::ALLOW_COMPACTION.0, "ALLOW_COMPACTION", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for BuildMicromapModeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BUILD => Some("BUILD"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ChromaLocation { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::COSITED_EVEN => Some("COSITED_EVEN"), Self::MIDPOINT => Some("MIDPOINT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CoarseSampleOrderTypeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEFAULT => Some("DEFAULT"), Self::CUSTOM => Some("CUSTOM"), Self::PIXEL_MAJOR => Some("PIXEL_MAJOR"), Self::SAMPLE_MAJOR => Some("SAMPLE_MAJOR"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ColorComponentFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ColorComponentFlags::R.0, "R"), (ColorComponentFlags::G.0, "G"), (ColorComponentFlags::B.0, "B"), (ColorComponentFlags::A.0, "A"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ColorSpaceKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::SRGB_NONLINEAR => Some("SRGB_NONLINEAR"), Self::DISPLAY_P3_NONLINEAR_EXT => Some("DISPLAY_P3_NONLINEAR_EXT"), Self::EXTENDED_SRGB_LINEAR_EXT => Some("EXTENDED_SRGB_LINEAR_EXT"), Self::DISPLAY_P3_LINEAR_EXT => Some("DISPLAY_P3_LINEAR_EXT"), Self::DCI_P3_NONLINEAR_EXT => Some("DCI_P3_NONLINEAR_EXT"), Self::BT709_LINEAR_EXT => Some("BT709_LINEAR_EXT"), Self::BT709_NONLINEAR_EXT => Some("BT709_NONLINEAR_EXT"), Self::BT2020_LINEAR_EXT => Some("BT2020_LINEAR_EXT"), Self::HDR10_ST2084_EXT => Some("HDR10_ST2084_EXT"), Self::DOLBYVISION_EXT => Some("DOLBYVISION_EXT"), Self::HDR10_HLG_EXT => 
Some("HDR10_HLG_EXT"), Self::ADOBERGB_LINEAR_EXT => Some("ADOBERGB_LINEAR_EXT"), Self::ADOBERGB_NONLINEAR_EXT => Some("ADOBERGB_NONLINEAR_EXT"), Self::PASS_THROUGH_EXT => Some("PASS_THROUGH_EXT"), Self::EXTENDED_SRGB_NONLINEAR_EXT => Some("EXTENDED_SRGB_NONLINEAR_EXT"), Self::DISPLAY_NATIVE_AMD => Some("DISPLAY_NATIVE_AMD"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CommandBufferLevel { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::PRIMARY => Some("PRIMARY"), Self::SECONDARY => Some("SECONDARY"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CommandBufferResetFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( CommandBufferResetFlags::RELEASE_RESOURCES.0, "RELEASE_RESOURCES", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for CommandBufferUsageFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( CommandBufferUsageFlags::ONE_TIME_SUBMIT.0, "ONE_TIME_SUBMIT", ), ( CommandBufferUsageFlags::RENDER_PASS_CONTINUE.0, "RENDER_PASS_CONTINUE", ), ( CommandBufferUsageFlags::SIMULTANEOUS_USE.0, "SIMULTANEOUS_USE", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for CommandPoolCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (CommandPoolCreateFlags::TRANSIENT.0, "TRANSIENT"), ( CommandPoolCreateFlags::RESET_COMMAND_BUFFER.0, "RESET_COMMAND_BUFFER", ), (CommandPoolCreateFlags::PROTECTED.0, "PROTECTED"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for CommandPoolResetFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( CommandPoolResetFlags::RELEASE_RESOURCES.0, "RELEASE_RESOURCES", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for CommandPoolTrimFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for CompareOp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NEVER => Some("NEVER"), Self::LESS => Some("LESS"), Self::EQUAL => Some("EQUAL"), Self::LESS_OR_EQUAL => Some("LESS_OR_EQUAL"), Self::GREATER => Some("GREATER"), Self::NOT_EQUAL => Some("NOT_EQUAL"), Self::GREATER_OR_EQUAL => Some("GREATER_OR_EQUAL"), Self::ALWAYS => Some("ALWAYS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ComponentSwizzle { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::IDENTITY => Some("IDENTITY"), Self::ZERO => Some("ZERO"), Self::ONE => Some("ONE"), Self::R => Some("R"), Self::G => Some("G"), Self::B => Some("B"), Self::A => Some("A"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ComponentTypeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FLOAT16 => Some("FLOAT16"), Self::FLOAT32 => Some("FLOAT32"), Self::FLOAT64 => Some("FLOAT64"), Self::SINT8 => Some("SINT8"), Self::SINT16 => Some("SINT16"), Self::SINT32 => Some("SINT32"), Self::SINT64 => Some("SINT64"), Self::UINT8 => Some("UINT8"), Self::UINT16 => Some("UINT16"), Self::UINT32 => Some("UINT32"), Self::UINT64 => Some("UINT64"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for 
CompositeAlphaFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (CompositeAlphaFlagsKHR::OPAQUE.0, "OPAQUE"), (CompositeAlphaFlagsKHR::PRE_MULTIPLIED.0, "PRE_MULTIPLIED"), (CompositeAlphaFlagsKHR::POST_MULTIPLIED.0, "POST_MULTIPLIED"), (CompositeAlphaFlagsKHR::INHERIT.0, "INHERIT"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ConditionalRenderingFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(ConditionalRenderingFlagsEXT::INVERTED.0, "INVERTED")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ConservativeRasterizationModeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DISABLED => Some("DISABLED"), Self::OVERESTIMATE => Some("OVERESTIMATE"), Self::UNDERESTIMATE => Some("UNDERESTIMATE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CopyAccelerationStructureModeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::CLONE => Some("CLONE"), Self::COMPACT => Some("COMPACT"), Self::SERIALIZE => Some("SERIALIZE"), Self::DESERIALIZE => Some("DESERIALIZE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CopyMicromapModeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::CLONE => Some("CLONE"), Self::SERIALIZE => Some("SERIALIZE"), Self::DESERIALIZE => Some("DESERIALIZE"), Self::COMPACT => Some("COMPACT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CoverageModulationModeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NONE => Some("NONE"), Self::RGB => Some("RGB"), Self::ALPHA => Some("ALPHA"), Self::RGBA => Some("RGBA"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CoverageReductionModeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::MERGE => Some("MERGE"), Self::TRUNCATE => Some("TRUNCATE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CubicFilterWeightsQCOM { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::CATMULL_ROM => Some("CATMULL_ROM"), Self::ZERO_TANGENT_CARDINAL => Some("ZERO_TANGENT_CARDINAL"), Self::B_SPLINE => Some("B_SPLINE"), Self::MITCHELL_NETRAVALI => Some("MITCHELL_NETRAVALI"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for CullModeFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (CullModeFlags::NONE.0, "NONE"), (CullModeFlags::FRONT.0, "FRONT"), (CullModeFlags::BACK.0, "BACK"), (CullModeFlags::FRONT_AND_BACK.0, "FRONT_AND_BACK"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DebugReportFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (DebugReportFlagsEXT::INFORMATION.0, "INFORMATION"), (DebugReportFlagsEXT::WARNING.0, "WARNING"), ( DebugReportFlagsEXT::PERFORMANCE_WARNING.0, "PERFORMANCE_WARNING", ), (DebugReportFlagsEXT::ERROR.0, "ERROR"), (DebugReportFlagsEXT::DEBUG.0, "DEBUG"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DebugReportObjectTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match 
*self { Self::UNKNOWN => Some("UNKNOWN"), Self::INSTANCE => Some("INSTANCE"), Self::PHYSICAL_DEVICE => Some("PHYSICAL_DEVICE"), Self::DEVICE => Some("DEVICE"), Self::QUEUE => Some("QUEUE"), Self::SEMAPHORE => Some("SEMAPHORE"), Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"), Self::FENCE => Some("FENCE"), Self::DEVICE_MEMORY => Some("DEVICE_MEMORY"), Self::BUFFER => Some("BUFFER"), Self::IMAGE => Some("IMAGE"), Self::EVENT => Some("EVENT"), Self::QUERY_POOL => Some("QUERY_POOL"), Self::BUFFER_VIEW => Some("BUFFER_VIEW"), Self::IMAGE_VIEW => Some("IMAGE_VIEW"), Self::SHADER_MODULE => Some("SHADER_MODULE"), Self::PIPELINE_CACHE => Some("PIPELINE_CACHE"), Self::PIPELINE_LAYOUT => Some("PIPELINE_LAYOUT"), Self::RENDER_PASS => Some("RENDER_PASS"), Self::PIPELINE => Some("PIPELINE"), Self::DESCRIPTOR_SET_LAYOUT => Some("DESCRIPTOR_SET_LAYOUT"), Self::SAMPLER => Some("SAMPLER"), Self::DESCRIPTOR_POOL => Some("DESCRIPTOR_POOL"), Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"), Self::FRAMEBUFFER => Some("FRAMEBUFFER"), Self::COMMAND_POOL => Some("COMMAND_POOL"), Self::SURFACE_KHR => Some("SURFACE_KHR"), Self::SWAPCHAIN_KHR => Some("SWAPCHAIN_KHR"), Self::DEBUG_REPORT_CALLBACK_EXT => Some("DEBUG_REPORT_CALLBACK_EXT"), Self::DISPLAY_KHR => Some("DISPLAY_KHR"), Self::DISPLAY_MODE_KHR => Some("DISPLAY_MODE_KHR"), Self::VALIDATION_CACHE_EXT => Some("VALIDATION_CACHE_EXT"), Self::SAMPLER_YCBCR_CONVERSION => Some("SAMPLER_YCBCR_CONVERSION"), Self::DESCRIPTOR_UPDATE_TEMPLATE => Some("DESCRIPTOR_UPDATE_TEMPLATE"), Self::CU_MODULE_NVX => Some("CU_MODULE_NVX"), Self::CU_FUNCTION_NVX => Some("CU_FUNCTION_NVX"), Self::ACCELERATION_STRUCTURE_KHR => Some("ACCELERATION_STRUCTURE_KHR"), Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"), Self::CUDA_MODULE_NV => Some("CUDA_MODULE_NV"), Self::CUDA_FUNCTION_NV => Some("CUDA_FUNCTION_NV"), Self::BUFFER_COLLECTION_FUCHSIA => Some("BUFFER_COLLECTION_FUCHSIA"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DebugUtilsMessageSeverityFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (DebugUtilsMessageSeverityFlagsEXT::VERBOSE.0, "VERBOSE"), (DebugUtilsMessageSeverityFlagsEXT::INFO.0, "INFO"), (DebugUtilsMessageSeverityFlagsEXT::WARNING.0, "WARNING"), (DebugUtilsMessageSeverityFlagsEXT::ERROR.0, "ERROR"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DebugUtilsMessageTypeFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (DebugUtilsMessageTypeFlagsEXT::GENERAL.0, "GENERAL"), (DebugUtilsMessageTypeFlagsEXT::VALIDATION.0, "VALIDATION"), (DebugUtilsMessageTypeFlagsEXT::PERFORMANCE.0, "PERFORMANCE"), ( DebugUtilsMessageTypeFlagsEXT::DEVICE_ADDRESS_BINDING.0, "DEVICE_ADDRESS_BINDING", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DebugUtilsMessengerCallbackDataFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DebugUtilsMessengerCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DependencyFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (DependencyFlags::BY_REGION.0, "BY_REGION"), (DependencyFlags::FEEDBACK_LOOP_EXT.0, "FEEDBACK_LOOP_EXT"), (DependencyFlags::DEVICE_GROUP.0, "DEVICE_GROUP"), 
(DependencyFlags::VIEW_LOCAL.0, "VIEW_LOCAL"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DepthBiasRepresentationEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::LEAST_REPRESENTABLE_VALUE_FORMAT => Some("LEAST_REPRESENTABLE_VALUE_FORMAT"), Self::LEAST_REPRESENTABLE_VALUE_FORCE_UNORM => { Some("LEAST_REPRESENTABLE_VALUE_FORCE_UNORM") } Self::FLOAT => Some("FLOAT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DescriptorBindingFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( DescriptorBindingFlags::UPDATE_AFTER_BIND.0, "UPDATE_AFTER_BIND", ), ( DescriptorBindingFlags::UPDATE_UNUSED_WHILE_PENDING.0, "UPDATE_UNUSED_WHILE_PENDING", ), (DescriptorBindingFlags::PARTIALLY_BOUND.0, "PARTIALLY_BOUND"), ( DescriptorBindingFlags::VARIABLE_DESCRIPTOR_COUNT.0, "VARIABLE_DESCRIPTOR_COUNT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DescriptorPoolCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET.0, "FREE_DESCRIPTOR_SET", ), (DescriptorPoolCreateFlags::HOST_ONLY_EXT.0, "HOST_ONLY_EXT"), ( DescriptorPoolCreateFlags::ALLOW_OVERALLOCATION_SETS_NV.0, "ALLOW_OVERALLOCATION_SETS_NV", ), ( DescriptorPoolCreateFlags::ALLOW_OVERALLOCATION_POOLS_NV.0, "ALLOW_OVERALLOCATION_POOLS_NV", ), ( DescriptorPoolCreateFlags::UPDATE_AFTER_BIND.0, "UPDATE_AFTER_BIND", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DescriptorPoolResetFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DescriptorSetLayoutCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( DescriptorSetLayoutCreateFlags::PUSH_DESCRIPTOR_KHR.0, "PUSH_DESCRIPTOR_KHR", ), ( DescriptorSetLayoutCreateFlags::DESCRIPTOR_BUFFER_EXT.0, "DESCRIPTOR_BUFFER_EXT", ), ( DescriptorSetLayoutCreateFlags::EMBEDDED_IMMUTABLE_SAMPLERS_EXT.0, "EMBEDDED_IMMUTABLE_SAMPLERS_EXT", ), ( DescriptorSetLayoutCreateFlags::INDIRECT_BINDABLE_NV.0, "INDIRECT_BINDABLE_NV", ), ( DescriptorSetLayoutCreateFlags::HOST_ONLY_POOL_EXT.0, "HOST_ONLY_POOL_EXT", ), ( DescriptorSetLayoutCreateFlags::PER_STAGE_NV.0, "PER_STAGE_NV", ), ( DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL.0, "UPDATE_AFTER_BIND_POOL", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DescriptorType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::SAMPLER => Some("SAMPLER"), Self::COMBINED_IMAGE_SAMPLER => Some("COMBINED_IMAGE_SAMPLER"), Self::SAMPLED_IMAGE => Some("SAMPLED_IMAGE"), Self::STORAGE_IMAGE => Some("STORAGE_IMAGE"), Self::UNIFORM_TEXEL_BUFFER => Some("UNIFORM_TEXEL_BUFFER"), Self::STORAGE_TEXEL_BUFFER => Some("STORAGE_TEXEL_BUFFER"), Self::UNIFORM_BUFFER => Some("UNIFORM_BUFFER"), Self::STORAGE_BUFFER => Some("STORAGE_BUFFER"), Self::UNIFORM_BUFFER_DYNAMIC => Some("UNIFORM_BUFFER_DYNAMIC"), Self::STORAGE_BUFFER_DYNAMIC => Some("STORAGE_BUFFER_DYNAMIC"), Self::INPUT_ATTACHMENT => Some("INPUT_ATTACHMENT"), Self::ACCELERATION_STRUCTURE_KHR => Some("ACCELERATION_STRUCTURE_KHR"), Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"), Self::SAMPLE_WEIGHT_IMAGE_QCOM => Some("SAMPLE_WEIGHT_IMAGE_QCOM"), Self::BLOCK_MATCH_IMAGE_QCOM => Some("BLOCK_MATCH_IMAGE_QCOM"), Self::MUTABLE_EXT => 
Some("MUTABLE_EXT"), Self::INLINE_UNIFORM_BLOCK => Some("INLINE_UNIFORM_BLOCK"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DescriptorUpdateTemplateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DescriptorUpdateTemplateType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"), Self::PUSH_DESCRIPTORS_KHR => Some("PUSH_DESCRIPTORS_KHR"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DeviceAddressBindingFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( DeviceAddressBindingFlagsEXT::INTERNAL_OBJECT.0, "INTERNAL_OBJECT", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DeviceAddressBindingTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BIND => Some("BIND"), Self::UNBIND => Some("UNBIND"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DeviceCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DeviceDiagnosticsConfigFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( DeviceDiagnosticsConfigFlagsNV::ENABLE_SHADER_DEBUG_INFO.0, "ENABLE_SHADER_DEBUG_INFO", ), ( DeviceDiagnosticsConfigFlagsNV::ENABLE_RESOURCE_TRACKING.0, "ENABLE_RESOURCE_TRACKING", ), ( DeviceDiagnosticsConfigFlagsNV::ENABLE_AUTOMATIC_CHECKPOINTS.0, "ENABLE_AUTOMATIC_CHECKPOINTS", ), ( DeviceDiagnosticsConfigFlagsNV::ENABLE_SHADER_ERROR_REPORTING.0, "ENABLE_SHADER_ERROR_REPORTING", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DeviceEventTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DISPLAY_HOTPLUG => Some("DISPLAY_HOTPLUG"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DeviceFaultAddressTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NONE => Some("NONE"), Self::READ_INVALID => Some("READ_INVALID"), Self::WRITE_INVALID => Some("WRITE_INVALID"), Self::EXECUTE_INVALID => Some("EXECUTE_INVALID"), Self::INSTRUCTION_POINTER_UNKNOWN => Some("INSTRUCTION_POINTER_UNKNOWN"), Self::INSTRUCTION_POINTER_INVALID => Some("INSTRUCTION_POINTER_INVALID"), Self::INSTRUCTION_POINTER_FAULT => Some("INSTRUCTION_POINTER_FAULT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DeviceFaultVendorBinaryHeaderVersionEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ONE => Some("ONE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DeviceGroupPresentModeFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (DeviceGroupPresentModeFlagsKHR::LOCAL.0, "LOCAL"), (DeviceGroupPresentModeFlagsKHR::REMOTE.0, "REMOTE"), (DeviceGroupPresentModeFlagsKHR::SUM.0, "SUM"), ( DeviceGroupPresentModeFlagsKHR::LOCAL_MULTI_DEVICE.0, "LOCAL_MULTI_DEVICE", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DeviceMemoryReportEventTypeEXT { fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ALLOCATE => Some("ALLOCATE"), Self::FREE => Some("FREE"), Self::IMPORT => Some("IMPORT"), Self::UNIMPORT => Some("UNIMPORT"), Self::ALLOCATION_FAILED => Some("ALLOCATION_FAILED"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DeviceMemoryReportFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DeviceQueueCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(DeviceQueueCreateFlags::PROTECTED.0, "PROTECTED")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DirectDriverLoadingFlagsLUNARG { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DirectDriverLoadingModeLUNARG { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::EXCLUSIVE => Some("EXCLUSIVE"), Self::INCLUSIVE => Some("INCLUSIVE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DirectFBSurfaceCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DiscardRectangleModeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::INCLUSIVE => Some("INCLUSIVE"), Self::EXCLUSIVE => Some("EXCLUSIVE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DisplacementMicromapFormatNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TYPE_64_TRIANGLES_64_BYTES => Some("TYPE_64_TRIANGLES_64_BYTES"), Self::TYPE_256_TRIANGLES_128_BYTES => Some("TYPE_256_TRIANGLES_128_BYTES"), Self::TYPE_1024_TRIANGLES_128_BYTES => Some("TYPE_1024_TRIANGLES_128_BYTES"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DisplayEventTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FIRST_PIXEL_OUT => Some("FIRST_PIXEL_OUT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DisplayModeCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DisplayPlaneAlphaFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (DisplayPlaneAlphaFlagsKHR::OPAQUE.0, "OPAQUE"), (DisplayPlaneAlphaFlagsKHR::GLOBAL.0, "GLOBAL"), (DisplayPlaneAlphaFlagsKHR::PER_PIXEL.0, "PER_PIXEL"), ( DisplayPlaneAlphaFlagsKHR::PER_PIXEL_PREMULTIPLIED.0, "PER_PIXEL_PREMULTIPLIED", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DisplayPowerStateEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::OFF => Some("OFF"), Self::SUSPEND => Some("SUSPEND"), Self::ON => Some("ON"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DisplaySurfaceCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for DriverId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = 
match *self { Self::AMD_PROPRIETARY => Some("AMD_PROPRIETARY"), Self::AMD_OPEN_SOURCE => Some("AMD_OPEN_SOURCE"), Self::MESA_RADV => Some("MESA_RADV"), Self::NVIDIA_PROPRIETARY => Some("NVIDIA_PROPRIETARY"), Self::INTEL_PROPRIETARY_WINDOWS => Some("INTEL_PROPRIETARY_WINDOWS"), Self::INTEL_OPEN_SOURCE_MESA => Some("INTEL_OPEN_SOURCE_MESA"), Self::IMAGINATION_PROPRIETARY => Some("IMAGINATION_PROPRIETARY"), Self::QUALCOMM_PROPRIETARY => Some("QUALCOMM_PROPRIETARY"), Self::ARM_PROPRIETARY => Some("ARM_PROPRIETARY"), Self::GOOGLE_SWIFTSHADER => Some("GOOGLE_SWIFTSHADER"), Self::GGP_PROPRIETARY => Some("GGP_PROPRIETARY"), Self::BROADCOM_PROPRIETARY => Some("BROADCOM_PROPRIETARY"), Self::MESA_LLVMPIPE => Some("MESA_LLVMPIPE"), Self::MOLTENVK => Some("MOLTENVK"), Self::COREAVI_PROPRIETARY => Some("COREAVI_PROPRIETARY"), Self::JUICE_PROPRIETARY => Some("JUICE_PROPRIETARY"), Self::VERISILICON_PROPRIETARY => Some("VERISILICON_PROPRIETARY"), Self::MESA_TURNIP => Some("MESA_TURNIP"), Self::MESA_V3DV => Some("MESA_V3DV"), Self::MESA_PANVK => Some("MESA_PANVK"), Self::SAMSUNG_PROPRIETARY => Some("SAMSUNG_PROPRIETARY"), Self::MESA_VENUS => Some("MESA_VENUS"), Self::MESA_DOZEN => Some("MESA_DOZEN"), Self::MESA_NVK => Some("MESA_NVK"), Self::IMAGINATION_OPEN_SOURCE_MESA => Some("IMAGINATION_OPEN_SOURCE_MESA"), Self::MESA_AGXV => Some("MESA_AGXV"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for DynamicState { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::VIEWPORT => Some("VIEWPORT"), Self::SCISSOR => Some("SCISSOR"), Self::LINE_WIDTH => Some("LINE_WIDTH"), Self::DEPTH_BIAS => Some("DEPTH_BIAS"), Self::BLEND_CONSTANTS => Some("BLEND_CONSTANTS"), Self::DEPTH_BOUNDS => Some("DEPTH_BOUNDS"), Self::STENCIL_COMPARE_MASK => Some("STENCIL_COMPARE_MASK"), Self::STENCIL_WRITE_MASK => Some("STENCIL_WRITE_MASK"), Self::STENCIL_REFERENCE => Some("STENCIL_REFERENCE"), Self::VIEWPORT_W_SCALING_NV => Some("VIEWPORT_W_SCALING_NV"), Self::DISCARD_RECTANGLE_EXT => Some("DISCARD_RECTANGLE_EXT"), Self::DISCARD_RECTANGLE_ENABLE_EXT => Some("DISCARD_RECTANGLE_ENABLE_EXT"), Self::DISCARD_RECTANGLE_MODE_EXT => Some("DISCARD_RECTANGLE_MODE_EXT"), Self::SAMPLE_LOCATIONS_EXT => Some("SAMPLE_LOCATIONS_EXT"), Self::RAY_TRACING_PIPELINE_STACK_SIZE_KHR => { Some("RAY_TRACING_PIPELINE_STACK_SIZE_KHR") } Self::VIEWPORT_SHADING_RATE_PALETTE_NV => Some("VIEWPORT_SHADING_RATE_PALETTE_NV"), Self::VIEWPORT_COARSE_SAMPLE_ORDER_NV => Some("VIEWPORT_COARSE_SAMPLE_ORDER_NV"), Self::EXCLUSIVE_SCISSOR_ENABLE_NV => Some("EXCLUSIVE_SCISSOR_ENABLE_NV"), Self::EXCLUSIVE_SCISSOR_NV => Some("EXCLUSIVE_SCISSOR_NV"), Self::FRAGMENT_SHADING_RATE_KHR => Some("FRAGMENT_SHADING_RATE_KHR"), Self::VERTEX_INPUT_EXT => Some("VERTEX_INPUT_EXT"), Self::PATCH_CONTROL_POINTS_EXT => Some("PATCH_CONTROL_POINTS_EXT"), Self::LOGIC_OP_EXT => Some("LOGIC_OP_EXT"), Self::COLOR_WRITE_ENABLE_EXT => Some("COLOR_WRITE_ENABLE_EXT"), Self::DEPTH_CLAMP_ENABLE_EXT => Some("DEPTH_CLAMP_ENABLE_EXT"), Self::POLYGON_MODE_EXT => Some("POLYGON_MODE_EXT"), Self::RASTERIZATION_SAMPLES_EXT => Some("RASTERIZATION_SAMPLES_EXT"), Self::SAMPLE_MASK_EXT => Some("SAMPLE_MASK_EXT"), Self::ALPHA_TO_COVERAGE_ENABLE_EXT => Some("ALPHA_TO_COVERAGE_ENABLE_EXT"), Self::ALPHA_TO_ONE_ENABLE_EXT => Some("ALPHA_TO_ONE_ENABLE_EXT"), Self::LOGIC_OP_ENABLE_EXT => Some("LOGIC_OP_ENABLE_EXT"), Self::COLOR_BLEND_ENABLE_EXT => Some("COLOR_BLEND_ENABLE_EXT"), Self::COLOR_BLEND_EQUATION_EXT => 
Some("COLOR_BLEND_EQUATION_EXT"), Self::COLOR_WRITE_MASK_EXT => Some("COLOR_WRITE_MASK_EXT"), Self::TESSELLATION_DOMAIN_ORIGIN_EXT => Some("TESSELLATION_DOMAIN_ORIGIN_EXT"), Self::RASTERIZATION_STREAM_EXT => Some("RASTERIZATION_STREAM_EXT"), Self::CONSERVATIVE_RASTERIZATION_MODE_EXT => { Some("CONSERVATIVE_RASTERIZATION_MODE_EXT") } Self::EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT => { Some("EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT") } Self::DEPTH_CLIP_ENABLE_EXT => Some("DEPTH_CLIP_ENABLE_EXT"), Self::SAMPLE_LOCATIONS_ENABLE_EXT => Some("SAMPLE_LOCATIONS_ENABLE_EXT"), Self::COLOR_BLEND_ADVANCED_EXT => Some("COLOR_BLEND_ADVANCED_EXT"), Self::PROVOKING_VERTEX_MODE_EXT => Some("PROVOKING_VERTEX_MODE_EXT"), Self::LINE_RASTERIZATION_MODE_EXT => Some("LINE_RASTERIZATION_MODE_EXT"), Self::LINE_STIPPLE_ENABLE_EXT => Some("LINE_STIPPLE_ENABLE_EXT"), Self::DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT => Some("DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT"), Self::VIEWPORT_W_SCALING_ENABLE_NV => Some("VIEWPORT_W_SCALING_ENABLE_NV"), Self::VIEWPORT_SWIZZLE_NV => Some("VIEWPORT_SWIZZLE_NV"), Self::COVERAGE_TO_COLOR_ENABLE_NV => Some("COVERAGE_TO_COLOR_ENABLE_NV"), Self::COVERAGE_TO_COLOR_LOCATION_NV => Some("COVERAGE_TO_COLOR_LOCATION_NV"), Self::COVERAGE_MODULATION_MODE_NV => Some("COVERAGE_MODULATION_MODE_NV"), Self::COVERAGE_MODULATION_TABLE_ENABLE_NV => { Some("COVERAGE_MODULATION_TABLE_ENABLE_NV") } Self::COVERAGE_MODULATION_TABLE_NV => Some("COVERAGE_MODULATION_TABLE_NV"), Self::SHADING_RATE_IMAGE_ENABLE_NV => Some("SHADING_RATE_IMAGE_ENABLE_NV"), Self::REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV => { Some("REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV") } Self::COVERAGE_REDUCTION_MODE_NV => Some("COVERAGE_REDUCTION_MODE_NV"), Self::ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT => { Some("ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT") } Self::LINE_STIPPLE_KHR => Some("LINE_STIPPLE_KHR"), Self::CULL_MODE => Some("CULL_MODE"), Self::FRONT_FACE => Some("FRONT_FACE"), Self::PRIMITIVE_TOPOLOGY => Some("PRIMITIVE_TOPOLOGY"), Self::VIEWPORT_WITH_COUNT => Some("VIEWPORT_WITH_COUNT"), Self::SCISSOR_WITH_COUNT => Some("SCISSOR_WITH_COUNT"), Self::VERTEX_INPUT_BINDING_STRIDE => Some("VERTEX_INPUT_BINDING_STRIDE"), Self::DEPTH_TEST_ENABLE => Some("DEPTH_TEST_ENABLE"), Self::DEPTH_WRITE_ENABLE => Some("DEPTH_WRITE_ENABLE"), Self::DEPTH_COMPARE_OP => Some("DEPTH_COMPARE_OP"), Self::DEPTH_BOUNDS_TEST_ENABLE => Some("DEPTH_BOUNDS_TEST_ENABLE"), Self::STENCIL_TEST_ENABLE => Some("STENCIL_TEST_ENABLE"), Self::STENCIL_OP => Some("STENCIL_OP"), Self::RASTERIZER_DISCARD_ENABLE => Some("RASTERIZER_DISCARD_ENABLE"), Self::DEPTH_BIAS_ENABLE => Some("DEPTH_BIAS_ENABLE"), Self::PRIMITIVE_RESTART_ENABLE => Some("PRIMITIVE_RESTART_ENABLE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for EventCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(EventCreateFlags::DEVICE_ONLY.0, "DEVICE_ONLY")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExportMetalObjectTypeFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( ExportMetalObjectTypeFlagsEXT::METAL_DEVICE.0, "METAL_DEVICE", ), ( ExportMetalObjectTypeFlagsEXT::METAL_COMMAND_QUEUE.0, "METAL_COMMAND_QUEUE", ), ( ExportMetalObjectTypeFlagsEXT::METAL_BUFFER.0, "METAL_BUFFER", ), ( ExportMetalObjectTypeFlagsEXT::METAL_TEXTURE.0, "METAL_TEXTURE", ), ( ExportMetalObjectTypeFlagsEXT::METAL_IOSURFACE.0, "METAL_IOSURFACE", ), ( 
ExportMetalObjectTypeFlagsEXT::METAL_SHARED_EVENT.0, "METAL_SHARED_EVENT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalFenceFeatureFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ExternalFenceFeatureFlags::EXPORTABLE.0, "EXPORTABLE"), (ExternalFenceFeatureFlags::IMPORTABLE.0, "IMPORTABLE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalFenceHandleTypeFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ExternalFenceHandleTypeFlags::OPAQUE_FD.0, "OPAQUE_FD"), (ExternalFenceHandleTypeFlags::OPAQUE_WIN32.0, "OPAQUE_WIN32"), ( ExternalFenceHandleTypeFlags::OPAQUE_WIN32_KMT.0, "OPAQUE_WIN32_KMT", ), (ExternalFenceHandleTypeFlags::SYNC_FD.0, "SYNC_FD"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalMemoryFeatureFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( ExternalMemoryFeatureFlags::DEDICATED_ONLY.0, "DEDICATED_ONLY", ), (ExternalMemoryFeatureFlags::EXPORTABLE.0, "EXPORTABLE"), (ExternalMemoryFeatureFlags::IMPORTABLE.0, "IMPORTABLE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalMemoryFeatureFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( ExternalMemoryFeatureFlagsNV::DEDICATED_ONLY.0, "DEDICATED_ONLY", ), (ExternalMemoryFeatureFlagsNV::EXPORTABLE.0, "EXPORTABLE"), (ExternalMemoryFeatureFlagsNV::IMPORTABLE.0, "IMPORTABLE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalMemoryHandleTypeFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ExternalMemoryHandleTypeFlags::OPAQUE_FD.0, "OPAQUE_FD"), ( ExternalMemoryHandleTypeFlags::OPAQUE_WIN32.0, "OPAQUE_WIN32", ), ( ExternalMemoryHandleTypeFlags::OPAQUE_WIN32_KMT.0, "OPAQUE_WIN32_KMT", ), ( ExternalMemoryHandleTypeFlags::D3D11_TEXTURE.0, "D3D11_TEXTURE", ), ( ExternalMemoryHandleTypeFlags::D3D11_TEXTURE_KMT.0, "D3D11_TEXTURE_KMT", ), (ExternalMemoryHandleTypeFlags::D3D12_HEAP.0, "D3D12_HEAP"), ( ExternalMemoryHandleTypeFlags::D3D12_RESOURCE.0, "D3D12_RESOURCE", ), (ExternalMemoryHandleTypeFlags::DMA_BUF_EXT.0, "DMA_BUF_EXT"), ( ExternalMemoryHandleTypeFlags::ANDROID_HARDWARE_BUFFER_ANDROID.0, "ANDROID_HARDWARE_BUFFER_ANDROID", ), ( ExternalMemoryHandleTypeFlags::HOST_ALLOCATION_EXT.0, "HOST_ALLOCATION_EXT", ), ( ExternalMemoryHandleTypeFlags::HOST_MAPPED_FOREIGN_MEMORY_EXT.0, "HOST_MAPPED_FOREIGN_MEMORY_EXT", ), ( ExternalMemoryHandleTypeFlags::ZIRCON_VMO_FUCHSIA.0, "ZIRCON_VMO_FUCHSIA", ), ( ExternalMemoryHandleTypeFlags::RDMA_ADDRESS_NV.0, "RDMA_ADDRESS_NV", ), ( ExternalMemoryHandleTypeFlags::SCREEN_BUFFER_QNX.0, "SCREEN_BUFFER_QNX", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalMemoryHandleTypeFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( ExternalMemoryHandleTypeFlagsNV::OPAQUE_WIN32.0, "OPAQUE_WIN32", ), ( ExternalMemoryHandleTypeFlagsNV::OPAQUE_WIN32_KMT.0, "OPAQUE_WIN32_KMT", ), ( ExternalMemoryHandleTypeFlagsNV::D3D11_IMAGE.0, "D3D11_IMAGE", ), ( ExternalMemoryHandleTypeFlagsNV::D3D11_IMAGE_KMT.0, "D3D11_IMAGE_KMT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalSemaphoreFeatureFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ExternalSemaphoreFeatureFlags::EXPORTABLE.0, "EXPORTABLE"), 
(ExternalSemaphoreFeatureFlags::IMPORTABLE.0, "IMPORTABLE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ExternalSemaphoreHandleTypeFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ExternalSemaphoreHandleTypeFlags::OPAQUE_FD.0, "OPAQUE_FD"), ( ExternalSemaphoreHandleTypeFlags::OPAQUE_WIN32.0, "OPAQUE_WIN32", ), ( ExternalSemaphoreHandleTypeFlags::OPAQUE_WIN32_KMT.0, "OPAQUE_WIN32_KMT", ), ( ExternalSemaphoreHandleTypeFlags::D3D12_FENCE.0, "D3D12_FENCE", ), (ExternalSemaphoreHandleTypeFlags::SYNC_FD.0, "SYNC_FD"), ( ExternalSemaphoreHandleTypeFlags::ZIRCON_EVENT_FUCHSIA.0, "ZIRCON_EVENT_FUCHSIA", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for FenceCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(FenceCreateFlags::SIGNALED.0, "SIGNALED")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for FenceImportFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(FenceImportFlags::TEMPORARY.0, "TEMPORARY")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for Filter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NEAREST => Some("NEAREST"), Self::LINEAR => Some("LINEAR"), Self::CUBIC_EXT => Some("CUBIC_EXT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for Format { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UNDEFINED => Some("UNDEFINED"), Self::R4G4_UNORM_PACK8 => Some("R4G4_UNORM_PACK8"), Self::R4G4B4A4_UNORM_PACK16 => Some("R4G4B4A4_UNORM_PACK16"), Self::B4G4R4A4_UNORM_PACK16 => Some("B4G4R4A4_UNORM_PACK16"), Self::R5G6B5_UNORM_PACK16 => Some("R5G6B5_UNORM_PACK16"), Self::B5G6R5_UNORM_PACK16 => Some("B5G6R5_UNORM_PACK16"), Self::R5G5B5A1_UNORM_PACK16 => Some("R5G5B5A1_UNORM_PACK16"), Self::B5G5R5A1_UNORM_PACK16 => Some("B5G5R5A1_UNORM_PACK16"), Self::A1R5G5B5_UNORM_PACK16 => Some("A1R5G5B5_UNORM_PACK16"), Self::R8_UNORM => Some("R8_UNORM"), Self::R8_SNORM => Some("R8_SNORM"), Self::R8_USCALED => Some("R8_USCALED"), Self::R8_SSCALED => Some("R8_SSCALED"), Self::R8_UINT => Some("R8_UINT"), Self::R8_SINT => Some("R8_SINT"), Self::R8_SRGB => Some("R8_SRGB"), Self::R8G8_UNORM => Some("R8G8_UNORM"), Self::R8G8_SNORM => Some("R8G8_SNORM"), Self::R8G8_USCALED => Some("R8G8_USCALED"), Self::R8G8_SSCALED => Some("R8G8_SSCALED"), Self::R8G8_UINT => Some("R8G8_UINT"), Self::R8G8_SINT => Some("R8G8_SINT"), Self::R8G8_SRGB => Some("R8G8_SRGB"), Self::R8G8B8_UNORM => Some("R8G8B8_UNORM"), Self::R8G8B8_SNORM => Some("R8G8B8_SNORM"), Self::R8G8B8_USCALED => Some("R8G8B8_USCALED"), Self::R8G8B8_SSCALED => Some("R8G8B8_SSCALED"), Self::R8G8B8_UINT => Some("R8G8B8_UINT"), Self::R8G8B8_SINT => Some("R8G8B8_SINT"), Self::R8G8B8_SRGB => Some("R8G8B8_SRGB"), Self::B8G8R8_UNORM => Some("B8G8R8_UNORM"), Self::B8G8R8_SNORM => Some("B8G8R8_SNORM"), Self::B8G8R8_USCALED => Some("B8G8R8_USCALED"), Self::B8G8R8_SSCALED => Some("B8G8R8_SSCALED"), Self::B8G8R8_UINT => Some("B8G8R8_UINT"), Self::B8G8R8_SINT => Some("B8G8R8_SINT"), Self::B8G8R8_SRGB => Some("B8G8R8_SRGB"), Self::R8G8B8A8_UNORM => Some("R8G8B8A8_UNORM"), Self::R8G8B8A8_SNORM => Some("R8G8B8A8_SNORM"), Self::R8G8B8A8_USCALED => Some("R8G8B8A8_USCALED"), Self::R8G8B8A8_SSCALED => Some("R8G8B8A8_SSCALED"), Self::R8G8B8A8_UINT => Some("R8G8B8A8_UINT"), Self::R8G8B8A8_SINT => Some("R8G8B8A8_SINT"), Self::R8G8B8A8_SRGB => 
Some("R8G8B8A8_SRGB"), Self::B8G8R8A8_UNORM => Some("B8G8R8A8_UNORM"), Self::B8G8R8A8_SNORM => Some("B8G8R8A8_SNORM"), Self::B8G8R8A8_USCALED => Some("B8G8R8A8_USCALED"), Self::B8G8R8A8_SSCALED => Some("B8G8R8A8_SSCALED"), Self::B8G8R8A8_UINT => Some("B8G8R8A8_UINT"), Self::B8G8R8A8_SINT => Some("B8G8R8A8_SINT"), Self::B8G8R8A8_SRGB => Some("B8G8R8A8_SRGB"), Self::A8B8G8R8_UNORM_PACK32 => Some("A8B8G8R8_UNORM_PACK32"), Self::A8B8G8R8_SNORM_PACK32 => Some("A8B8G8R8_SNORM_PACK32"), Self::A8B8G8R8_USCALED_PACK32 => Some("A8B8G8R8_USCALED_PACK32"), Self::A8B8G8R8_SSCALED_PACK32 => Some("A8B8G8R8_SSCALED_PACK32"), Self::A8B8G8R8_UINT_PACK32 => Some("A8B8G8R8_UINT_PACK32"), Self::A8B8G8R8_SINT_PACK32 => Some("A8B8G8R8_SINT_PACK32"), Self::A8B8G8R8_SRGB_PACK32 => Some("A8B8G8R8_SRGB_PACK32"), Self::A2R10G10B10_UNORM_PACK32 => Some("A2R10G10B10_UNORM_PACK32"), Self::A2R10G10B10_SNORM_PACK32 => Some("A2R10G10B10_SNORM_PACK32"), Self::A2R10G10B10_USCALED_PACK32 => Some("A2R10G10B10_USCALED_PACK32"), Self::A2R10G10B10_SSCALED_PACK32 => Some("A2R10G10B10_SSCALED_PACK32"), Self::A2R10G10B10_UINT_PACK32 => Some("A2R10G10B10_UINT_PACK32"), Self::A2R10G10B10_SINT_PACK32 => Some("A2R10G10B10_SINT_PACK32"), Self::A2B10G10R10_UNORM_PACK32 => Some("A2B10G10R10_UNORM_PACK32"), Self::A2B10G10R10_SNORM_PACK32 => Some("A2B10G10R10_SNORM_PACK32"), Self::A2B10G10R10_USCALED_PACK32 => Some("A2B10G10R10_USCALED_PACK32"), Self::A2B10G10R10_SSCALED_PACK32 => Some("A2B10G10R10_SSCALED_PACK32"), Self::A2B10G10R10_UINT_PACK32 => Some("A2B10G10R10_UINT_PACK32"), Self::A2B10G10R10_SINT_PACK32 => Some("A2B10G10R10_SINT_PACK32"), Self::R16_UNORM => Some("R16_UNORM"), Self::R16_SNORM => Some("R16_SNORM"), Self::R16_USCALED => Some("R16_USCALED"), Self::R16_SSCALED => Some("R16_SSCALED"), Self::R16_UINT => Some("R16_UINT"), Self::R16_SINT => Some("R16_SINT"), Self::R16_SFLOAT => Some("R16_SFLOAT"), Self::R16G16_UNORM => Some("R16G16_UNORM"), Self::R16G16_SNORM => Some("R16G16_SNORM"), Self::R16G16_USCALED => Some("R16G16_USCALED"), Self::R16G16_SSCALED => Some("R16G16_SSCALED"), Self::R16G16_UINT => Some("R16G16_UINT"), Self::R16G16_SINT => Some("R16G16_SINT"), Self::R16G16_SFLOAT => Some("R16G16_SFLOAT"), Self::R16G16B16_UNORM => Some("R16G16B16_UNORM"), Self::R16G16B16_SNORM => Some("R16G16B16_SNORM"), Self::R16G16B16_USCALED => Some("R16G16B16_USCALED"), Self::R16G16B16_SSCALED => Some("R16G16B16_SSCALED"), Self::R16G16B16_UINT => Some("R16G16B16_UINT"), Self::R16G16B16_SINT => Some("R16G16B16_SINT"), Self::R16G16B16_SFLOAT => Some("R16G16B16_SFLOAT"), Self::R16G16B16A16_UNORM => Some("R16G16B16A16_UNORM"), Self::R16G16B16A16_SNORM => Some("R16G16B16A16_SNORM"), Self::R16G16B16A16_USCALED => Some("R16G16B16A16_USCALED"), Self::R16G16B16A16_SSCALED => Some("R16G16B16A16_SSCALED"), Self::R16G16B16A16_UINT => Some("R16G16B16A16_UINT"), Self::R16G16B16A16_SINT => Some("R16G16B16A16_SINT"), Self::R16G16B16A16_SFLOAT => Some("R16G16B16A16_SFLOAT"), Self::R32_UINT => Some("R32_UINT"), Self::R32_SINT => Some("R32_SINT"), Self::R32_SFLOAT => Some("R32_SFLOAT"), Self::R32G32_UINT => Some("R32G32_UINT"), Self::R32G32_SINT => Some("R32G32_SINT"), Self::R32G32_SFLOAT => Some("R32G32_SFLOAT"), Self::R32G32B32_UINT => Some("R32G32B32_UINT"), Self::R32G32B32_SINT => Some("R32G32B32_SINT"), Self::R32G32B32_SFLOAT => Some("R32G32B32_SFLOAT"), Self::R32G32B32A32_UINT => Some("R32G32B32A32_UINT"), Self::R32G32B32A32_SINT => Some("R32G32B32A32_SINT"), Self::R32G32B32A32_SFLOAT => Some("R32G32B32A32_SFLOAT"), Self::R64_UINT => Some("R64_UINT"), 
Self::R64_SINT => Some("R64_SINT"), Self::R64_SFLOAT => Some("R64_SFLOAT"), Self::R64G64_UINT => Some("R64G64_UINT"), Self::R64G64_SINT => Some("R64G64_SINT"), Self::R64G64_SFLOAT => Some("R64G64_SFLOAT"), Self::R64G64B64_UINT => Some("R64G64B64_UINT"), Self::R64G64B64_SINT => Some("R64G64B64_SINT"), Self::R64G64B64_SFLOAT => Some("R64G64B64_SFLOAT"), Self::R64G64B64A64_UINT => Some("R64G64B64A64_UINT"), Self::R64G64B64A64_SINT => Some("R64G64B64A64_SINT"), Self::R64G64B64A64_SFLOAT => Some("R64G64B64A64_SFLOAT"), Self::B10G11R11_UFLOAT_PACK32 => Some("B10G11R11_UFLOAT_PACK32"), Self::E5B9G9R9_UFLOAT_PACK32 => Some("E5B9G9R9_UFLOAT_PACK32"), Self::D16_UNORM => Some("D16_UNORM"), Self::X8_D24_UNORM_PACK32 => Some("X8_D24_UNORM_PACK32"), Self::D32_SFLOAT => Some("D32_SFLOAT"), Self::S8_UINT => Some("S8_UINT"), Self::D16_UNORM_S8_UINT => Some("D16_UNORM_S8_UINT"), Self::D24_UNORM_S8_UINT => Some("D24_UNORM_S8_UINT"), Self::D32_SFLOAT_S8_UINT => Some("D32_SFLOAT_S8_UINT"), Self::BC1_RGB_UNORM_BLOCK => Some("BC1_RGB_UNORM_BLOCK"), Self::BC1_RGB_SRGB_BLOCK => Some("BC1_RGB_SRGB_BLOCK"), Self::BC1_RGBA_UNORM_BLOCK => Some("BC1_RGBA_UNORM_BLOCK"), Self::BC1_RGBA_SRGB_BLOCK => Some("BC1_RGBA_SRGB_BLOCK"), Self::BC2_UNORM_BLOCK => Some("BC2_UNORM_BLOCK"), Self::BC2_SRGB_BLOCK => Some("BC2_SRGB_BLOCK"), Self::BC3_UNORM_BLOCK => Some("BC3_UNORM_BLOCK"), Self::BC3_SRGB_BLOCK => Some("BC3_SRGB_BLOCK"), Self::BC4_UNORM_BLOCK => Some("BC4_UNORM_BLOCK"), Self::BC4_SNORM_BLOCK => Some("BC4_SNORM_BLOCK"), Self::BC5_UNORM_BLOCK => Some("BC5_UNORM_BLOCK"), Self::BC5_SNORM_BLOCK => Some("BC5_SNORM_BLOCK"), Self::BC6H_UFLOAT_BLOCK => Some("BC6H_UFLOAT_BLOCK"), Self::BC6H_SFLOAT_BLOCK => Some("BC6H_SFLOAT_BLOCK"), Self::BC7_UNORM_BLOCK => Some("BC7_UNORM_BLOCK"), Self::BC7_SRGB_BLOCK => Some("BC7_SRGB_BLOCK"), Self::ETC2_R8G8B8_UNORM_BLOCK => Some("ETC2_R8G8B8_UNORM_BLOCK"), Self::ETC2_R8G8B8_SRGB_BLOCK => Some("ETC2_R8G8B8_SRGB_BLOCK"), Self::ETC2_R8G8B8A1_UNORM_BLOCK => Some("ETC2_R8G8B8A1_UNORM_BLOCK"), Self::ETC2_R8G8B8A1_SRGB_BLOCK => Some("ETC2_R8G8B8A1_SRGB_BLOCK"), Self::ETC2_R8G8B8A8_UNORM_BLOCK => Some("ETC2_R8G8B8A8_UNORM_BLOCK"), Self::ETC2_R8G8B8A8_SRGB_BLOCK => Some("ETC2_R8G8B8A8_SRGB_BLOCK"), Self::EAC_R11_UNORM_BLOCK => Some("EAC_R11_UNORM_BLOCK"), Self::EAC_R11_SNORM_BLOCK => Some("EAC_R11_SNORM_BLOCK"), Self::EAC_R11G11_UNORM_BLOCK => Some("EAC_R11G11_UNORM_BLOCK"), Self::EAC_R11G11_SNORM_BLOCK => Some("EAC_R11G11_SNORM_BLOCK"), Self::ASTC_4X4_UNORM_BLOCK => Some("ASTC_4X4_UNORM_BLOCK"), Self::ASTC_4X4_SRGB_BLOCK => Some("ASTC_4X4_SRGB_BLOCK"), Self::ASTC_5X4_UNORM_BLOCK => Some("ASTC_5X4_UNORM_BLOCK"), Self::ASTC_5X4_SRGB_BLOCK => Some("ASTC_5X4_SRGB_BLOCK"), Self::ASTC_5X5_UNORM_BLOCK => Some("ASTC_5X5_UNORM_BLOCK"), Self::ASTC_5X5_SRGB_BLOCK => Some("ASTC_5X5_SRGB_BLOCK"), Self::ASTC_6X5_UNORM_BLOCK => Some("ASTC_6X5_UNORM_BLOCK"), Self::ASTC_6X5_SRGB_BLOCK => Some("ASTC_6X5_SRGB_BLOCK"), Self::ASTC_6X6_UNORM_BLOCK => Some("ASTC_6X6_UNORM_BLOCK"), Self::ASTC_6X6_SRGB_BLOCK => Some("ASTC_6X6_SRGB_BLOCK"), Self::ASTC_8X5_UNORM_BLOCK => Some("ASTC_8X5_UNORM_BLOCK"), Self::ASTC_8X5_SRGB_BLOCK => Some("ASTC_8X5_SRGB_BLOCK"), Self::ASTC_8X6_UNORM_BLOCK => Some("ASTC_8X6_UNORM_BLOCK"), Self::ASTC_8X6_SRGB_BLOCK => Some("ASTC_8X6_SRGB_BLOCK"), Self::ASTC_8X8_UNORM_BLOCK => Some("ASTC_8X8_UNORM_BLOCK"), Self::ASTC_8X8_SRGB_BLOCK => Some("ASTC_8X8_SRGB_BLOCK"), Self::ASTC_10X5_UNORM_BLOCK => Some("ASTC_10X5_UNORM_BLOCK"), Self::ASTC_10X5_SRGB_BLOCK => Some("ASTC_10X5_SRGB_BLOCK"), 
Self::ASTC_10X6_UNORM_BLOCK => Some("ASTC_10X6_UNORM_BLOCK"), Self::ASTC_10X6_SRGB_BLOCK => Some("ASTC_10X6_SRGB_BLOCK"), Self::ASTC_10X8_UNORM_BLOCK => Some("ASTC_10X8_UNORM_BLOCK"), Self::ASTC_10X8_SRGB_BLOCK => Some("ASTC_10X8_SRGB_BLOCK"), Self::ASTC_10X10_UNORM_BLOCK => Some("ASTC_10X10_UNORM_BLOCK"), Self::ASTC_10X10_SRGB_BLOCK => Some("ASTC_10X10_SRGB_BLOCK"), Self::ASTC_12X10_UNORM_BLOCK => Some("ASTC_12X10_UNORM_BLOCK"), Self::ASTC_12X10_SRGB_BLOCK => Some("ASTC_12X10_SRGB_BLOCK"), Self::ASTC_12X12_UNORM_BLOCK => Some("ASTC_12X12_UNORM_BLOCK"), Self::ASTC_12X12_SRGB_BLOCK => Some("ASTC_12X12_SRGB_BLOCK"), Self::PVRTC1_2BPP_UNORM_BLOCK_IMG => Some("PVRTC1_2BPP_UNORM_BLOCK_IMG"), Self::PVRTC1_4BPP_UNORM_BLOCK_IMG => Some("PVRTC1_4BPP_UNORM_BLOCK_IMG"), Self::PVRTC2_2BPP_UNORM_BLOCK_IMG => Some("PVRTC2_2BPP_UNORM_BLOCK_IMG"), Self::PVRTC2_4BPP_UNORM_BLOCK_IMG => Some("PVRTC2_4BPP_UNORM_BLOCK_IMG"), Self::PVRTC1_2BPP_SRGB_BLOCK_IMG => Some("PVRTC1_2BPP_SRGB_BLOCK_IMG"), Self::PVRTC1_4BPP_SRGB_BLOCK_IMG => Some("PVRTC1_4BPP_SRGB_BLOCK_IMG"), Self::PVRTC2_2BPP_SRGB_BLOCK_IMG => Some("PVRTC2_2BPP_SRGB_BLOCK_IMG"), Self::PVRTC2_4BPP_SRGB_BLOCK_IMG => Some("PVRTC2_4BPP_SRGB_BLOCK_IMG"), Self::R16G16_S10_5_NV => Some("R16G16_S10_5_NV"), Self::A1B5G5R5_UNORM_PACK16_KHR => Some("A1B5G5R5_UNORM_PACK16_KHR"), Self::A8_UNORM_KHR => Some("A8_UNORM_KHR"), Self::G8B8G8R8_422_UNORM => Some("G8B8G8R8_422_UNORM"), Self::B8G8R8G8_422_UNORM => Some("B8G8R8G8_422_UNORM"), Self::G8_B8_R8_3PLANE_420_UNORM => Some("G8_B8_R8_3PLANE_420_UNORM"), Self::G8_B8R8_2PLANE_420_UNORM => Some("G8_B8R8_2PLANE_420_UNORM"), Self::G8_B8_R8_3PLANE_422_UNORM => Some("G8_B8_R8_3PLANE_422_UNORM"), Self::G8_B8R8_2PLANE_422_UNORM => Some("G8_B8R8_2PLANE_422_UNORM"), Self::G8_B8_R8_3PLANE_444_UNORM => Some("G8_B8_R8_3PLANE_444_UNORM"), Self::R10X6_UNORM_PACK16 => Some("R10X6_UNORM_PACK16"), Self::R10X6G10X6_UNORM_2PACK16 => Some("R10X6G10X6_UNORM_2PACK16"), Self::R10X6G10X6B10X6A10X6_UNORM_4PACK16 => Some("R10X6G10X6B10X6A10X6_UNORM_4PACK16"), Self::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 => { Some("G10X6B10X6G10X6R10X6_422_UNORM_4PACK16") } Self::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 => { Some("B10X6G10X6R10X6G10X6_422_UNORM_4PACK16") } Self::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 => { Some("G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16") } Self::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 => { Some("G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16") } Self::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 => { Some("G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16") } Self::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 => { Some("G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16") } Self::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 => { Some("G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16") } Self::R12X4_UNORM_PACK16 => Some("R12X4_UNORM_PACK16"), Self::R12X4G12X4_UNORM_2PACK16 => Some("R12X4G12X4_UNORM_2PACK16"), Self::R12X4G12X4B12X4A12X4_UNORM_4PACK16 => Some("R12X4G12X4B12X4A12X4_UNORM_4PACK16"), Self::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 => { Some("G12X4B12X4G12X4R12X4_422_UNORM_4PACK16") } Self::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 => { Some("B12X4G12X4R12X4G12X4_422_UNORM_4PACK16") } Self::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 => { Some("G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16") } Self::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 => { Some("G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16") } Self::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 => { Some("G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16") } Self::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 => { 
Some("G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16") } Self::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 => { Some("G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16") } Self::G16B16G16R16_422_UNORM => Some("G16B16G16R16_422_UNORM"), Self::B16G16R16G16_422_UNORM => Some("B16G16R16G16_422_UNORM"), Self::G16_B16_R16_3PLANE_420_UNORM => Some("G16_B16_R16_3PLANE_420_UNORM"), Self::G16_B16R16_2PLANE_420_UNORM => Some("G16_B16R16_2PLANE_420_UNORM"), Self::G16_B16_R16_3PLANE_422_UNORM => Some("G16_B16_R16_3PLANE_422_UNORM"), Self::G16_B16R16_2PLANE_422_UNORM => Some("G16_B16R16_2PLANE_422_UNORM"), Self::G16_B16_R16_3PLANE_444_UNORM => Some("G16_B16_R16_3PLANE_444_UNORM"), Self::G8_B8R8_2PLANE_444_UNORM => Some("G8_B8R8_2PLANE_444_UNORM"), Self::G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 => { Some("G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16") } Self::G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 => { Some("G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16") } Self::G16_B16R16_2PLANE_444_UNORM => Some("G16_B16R16_2PLANE_444_UNORM"), Self::A4R4G4B4_UNORM_PACK16 => Some("A4R4G4B4_UNORM_PACK16"), Self::A4B4G4R4_UNORM_PACK16 => Some("A4B4G4R4_UNORM_PACK16"), Self::ASTC_4X4_SFLOAT_BLOCK => Some("ASTC_4X4_SFLOAT_BLOCK"), Self::ASTC_5X4_SFLOAT_BLOCK => Some("ASTC_5X4_SFLOAT_BLOCK"), Self::ASTC_5X5_SFLOAT_BLOCK => Some("ASTC_5X5_SFLOAT_BLOCK"), Self::ASTC_6X5_SFLOAT_BLOCK => Some("ASTC_6X5_SFLOAT_BLOCK"), Self::ASTC_6X6_SFLOAT_BLOCK => Some("ASTC_6X6_SFLOAT_BLOCK"), Self::ASTC_8X5_SFLOAT_BLOCK => Some("ASTC_8X5_SFLOAT_BLOCK"), Self::ASTC_8X6_SFLOAT_BLOCK => Some("ASTC_8X6_SFLOAT_BLOCK"), Self::ASTC_8X8_SFLOAT_BLOCK => Some("ASTC_8X8_SFLOAT_BLOCK"), Self::ASTC_10X5_SFLOAT_BLOCK => Some("ASTC_10X5_SFLOAT_BLOCK"), Self::ASTC_10X6_SFLOAT_BLOCK => Some("ASTC_10X6_SFLOAT_BLOCK"), Self::ASTC_10X8_SFLOAT_BLOCK => Some("ASTC_10X8_SFLOAT_BLOCK"), Self::ASTC_10X10_SFLOAT_BLOCK => Some("ASTC_10X10_SFLOAT_BLOCK"), Self::ASTC_12X10_SFLOAT_BLOCK => Some("ASTC_12X10_SFLOAT_BLOCK"), Self::ASTC_12X12_SFLOAT_BLOCK => Some("ASTC_12X12_SFLOAT_BLOCK"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for FormatFeatureFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN : & [(Flags , & str)] = & [(FormatFeatureFlags :: SAMPLED_IMAGE . 0 , "SAMPLED_IMAGE") , (FormatFeatureFlags :: STORAGE_IMAGE . 0 , "STORAGE_IMAGE") , (FormatFeatureFlags :: STORAGE_IMAGE_ATOMIC . 0 , "STORAGE_IMAGE_ATOMIC") , (FormatFeatureFlags :: UNIFORM_TEXEL_BUFFER . 0 , "UNIFORM_TEXEL_BUFFER") , (FormatFeatureFlags :: STORAGE_TEXEL_BUFFER . 0 , "STORAGE_TEXEL_BUFFER") , (FormatFeatureFlags :: STORAGE_TEXEL_BUFFER_ATOMIC . 0 , "STORAGE_TEXEL_BUFFER_ATOMIC") , (FormatFeatureFlags :: VERTEX_BUFFER . 0 , "VERTEX_BUFFER") , (FormatFeatureFlags :: COLOR_ATTACHMENT . 0 , "COLOR_ATTACHMENT") , (FormatFeatureFlags :: COLOR_ATTACHMENT_BLEND . 0 , "COLOR_ATTACHMENT_BLEND") , (FormatFeatureFlags :: DEPTH_STENCIL_ATTACHMENT . 0 , "DEPTH_STENCIL_ATTACHMENT") , (FormatFeatureFlags :: BLIT_SRC . 0 , "BLIT_SRC") , (FormatFeatureFlags :: BLIT_DST . 0 , "BLIT_DST") , (FormatFeatureFlags :: SAMPLED_IMAGE_FILTER_LINEAR . 0 , "SAMPLED_IMAGE_FILTER_LINEAR") , (FormatFeatureFlags :: VIDEO_DECODE_OUTPUT_KHR . 0 , "VIDEO_DECODE_OUTPUT_KHR") , (FormatFeatureFlags :: VIDEO_DECODE_DPB_KHR . 0 , "VIDEO_DECODE_DPB_KHR") , (FormatFeatureFlags :: ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR . 0 , "ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR") , (FormatFeatureFlags :: SAMPLED_IMAGE_FILTER_CUBIC_EXT . 
0 , "SAMPLED_IMAGE_FILTER_CUBIC_EXT") , (FormatFeatureFlags :: FRAGMENT_DENSITY_MAP_EXT . 0 , "FRAGMENT_DENSITY_MAP_EXT") , (FormatFeatureFlags :: FRAGMENT_SHADING_RATE_ATTACHMENT_KHR . 0 , "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR") , (FormatFeatureFlags :: VIDEO_ENCODE_INPUT_KHR . 0 , "VIDEO_ENCODE_INPUT_KHR") , (FormatFeatureFlags :: VIDEO_ENCODE_DPB_KHR . 0 , "VIDEO_ENCODE_DPB_KHR") , (FormatFeatureFlags :: TRANSFER_SRC . 0 , "TRANSFER_SRC") , (FormatFeatureFlags :: TRANSFER_DST . 0 , "TRANSFER_DST") , (FormatFeatureFlags :: MIDPOINT_CHROMA_SAMPLES . 0 , "MIDPOINT_CHROMA_SAMPLES") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE") , (FormatFeatureFlags :: DISJOINT . 0 , "DISJOINT") , (FormatFeatureFlags :: COSITED_CHROMA_SAMPLES . 0 , "COSITED_CHROMA_SAMPLES") , (FormatFeatureFlags :: SAMPLED_IMAGE_FILTER_MINMAX . 0 , "SAMPLED_IMAGE_FILTER_MINMAX")] ; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for FormatFeatureFlags2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN : & [(Flags64 , & str)] = & [(FormatFeatureFlags2 :: SAMPLED_IMAGE . 0 , "SAMPLED_IMAGE") , (FormatFeatureFlags2 :: STORAGE_IMAGE . 0 , "STORAGE_IMAGE") , (FormatFeatureFlags2 :: STORAGE_IMAGE_ATOMIC . 0 , "STORAGE_IMAGE_ATOMIC") , (FormatFeatureFlags2 :: UNIFORM_TEXEL_BUFFER . 0 , "UNIFORM_TEXEL_BUFFER") , (FormatFeatureFlags2 :: STORAGE_TEXEL_BUFFER . 0 , "STORAGE_TEXEL_BUFFER") , (FormatFeatureFlags2 :: STORAGE_TEXEL_BUFFER_ATOMIC . 0 , "STORAGE_TEXEL_BUFFER_ATOMIC") , (FormatFeatureFlags2 :: VERTEX_BUFFER . 0 , "VERTEX_BUFFER") , (FormatFeatureFlags2 :: COLOR_ATTACHMENT . 0 , "COLOR_ATTACHMENT") , (FormatFeatureFlags2 :: COLOR_ATTACHMENT_BLEND . 0 , "COLOR_ATTACHMENT_BLEND") , (FormatFeatureFlags2 :: DEPTH_STENCIL_ATTACHMENT . 0 , "DEPTH_STENCIL_ATTACHMENT") , (FormatFeatureFlags2 :: BLIT_SRC . 0 , "BLIT_SRC") , (FormatFeatureFlags2 :: BLIT_DST . 0 , "BLIT_DST") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_FILTER_LINEAR . 0 , "SAMPLED_IMAGE_FILTER_LINEAR") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_FILTER_CUBIC . 0 , "SAMPLED_IMAGE_FILTER_CUBIC") , (FormatFeatureFlags2 :: TRANSFER_SRC . 0 , "TRANSFER_SRC") , (FormatFeatureFlags2 :: TRANSFER_DST . 0 , "TRANSFER_DST") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_FILTER_MINMAX . 0 , "SAMPLED_IMAGE_FILTER_MINMAX") , (FormatFeatureFlags2 :: MIDPOINT_CHROMA_SAMPLES . 0 , "MIDPOINT_CHROMA_SAMPLES") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE . 
0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE") , (FormatFeatureFlags2 :: DISJOINT . 0 , "DISJOINT") , (FormatFeatureFlags2 :: COSITED_CHROMA_SAMPLES . 0 , "COSITED_CHROMA_SAMPLES") , (FormatFeatureFlags2 :: STORAGE_READ_WITHOUT_FORMAT . 0 , "STORAGE_READ_WITHOUT_FORMAT") , (FormatFeatureFlags2 :: STORAGE_WRITE_WITHOUT_FORMAT . 0 , "STORAGE_WRITE_WITHOUT_FORMAT") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_DEPTH_COMPARISON . 0 , "SAMPLED_IMAGE_DEPTH_COMPARISON") , (FormatFeatureFlags2 :: VIDEO_DECODE_OUTPUT_KHR . 0 , "VIDEO_DECODE_OUTPUT_KHR") , (FormatFeatureFlags2 :: VIDEO_DECODE_DPB_KHR . 0 , "VIDEO_DECODE_DPB_KHR") , (FormatFeatureFlags2 :: ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR . 0 , "ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR") , (FormatFeatureFlags2 :: FRAGMENT_DENSITY_MAP_EXT . 0 , "FRAGMENT_DENSITY_MAP_EXT") , (FormatFeatureFlags2 :: FRAGMENT_SHADING_RATE_ATTACHMENT_KHR . 0 , "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR") , (FormatFeatureFlags2 :: HOST_IMAGE_TRANSFER_EXT . 0 , "HOST_IMAGE_TRANSFER_EXT") , (FormatFeatureFlags2 :: VIDEO_ENCODE_INPUT_KHR . 0 , "VIDEO_ENCODE_INPUT_KHR") , (FormatFeatureFlags2 :: VIDEO_ENCODE_DPB_KHR . 0 , "VIDEO_ENCODE_DPB_KHR") , (FormatFeatureFlags2 :: LINEAR_COLOR_ATTACHMENT_NV . 0 , "LINEAR_COLOR_ATTACHMENT_NV") , (FormatFeatureFlags2 :: WEIGHT_IMAGE_QCOM . 0 , "WEIGHT_IMAGE_QCOM") , (FormatFeatureFlags2 :: WEIGHT_SAMPLED_IMAGE_QCOM . 0 , "WEIGHT_SAMPLED_IMAGE_QCOM") , (FormatFeatureFlags2 :: BLOCK_MATCHING_QCOM . 0 , "BLOCK_MATCHING_QCOM") , (FormatFeatureFlags2 :: BOX_FILTER_SAMPLED_QCOM . 0 , "BOX_FILTER_SAMPLED_QCOM") , (FormatFeatureFlags2 :: OPTICAL_FLOW_IMAGE_NV . 0 , "OPTICAL_FLOW_IMAGE_NV") , (FormatFeatureFlags2 :: OPTICAL_FLOW_VECTOR_NV . 0 , "OPTICAL_FLOW_VECTOR_NV") , (FormatFeatureFlags2 :: OPTICAL_FLOW_COST_NV . 
0 , "OPTICAL_FLOW_COST_NV")] ; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for FragmentShadingRateCombinerOpKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::KEEP => Some("KEEP"), Self::REPLACE => Some("REPLACE"), Self::MIN => Some("MIN"), Self::MAX => Some("MAX"), Self::MUL => Some("MUL"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for FragmentShadingRateNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TYPE_1_INVOCATION_PER_PIXEL => Some("TYPE_1_INVOCATION_PER_PIXEL"), Self::TYPE_1_INVOCATION_PER_1X2_PIXELS => Some("TYPE_1_INVOCATION_PER_1X2_PIXELS"), Self::TYPE_1_INVOCATION_PER_2X1_PIXELS => Some("TYPE_1_INVOCATION_PER_2X1_PIXELS"), Self::TYPE_1_INVOCATION_PER_2X2_PIXELS => Some("TYPE_1_INVOCATION_PER_2X2_PIXELS"), Self::TYPE_1_INVOCATION_PER_2X4_PIXELS => Some("TYPE_1_INVOCATION_PER_2X4_PIXELS"), Self::TYPE_1_INVOCATION_PER_4X2_PIXELS => Some("TYPE_1_INVOCATION_PER_4X2_PIXELS"), Self::TYPE_1_INVOCATION_PER_4X4_PIXELS => Some("TYPE_1_INVOCATION_PER_4X4_PIXELS"), Self::TYPE_2_INVOCATIONS_PER_PIXEL => Some("TYPE_2_INVOCATIONS_PER_PIXEL"), Self::TYPE_4_INVOCATIONS_PER_PIXEL => Some("TYPE_4_INVOCATIONS_PER_PIXEL"), Self::TYPE_8_INVOCATIONS_PER_PIXEL => Some("TYPE_8_INVOCATIONS_PER_PIXEL"), Self::TYPE_16_INVOCATIONS_PER_PIXEL => Some("TYPE_16_INVOCATIONS_PER_PIXEL"), Self::NO_INVOCATIONS => Some("NO_INVOCATIONS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for FragmentShadingRateTypeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FRAGMENT_SIZE => Some("FRAGMENT_SIZE"), Self::ENUMS => Some("ENUMS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for FrameBoundaryFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(FrameBoundaryFlagsEXT::FRAME_END.0, "FRAME_END")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for FramebufferCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(FramebufferCreateFlags::IMAGELESS.0, "IMAGELESS")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for FrontFace { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::COUNTER_CLOCKWISE => Some("COUNTER_CLOCKWISE"), Self::CLOCKWISE => Some("CLOCKWISE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for FullScreenExclusiveEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEFAULT => Some("DEFAULT"), Self::ALLOWED => Some("ALLOWED"), Self::DISALLOWED => Some("DISALLOWED"), Self::APPLICATION_CONTROLLED => Some("APPLICATION_CONTROLLED"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for GeometryFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (GeometryFlagsKHR::OPAQUE.0, "OPAQUE"), ( GeometryFlagsKHR::NO_DUPLICATE_ANY_HIT_INVOCATION.0, "NO_DUPLICATE_ANY_HIT_INVOCATION", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for GeometryInstanceFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( GeometryInstanceFlagsKHR::TRIANGLE_FACING_CULL_DISABLE.0, "TRIANGLE_FACING_CULL_DISABLE", ), ( 
GeometryInstanceFlagsKHR::TRIANGLE_FLIP_FACING.0, "TRIANGLE_FLIP_FACING", ), (GeometryInstanceFlagsKHR::FORCE_OPAQUE.0, "FORCE_OPAQUE"), ( GeometryInstanceFlagsKHR::FORCE_NO_OPAQUE.0, "FORCE_NO_OPAQUE", ), ( GeometryInstanceFlagsKHR::FORCE_OPACITY_MICROMAP_2_STATE_EXT.0, "FORCE_OPACITY_MICROMAP_2_STATE_EXT", ), ( GeometryInstanceFlagsKHR::DISABLE_OPACITY_MICROMAPS_EXT.0, "DISABLE_OPACITY_MICROMAPS_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for GeometryTypeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TRIANGLES => Some("TRIANGLES"), Self::AABBS => Some("AABBS"), Self::INSTANCES => Some("INSTANCES"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for GraphicsPipelineLibraryFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( GraphicsPipelineLibraryFlagsEXT::VERTEX_INPUT_INTERFACE.0, "VERTEX_INPUT_INTERFACE", ), ( GraphicsPipelineLibraryFlagsEXT::PRE_RASTERIZATION_SHADERS.0, "PRE_RASTERIZATION_SHADERS", ), ( GraphicsPipelineLibraryFlagsEXT::FRAGMENT_SHADER.0, "FRAGMENT_SHADER", ), ( GraphicsPipelineLibraryFlagsEXT::FRAGMENT_OUTPUT_INTERFACE.0, "FRAGMENT_OUTPUT_INTERFACE", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for HeadlessSurfaceCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for HostImageCopyFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(HostImageCopyFlagsEXT::MEMCPY.0, "MEMCPY")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for IOSSurfaceCreateFlagsMVK { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageAspectFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ImageAspectFlags::COLOR.0, "COLOR"), (ImageAspectFlags::DEPTH.0, "DEPTH"), (ImageAspectFlags::STENCIL.0, "STENCIL"), (ImageAspectFlags::METADATA.0, "METADATA"), (ImageAspectFlags::MEMORY_PLANE_0_EXT.0, "MEMORY_PLANE_0_EXT"), (ImageAspectFlags::MEMORY_PLANE_1_EXT.0, "MEMORY_PLANE_1_EXT"), (ImageAspectFlags::MEMORY_PLANE_2_EXT.0, "MEMORY_PLANE_2_EXT"), (ImageAspectFlags::MEMORY_PLANE_3_EXT.0, "MEMORY_PLANE_3_EXT"), (ImageAspectFlags::PLANE_0.0, "PLANE_0"), (ImageAspectFlags::PLANE_1.0, "PLANE_1"), (ImageAspectFlags::PLANE_2.0, "PLANE_2"), (ImageAspectFlags::NONE.0, "NONE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageCompressionFixedRateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ImageCompressionFixedRateFlagsEXT::NONE.0, "NONE"), (ImageCompressionFixedRateFlagsEXT::TYPE_1BPC.0, "TYPE_1BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_2BPC.0, "TYPE_2BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_3BPC.0, "TYPE_3BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_4BPC.0, "TYPE_4BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_5BPC.0, "TYPE_5BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_6BPC.0, "TYPE_6BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_7BPC.0, "TYPE_7BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_8BPC.0, "TYPE_8BPC"), (ImageCompressionFixedRateFlagsEXT::TYPE_9BPC.0, "TYPE_9BPC"), ( ImageCompressionFixedRateFlagsEXT::TYPE_10BPC.0, "TYPE_10BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_11BPC.0, "TYPE_11BPC", ), ( 
ImageCompressionFixedRateFlagsEXT::TYPE_12BPC.0, "TYPE_12BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_13BPC.0, "TYPE_13BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_14BPC.0, "TYPE_14BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_15BPC.0, "TYPE_15BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_16BPC.0, "TYPE_16BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_17BPC.0, "TYPE_17BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_18BPC.0, "TYPE_18BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_19BPC.0, "TYPE_19BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_20BPC.0, "TYPE_20BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_21BPC.0, "TYPE_21BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_22BPC.0, "TYPE_22BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_23BPC.0, "TYPE_23BPC", ), ( ImageCompressionFixedRateFlagsEXT::TYPE_24BPC.0, "TYPE_24BPC", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageCompressionFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ImageCompressionFlagsEXT::DEFAULT.0, "DEFAULT"), ( ImageCompressionFlagsEXT::FIXED_RATE_DEFAULT.0, "FIXED_RATE_DEFAULT", ), ( ImageCompressionFlagsEXT::FIXED_RATE_EXPLICIT.0, "FIXED_RATE_EXPLICIT", ), (ImageCompressionFlagsEXT::DISABLED.0, "DISABLED"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageConstraintsInfoFlagsFUCHSIA { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( ImageConstraintsInfoFlagsFUCHSIA::CPU_READ_RARELY.0, "CPU_READ_RARELY", ), ( ImageConstraintsInfoFlagsFUCHSIA::CPU_READ_OFTEN.0, "CPU_READ_OFTEN", ), ( ImageConstraintsInfoFlagsFUCHSIA::CPU_WRITE_RARELY.0, "CPU_WRITE_RARELY", ), ( ImageConstraintsInfoFlagsFUCHSIA::CPU_WRITE_OFTEN.0, "CPU_WRITE_OFTEN", ), ( ImageConstraintsInfoFlagsFUCHSIA::PROTECTED_OPTIONAL.0, "PROTECTED_OPTIONAL", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ImageCreateFlags::SPARSE_BINDING.0, "SPARSE_BINDING"), (ImageCreateFlags::SPARSE_RESIDENCY.0, "SPARSE_RESIDENCY"), (ImageCreateFlags::SPARSE_ALIASED.0, "SPARSE_ALIASED"), (ImageCreateFlags::MUTABLE_FORMAT.0, "MUTABLE_FORMAT"), (ImageCreateFlags::CUBE_COMPATIBLE.0, "CUBE_COMPATIBLE"), (ImageCreateFlags::CORNER_SAMPLED_NV.0, "CORNER_SAMPLED_NV"), ( ImageCreateFlags::SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT.0, "SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT", ), (ImageCreateFlags::SUBSAMPLED_EXT.0, "SUBSAMPLED_EXT"), ( ImageCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0, "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT", ), ( ImageCreateFlags::MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXT.0, "MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXT", ), ( ImageCreateFlags::TYPE_2D_VIEW_COMPATIBLE_EXT.0, "TYPE_2D_VIEW_COMPATIBLE_EXT", ), ( ImageCreateFlags::FRAGMENT_DENSITY_MAP_OFFSET_QCOM.0, "FRAGMENT_DENSITY_MAP_OFFSET_QCOM", ), ( ImageCreateFlags::VIDEO_PROFILE_INDEPENDENT_KHR.0, "VIDEO_PROFILE_INDEPENDENT_KHR", ), (ImageCreateFlags::ALIAS.0, "ALIAS"), ( ImageCreateFlags::SPLIT_INSTANCE_BIND_REGIONS.0, "SPLIT_INSTANCE_BIND_REGIONS", ), ( ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE.0, "TYPE_2D_ARRAY_COMPATIBLE", ), ( ImageCreateFlags::BLOCK_TEXEL_VIEW_COMPATIBLE.0, "BLOCK_TEXEL_VIEW_COMPATIBLE", ), (ImageCreateFlags::EXTENDED_USAGE.0, "EXTENDED_USAGE"), (ImageCreateFlags::PROTECTED.0, "PROTECTED"), (ImageCreateFlags::DISJOINT.0, "DISJOINT"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for 
ImageFormatConstraintsFlagsFUCHSIA { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageLayout { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UNDEFINED => Some("UNDEFINED"), Self::GENERAL => Some("GENERAL"), Self::COLOR_ATTACHMENT_OPTIMAL => Some("COLOR_ATTACHMENT_OPTIMAL"), Self::DEPTH_STENCIL_ATTACHMENT_OPTIMAL => Some("DEPTH_STENCIL_ATTACHMENT_OPTIMAL"), Self::DEPTH_STENCIL_READ_ONLY_OPTIMAL => Some("DEPTH_STENCIL_READ_ONLY_OPTIMAL"), Self::SHADER_READ_ONLY_OPTIMAL => Some("SHADER_READ_ONLY_OPTIMAL"), Self::TRANSFER_SRC_OPTIMAL => Some("TRANSFER_SRC_OPTIMAL"), Self::TRANSFER_DST_OPTIMAL => Some("TRANSFER_DST_OPTIMAL"), Self::PREINITIALIZED => Some("PREINITIALIZED"), Self::PRESENT_SRC_KHR => Some("PRESENT_SRC_KHR"), Self::VIDEO_DECODE_DST_KHR => Some("VIDEO_DECODE_DST_KHR"), Self::VIDEO_DECODE_SRC_KHR => Some("VIDEO_DECODE_SRC_KHR"), Self::VIDEO_DECODE_DPB_KHR => Some("VIDEO_DECODE_DPB_KHR"), Self::SHARED_PRESENT_KHR => Some("SHARED_PRESENT_KHR"), Self::FRAGMENT_DENSITY_MAP_OPTIMAL_EXT => Some("FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"), Self::FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR => { Some("FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR") } Self::RENDERING_LOCAL_READ_KHR => Some("RENDERING_LOCAL_READ_KHR"), Self::VIDEO_ENCODE_DST_KHR => Some("VIDEO_ENCODE_DST_KHR"), Self::VIDEO_ENCODE_SRC_KHR => Some("VIDEO_ENCODE_SRC_KHR"), Self::VIDEO_ENCODE_DPB_KHR => Some("VIDEO_ENCODE_DPB_KHR"), Self::ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT => { Some("ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT") } Self::DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL => { Some("DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL") } Self::DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL => { Some("DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL") } Self::DEPTH_ATTACHMENT_OPTIMAL => Some("DEPTH_ATTACHMENT_OPTIMAL"), Self::DEPTH_READ_ONLY_OPTIMAL => Some("DEPTH_READ_ONLY_OPTIMAL"), Self::STENCIL_ATTACHMENT_OPTIMAL => Some("STENCIL_ATTACHMENT_OPTIMAL"), Self::STENCIL_READ_ONLY_OPTIMAL => Some("STENCIL_READ_ONLY_OPTIMAL"), Self::READ_ONLY_OPTIMAL => Some("READ_ONLY_OPTIMAL"), Self::ATTACHMENT_OPTIMAL => Some("ATTACHMENT_OPTIMAL"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ImagePipeSurfaceCreateFlagsFUCHSIA { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageTiling { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::OPTIMAL => Some("OPTIMAL"), Self::LINEAR => Some("LINEAR"), Self::DRM_FORMAT_MODIFIER_EXT => Some("DRM_FORMAT_MODIFIER_EXT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ImageType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TYPE_1D => Some("TYPE_1D"), Self::TYPE_2D => Some("TYPE_2D"), Self::TYPE_3D => Some("TYPE_3D"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ImageUsageFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ImageUsageFlags::TRANSFER_SRC.0, "TRANSFER_SRC"), (ImageUsageFlags::TRANSFER_DST.0, "TRANSFER_DST"), (ImageUsageFlags::SAMPLED.0, "SAMPLED"), (ImageUsageFlags::STORAGE.0, "STORAGE"), (ImageUsageFlags::COLOR_ATTACHMENT.0, "COLOR_ATTACHMENT"), ( 
ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT.0, "DEPTH_STENCIL_ATTACHMENT", ), ( ImageUsageFlags::TRANSIENT_ATTACHMENT.0, "TRANSIENT_ATTACHMENT", ), (ImageUsageFlags::INPUT_ATTACHMENT.0, "INPUT_ATTACHMENT"), ( ImageUsageFlags::VIDEO_DECODE_DST_KHR.0, "VIDEO_DECODE_DST_KHR", ), ( ImageUsageFlags::VIDEO_DECODE_SRC_KHR.0, "VIDEO_DECODE_SRC_KHR", ), ( ImageUsageFlags::VIDEO_DECODE_DPB_KHR.0, "VIDEO_DECODE_DPB_KHR", ), ( ImageUsageFlags::FRAGMENT_DENSITY_MAP_EXT.0, "FRAGMENT_DENSITY_MAP_EXT", ), ( ImageUsageFlags::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0, "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR", ), (ImageUsageFlags::HOST_TRANSFER_EXT.0, "HOST_TRANSFER_EXT"), ( ImageUsageFlags::VIDEO_ENCODE_DST_KHR.0, "VIDEO_ENCODE_DST_KHR", ), ( ImageUsageFlags::VIDEO_ENCODE_SRC_KHR.0, "VIDEO_ENCODE_SRC_KHR", ), ( ImageUsageFlags::VIDEO_ENCODE_DPB_KHR.0, "VIDEO_ENCODE_DPB_KHR", ), ( ImageUsageFlags::ATTACHMENT_FEEDBACK_LOOP_EXT.0, "ATTACHMENT_FEEDBACK_LOOP_EXT", ), ( ImageUsageFlags::INVOCATION_MASK_HUAWEI.0, "INVOCATION_MASK_HUAWEI", ), (ImageUsageFlags::SAMPLE_WEIGHT_QCOM.0, "SAMPLE_WEIGHT_QCOM"), ( ImageUsageFlags::SAMPLE_BLOCK_MATCH_QCOM.0, "SAMPLE_BLOCK_MATCH_QCOM", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageViewCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( ImageViewCreateFlags::FRAGMENT_DENSITY_MAP_DYNAMIC_EXT.0, "FRAGMENT_DENSITY_MAP_DYNAMIC_EXT", ), ( ImageViewCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0, "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT", ), ( ImageViewCreateFlags::FRAGMENT_DENSITY_MAP_DEFERRED_EXT.0, "FRAGMENT_DENSITY_MAP_DEFERRED_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ImageViewType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TYPE_1D => Some("TYPE_1D"), Self::TYPE_2D => Some("TYPE_2D"), Self::TYPE_3D => Some("TYPE_3D"), Self::CUBE => Some("CUBE"), Self::TYPE_1D_ARRAY => Some("TYPE_1D_ARRAY"), Self::TYPE_2D_ARRAY => Some("TYPE_2D_ARRAY"), Self::CUBE_ARRAY => Some("CUBE_ARRAY"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for IndexType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UINT16 => Some("UINT16"), Self::UINT32 => Some("UINT32"), Self::NONE_KHR => Some("NONE_KHR"), Self::UINT8_KHR => Some("UINT8_KHR"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for IndirectCommandsLayoutUsageFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( IndirectCommandsLayoutUsageFlagsNV::EXPLICIT_PREPROCESS.0, "EXPLICIT_PREPROCESS", ), ( IndirectCommandsLayoutUsageFlagsNV::INDEXED_SEQUENCES.0, "INDEXED_SEQUENCES", ), ( IndirectCommandsLayoutUsageFlagsNV::UNORDERED_SEQUENCES.0, "UNORDERED_SEQUENCES", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for IndirectCommandsTokenTypeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::SHADER_GROUP => Some("SHADER_GROUP"), Self::STATE_FLAGS => Some("STATE_FLAGS"), Self::INDEX_BUFFER => Some("INDEX_BUFFER"), Self::VERTEX_BUFFER => Some("VERTEX_BUFFER"), Self::PUSH_CONSTANT => Some("PUSH_CONSTANT"), Self::DRAW_INDEXED => Some("DRAW_INDEXED"), Self::DRAW => Some("DRAW"), Self::DRAW_TASKS => Some("DRAW_TASKS"), Self::DRAW_MESH_TASKS => Some("DRAW_MESH_TASKS"), Self::PIPELINE => Some("PIPELINE"), Self::DISPATCH => Some("DISPATCH"), _ => None, }; if let Some(x) = name 
{ f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for IndirectStateFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(IndirectStateFlagsNV::FLAG_FRONTFACE.0, "FLAG_FRONTFACE")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for InstanceCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( InstanceCreateFlags::ENUMERATE_PORTABILITY_KHR.0, "ENUMERATE_PORTABILITY_KHR", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for InternalAllocationType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::EXECUTABLE => Some("EXECUTABLE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for LatencyMarkerNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::SIMULATION_START => Some("SIMULATION_START"), Self::SIMULATION_END => Some("SIMULATION_END"), Self::RENDERSUBMIT_START => Some("RENDERSUBMIT_START"), Self::RENDERSUBMIT_END => Some("RENDERSUBMIT_END"), Self::PRESENT_START => Some("PRESENT_START"), Self::PRESENT_END => Some("PRESENT_END"), Self::INPUT_SAMPLE => Some("INPUT_SAMPLE"), Self::TRIGGER_FLASH => Some("TRIGGER_FLASH"), Self::OUT_OF_BAND_RENDERSUBMIT_START => Some("OUT_OF_BAND_RENDERSUBMIT_START"), Self::OUT_OF_BAND_RENDERSUBMIT_END => Some("OUT_OF_BAND_RENDERSUBMIT_END"), Self::OUT_OF_BAND_PRESENT_START => Some("OUT_OF_BAND_PRESENT_START"), Self::OUT_OF_BAND_PRESENT_END => Some("OUT_OF_BAND_PRESENT_END"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for LayerSettingTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BOOL32 => Some("BOOL32"), Self::INT32 => Some("INT32"), Self::INT64 => Some("INT64"), Self::UINT32 => Some("UINT32"), Self::UINT64 => Some("UINT64"), Self::FLOAT32 => Some("FLOAT32"), Self::FLOAT64 => Some("FLOAT64"), Self::STRING => Some("STRING"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for LayeredDriverUnderlyingApiMSFT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NONE => Some("NONE"), Self::D3D12 => Some("D3D12"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for LineRasterizationModeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEFAULT => Some("DEFAULT"), Self::RECTANGULAR => Some("RECTANGULAR"), Self::BRESENHAM => Some("BRESENHAM"), Self::RECTANGULAR_SMOOTH => Some("RECTANGULAR_SMOOTH"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for LogicOp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::CLEAR => Some("CLEAR"), Self::AND => Some("AND"), Self::AND_REVERSE => Some("AND_REVERSE"), Self::COPY => Some("COPY"), Self::AND_INVERTED => Some("AND_INVERTED"), Self::NO_OP => Some("NO_OP"), Self::XOR => Some("XOR"), Self::OR => Some("OR"), Self::NOR => Some("NOR"), Self::EQUIVALENT => Some("EQUIVALENT"), Self::INVERT => Some("INVERT"), Self::OR_REVERSE => Some("OR_REVERSE"), Self::COPY_INVERTED => Some("COPY_INVERTED"), Self::OR_INVERTED => Some("OR_INVERTED"), Self::NAND => Some("NAND"), Self::SET => Some("SET"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for 
MacOSSurfaceCreateFlagsMVK { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MemoryAllocateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (MemoryAllocateFlags::DEVICE_MASK.0, "DEVICE_MASK"), (MemoryAllocateFlags::DEVICE_ADDRESS.0, "DEVICE_ADDRESS"), ( MemoryAllocateFlags::DEVICE_ADDRESS_CAPTURE_REPLAY.0, "DEVICE_ADDRESS_CAPTURE_REPLAY", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MemoryDecompressionMethodFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags64, &str)] = &[( MemoryDecompressionMethodFlagsNV::GDEFLATE_1_0.0, "GDEFLATE_1_0", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MemoryHeapFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (MemoryHeapFlags::DEVICE_LOCAL.0, "DEVICE_LOCAL"), (MemoryHeapFlags::MULTI_INSTANCE.0, "MULTI_INSTANCE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MemoryMapFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(MemoryMapFlags::PLACED_EXT.0, "PLACED_EXT")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MemoryOverallocationBehaviorAMD { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEFAULT => Some("DEFAULT"), Self::ALLOWED => Some("ALLOWED"), Self::DISALLOWED => Some("DISALLOWED"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for MemoryPropertyFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (MemoryPropertyFlags::DEVICE_LOCAL.0, "DEVICE_LOCAL"), (MemoryPropertyFlags::HOST_VISIBLE.0, "HOST_VISIBLE"), (MemoryPropertyFlags::HOST_COHERENT.0, "HOST_COHERENT"), (MemoryPropertyFlags::HOST_CACHED.0, "HOST_CACHED"), (MemoryPropertyFlags::LAZILY_ALLOCATED.0, "LAZILY_ALLOCATED"), ( MemoryPropertyFlags::DEVICE_COHERENT_AMD.0, "DEVICE_COHERENT_AMD", ), ( MemoryPropertyFlags::DEVICE_UNCACHED_AMD.0, "DEVICE_UNCACHED_AMD", ), (MemoryPropertyFlags::RDMA_CAPABLE_NV.0, "RDMA_CAPABLE_NV"), (MemoryPropertyFlags::PROTECTED.0, "PROTECTED"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MemoryUnmapFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(MemoryUnmapFlagsKHR::RESERVE_EXT.0, "RESERVE_EXT")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MetalSurfaceCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MicromapCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( MicromapCreateFlagsEXT::DEVICE_ADDRESS_CAPTURE_REPLAY.0, "DEVICE_ADDRESS_CAPTURE_REPLAY", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for MicromapTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::OPACITY_MICROMAP => Some("OPACITY_MICROMAP"), Self::DISPLACEMENT_MICROMAP_NV => Some("DISPLACEMENT_MICROMAP_NV"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for OpacityMicromapFormatEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TYPE_2_STATE => Some("TYPE_2_STATE"), Self::TYPE_4_STATE => Some("TYPE_4_STATE"), _ => None, }; if let 
Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for OpacityMicromapSpecialIndexEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FULLY_TRANSPARENT => Some("FULLY_TRANSPARENT"), Self::FULLY_OPAQUE => Some("FULLY_OPAQUE"), Self::FULLY_UNKNOWN_TRANSPARENT => Some("FULLY_UNKNOWN_TRANSPARENT"), Self::FULLY_UNKNOWN_OPAQUE => Some("FULLY_UNKNOWN_OPAQUE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for OpticalFlowExecuteFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( OpticalFlowExecuteFlagsNV::DISABLE_TEMPORAL_HINTS.0, "DISABLE_TEMPORAL_HINTS", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for OpticalFlowGridSizeFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (OpticalFlowGridSizeFlagsNV::UNKNOWN.0, "UNKNOWN"), (OpticalFlowGridSizeFlagsNV::TYPE_1X1.0, "TYPE_1X1"), (OpticalFlowGridSizeFlagsNV::TYPE_2X2.0, "TYPE_2X2"), (OpticalFlowGridSizeFlagsNV::TYPE_4X4.0, "TYPE_4X4"), (OpticalFlowGridSizeFlagsNV::TYPE_8X8.0, "TYPE_8X8"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for OpticalFlowPerformanceLevelNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UNKNOWN => Some("UNKNOWN"), Self::SLOW => Some("SLOW"), Self::MEDIUM => Some("MEDIUM"), Self::FAST => Some("FAST"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for OpticalFlowSessionBindingPointNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UNKNOWN => Some("UNKNOWN"), Self::INPUT => Some("INPUT"), Self::REFERENCE => Some("REFERENCE"), Self::HINT => Some("HINT"), Self::FLOW_VECTOR => Some("FLOW_VECTOR"), Self::BACKWARD_FLOW_VECTOR => Some("BACKWARD_FLOW_VECTOR"), Self::COST => Some("COST"), Self::BACKWARD_COST => Some("BACKWARD_COST"), Self::GLOBAL_FLOW => Some("GLOBAL_FLOW"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for OpticalFlowSessionCreateFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( OpticalFlowSessionCreateFlagsNV::ENABLE_HINT.0, "ENABLE_HINT", ), ( OpticalFlowSessionCreateFlagsNV::ENABLE_COST.0, "ENABLE_COST", ), ( OpticalFlowSessionCreateFlagsNV::ENABLE_GLOBAL_FLOW.0, "ENABLE_GLOBAL_FLOW", ), ( OpticalFlowSessionCreateFlagsNV::ALLOW_REGIONS.0, "ALLOW_REGIONS", ), ( OpticalFlowSessionCreateFlagsNV::BOTH_DIRECTIONS.0, "BOTH_DIRECTIONS", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for OpticalFlowUsageFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (OpticalFlowUsageFlagsNV::UNKNOWN.0, "UNKNOWN"), (OpticalFlowUsageFlagsNV::INPUT.0, "INPUT"), (OpticalFlowUsageFlagsNV::OUTPUT.0, "OUTPUT"), (OpticalFlowUsageFlagsNV::HINT.0, "HINT"), (OpticalFlowUsageFlagsNV::COST.0, "COST"), (OpticalFlowUsageFlagsNV::GLOBAL_FLOW.0, "GLOBAL_FLOW"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for OutOfBandQueueTypeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::RENDER => Some("RENDER"), Self::PRESENT => Some("PRESENT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PeerMemoryFeatureFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ 
(PeerMemoryFeatureFlags::COPY_SRC.0, "COPY_SRC"), (PeerMemoryFeatureFlags::COPY_DST.0, "COPY_DST"), (PeerMemoryFeatureFlags::GENERIC_SRC.0, "GENERIC_SRC"), (PeerMemoryFeatureFlags::GENERIC_DST.0, "GENERIC_DST"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PerformanceConfigurationTypeINTEL { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED => { Some("COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED") } _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PerformanceCounterDescriptionFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( PerformanceCounterDescriptionFlagsKHR::PERFORMANCE_IMPACTING.0, "PERFORMANCE_IMPACTING", ), ( PerformanceCounterDescriptionFlagsKHR::CONCURRENTLY_IMPACTED.0, "CONCURRENTLY_IMPACTED", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PerformanceCounterScopeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"), Self::RENDER_PASS => Some("RENDER_PASS"), Self::COMMAND => Some("COMMAND"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PerformanceCounterStorageKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::INT32 => Some("INT32"), Self::INT64 => Some("INT64"), Self::UINT32 => Some("UINT32"), Self::UINT64 => Some("UINT64"), Self::FLOAT32 => Some("FLOAT32"), Self::FLOAT64 => Some("FLOAT64"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PerformanceCounterUnitKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::GENERIC => Some("GENERIC"), Self::PERCENTAGE => Some("PERCENTAGE"), Self::NANOSECONDS => Some("NANOSECONDS"), Self::BYTES => Some("BYTES"), Self::BYTES_PER_SECOND => Some("BYTES_PER_SECOND"), Self::KELVIN => Some("KELVIN"), Self::WATTS => Some("WATTS"), Self::VOLTS => Some("VOLTS"), Self::AMPS => Some("AMPS"), Self::HERTZ => Some("HERTZ"), Self::CYCLES => Some("CYCLES"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PerformanceOverrideTypeINTEL { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NULL_HARDWARE => Some("NULL_HARDWARE"), Self::FLUSH_GPU_CACHES => Some("FLUSH_GPU_CACHES"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PerformanceParameterTypeINTEL { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::HW_COUNTERS_SUPPORTED => Some("HW_COUNTERS_SUPPORTED"), Self::STREAM_MARKER_VALIDS => Some("STREAM_MARKER_VALIDS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PerformanceValueTypeINTEL { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UINT32 => Some("UINT32"), Self::UINT64 => Some("UINT64"), Self::FLOAT => Some("FLOAT"), Self::BOOL => Some("BOOL"), Self::STRING => Some("STRING"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PhysicalDeviceSchedulingControlsFlagsARM { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags64, &str)] = &[( PhysicalDeviceSchedulingControlsFlagsARM::SHADER_CORE_COUNT.0, 
"SHADER_CORE_COUNT", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PhysicalDeviceType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::OTHER => Some("OTHER"), Self::INTEGRATED_GPU => Some("INTEGRATED_GPU"), Self::DISCRETE_GPU => Some("DISCRETE_GPU"), Self::VIRTUAL_GPU => Some("VIRTUAL_GPU"), Self::CPU => Some("CPU"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PipelineBindPoint { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::GRAPHICS => Some("GRAPHICS"), Self::COMPUTE => Some("COMPUTE"), Self::EXECUTION_GRAPH_AMDX => Some("EXECUTION_GRAPH_AMDX"), Self::RAY_TRACING_KHR => Some("RAY_TRACING_KHR"), Self::SUBPASS_SHADING_HUAWEI => Some("SUBPASS_SHADING_HUAWEI"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PipelineCacheCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( PipelineCacheCreateFlags::EXTERNALLY_SYNCHRONIZED.0, "EXTERNALLY_SYNCHRONIZED", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCacheHeaderVersion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ONE => Some("ONE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PipelineColorBlendStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( PipelineColorBlendStateCreateFlags::RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT.0, "RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCompilerControlFlagsAMD { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCoverageModulationStateCreateFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCoverageReductionStateCreateFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCoverageToColorStateCreateFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( PipelineCreateFlags::DISABLE_OPTIMIZATION.0, "DISABLE_OPTIMIZATION", ), ( PipelineCreateFlags::ALLOW_DERIVATIVES.0, "ALLOW_DERIVATIVES", ), (PipelineCreateFlags::DERIVATIVE.0, "DERIVATIVE"), ( PipelineCreateFlags::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0, "RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_KHR", ), ( PipelineCreateFlags::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT.0, "RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT", ), ( PipelineCreateFlags::RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_KHR.0, "RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_KHR", ), ( PipelineCreateFlags::RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_KHR.0, "RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_KHR", ), ( PipelineCreateFlags::RAY_TRACING_NO_NULL_MISS_SHADERS_KHR.0, "RAY_TRACING_NO_NULL_MISS_SHADERS_KHR", ), ( PipelineCreateFlags::RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_KHR.0, "RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_KHR", ), ( 
PipelineCreateFlags::RAY_TRACING_SKIP_TRIANGLES_KHR.0, "RAY_TRACING_SKIP_TRIANGLES_KHR", ), ( PipelineCreateFlags::RAY_TRACING_SKIP_AABBS_KHR.0, "RAY_TRACING_SKIP_AABBS_KHR", ), ( PipelineCreateFlags::RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_KHR.0, "RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_KHR", ), (PipelineCreateFlags::DEFER_COMPILE_NV.0, "DEFER_COMPILE_NV"), ( PipelineCreateFlags::CAPTURE_STATISTICS_KHR.0, "CAPTURE_STATISTICS_KHR", ), ( PipelineCreateFlags::CAPTURE_INTERNAL_REPRESENTATIONS_KHR.0, "CAPTURE_INTERNAL_REPRESENTATIONS_KHR", ), ( PipelineCreateFlags::INDIRECT_BINDABLE_NV.0, "INDIRECT_BINDABLE_NV", ), (PipelineCreateFlags::LIBRARY_KHR.0, "LIBRARY_KHR"), ( PipelineCreateFlags::DESCRIPTOR_BUFFER_EXT.0, "DESCRIPTOR_BUFFER_EXT", ), ( PipelineCreateFlags::RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT.0, "RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT", ), ( PipelineCreateFlags::LINK_TIME_OPTIMIZATION_EXT.0, "LINK_TIME_OPTIMIZATION_EXT", ), ( PipelineCreateFlags::RAY_TRACING_ALLOW_MOTION_NV.0, "RAY_TRACING_ALLOW_MOTION_NV", ), ( PipelineCreateFlags::COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT.0, "COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT", ), ( PipelineCreateFlags::DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT.0, "DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT", ), ( PipelineCreateFlags::RAY_TRACING_OPACITY_MICROMAP_EXT.0, "RAY_TRACING_OPACITY_MICROMAP_EXT", ), ( PipelineCreateFlags::RAY_TRACING_DISPLACEMENT_MICROMAP_NV.0, "RAY_TRACING_DISPLACEMENT_MICROMAP_NV", ), ( PipelineCreateFlags::NO_PROTECTED_ACCESS_EXT.0, "NO_PROTECTED_ACCESS_EXT", ), ( PipelineCreateFlags::PROTECTED_ACCESS_ONLY_EXT.0, "PROTECTED_ACCESS_ONLY_EXT", ), ( PipelineCreateFlags::VIEW_INDEX_FROM_DEVICE_INDEX.0, "VIEW_INDEX_FROM_DEVICE_INDEX", ), (PipelineCreateFlags::DISPATCH_BASE.0, "DISPATCH_BASE"), ( PipelineCreateFlags::FAIL_ON_PIPELINE_COMPILE_REQUIRED.0, "FAIL_ON_PIPELINE_COMPILE_REQUIRED", ), ( PipelineCreateFlags::EARLY_RETURN_ON_FAILURE.0, "EARLY_RETURN_ON_FAILURE", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCreateFlags2KHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags64, &str)] = &[ ( PipelineCreateFlags2KHR::DISABLE_OPTIMIZATION.0, "DISABLE_OPTIMIZATION", ), ( PipelineCreateFlags2KHR::ALLOW_DERIVATIVES.0, "ALLOW_DERIVATIVES", ), (PipelineCreateFlags2KHR::DERIVATIVE.0, "DERIVATIVE"), ( PipelineCreateFlags2KHR::VIEW_INDEX_FROM_DEVICE_INDEX.0, "VIEW_INDEX_FROM_DEVICE_INDEX", ), (PipelineCreateFlags2KHR::DISPATCH_BASE.0, "DISPATCH_BASE"), ( PipelineCreateFlags2KHR::DEFER_COMPILE_NV.0, "DEFER_COMPILE_NV", ), ( PipelineCreateFlags2KHR::CAPTURE_STATISTICS.0, "CAPTURE_STATISTICS", ), ( PipelineCreateFlags2KHR::CAPTURE_INTERNAL_REPRESENTATIONS.0, "CAPTURE_INTERNAL_REPRESENTATIONS", ), ( PipelineCreateFlags2KHR::FAIL_ON_PIPELINE_COMPILE_REQUIRED.0, "FAIL_ON_PIPELINE_COMPILE_REQUIRED", ), ( PipelineCreateFlags2KHR::EARLY_RETURN_ON_FAILURE.0, "EARLY_RETURN_ON_FAILURE", ), ( PipelineCreateFlags2KHR::LINK_TIME_OPTIMIZATION_EXT.0, "LINK_TIME_OPTIMIZATION_EXT", ), ( PipelineCreateFlags2KHR::RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT.0, "RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT", ), (PipelineCreateFlags2KHR::LIBRARY.0, "LIBRARY"), ( PipelineCreateFlags2KHR::RAY_TRACING_SKIP_TRIANGLES.0, "RAY_TRACING_SKIP_TRIANGLES", ), ( PipelineCreateFlags2KHR::RAY_TRACING_SKIP_AABBS.0, "RAY_TRACING_SKIP_AABBS", ), ( PipelineCreateFlags2KHR::RAY_TRACING_NO_NULL_ANY_HIT_SHADERS.0, "RAY_TRACING_NO_NULL_ANY_HIT_SHADERS", ), ( PipelineCreateFlags2KHR::RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS.0, 
"RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS", ), ( PipelineCreateFlags2KHR::RAY_TRACING_NO_NULL_MISS_SHADERS.0, "RAY_TRACING_NO_NULL_MISS_SHADERS", ), ( PipelineCreateFlags2KHR::RAY_TRACING_NO_NULL_INTERSECTION_SHADERS.0, "RAY_TRACING_NO_NULL_INTERSECTION_SHADERS", ), ( PipelineCreateFlags2KHR::RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY.0, "RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY", ), ( PipelineCreateFlags2KHR::INDIRECT_BINDABLE_NV.0, "INDIRECT_BINDABLE_NV", ), ( PipelineCreateFlags2KHR::RAY_TRACING_ALLOW_MOTION_NV.0, "RAY_TRACING_ALLOW_MOTION_NV", ), ( PipelineCreateFlags2KHR::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT.0, "RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT", ), ( PipelineCreateFlags2KHR::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT.0, "RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT", ), ( PipelineCreateFlags2KHR::RAY_TRACING_OPACITY_MICROMAP_EXT.0, "RAY_TRACING_OPACITY_MICROMAP_EXT", ), ( PipelineCreateFlags2KHR::COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT.0, "COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT", ), ( PipelineCreateFlags2KHR::DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT.0, "DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT", ), ( PipelineCreateFlags2KHR::NO_PROTECTED_ACCESS_EXT.0, "NO_PROTECTED_ACCESS_EXT", ), ( PipelineCreateFlags2KHR::PROTECTED_ACCESS_ONLY_EXT.0, "PROTECTED_ACCESS_ONLY_EXT", ), ( PipelineCreateFlags2KHR::RAY_TRACING_DISPLACEMENT_MICROMAP_NV.0, "RAY_TRACING_DISPLACEMENT_MICROMAP_NV", ), ( PipelineCreateFlags2KHR::DESCRIPTOR_BUFFER_EXT.0, "DESCRIPTOR_BUFFER_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineCreationFeedbackFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (PipelineCreationFeedbackFlags::VALID.0, "VALID"), ( PipelineCreationFeedbackFlags::APPLICATION_PIPELINE_CACHE_HIT.0, "APPLICATION_PIPELINE_CACHE_HIT", ), ( PipelineCreationFeedbackFlags::BASE_PIPELINE_ACCELERATION.0, "BASE_PIPELINE_ACCELERATION", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineDepthStencilStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN : & [(Flags , & str)] = & [(PipelineDepthStencilStateCreateFlags :: RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT . 0 , "RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT") , (PipelineDepthStencilStateCreateFlags :: RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT . 
0 , "RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT")] ; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineDiscardRectangleStateCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineDynamicStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineExecutableStatisticFormatKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BOOL32 => Some("BOOL32"), Self::INT64 => Some("INT64"), Self::UINT64 => Some("UINT64"), Self::FLOAT64 => Some("FLOAT64"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PipelineInputAssemblyStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineLayoutCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[( PipelineLayoutCreateFlags::INDEPENDENT_SETS_EXT.0, "INDEPENDENT_SETS_EXT", )]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineMultisampleStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineRasterizationConservativeStateCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineRasterizationDepthClipStateCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineRasterizationStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineRasterizationStateStreamCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineRobustnessBufferBehaviorEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEVICE_DEFAULT => Some("DEVICE_DEFAULT"), Self::DISABLED => Some("DISABLED"), Self::ROBUST_BUFFER_ACCESS => Some("ROBUST_BUFFER_ACCESS"), Self::ROBUST_BUFFER_ACCESS_2 => Some("ROBUST_BUFFER_ACCESS_2"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PipelineRobustnessImageBehaviorEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEVICE_DEFAULT => Some("DEVICE_DEFAULT"), Self::DISABLED => Some("DISABLED"), Self::ROBUST_IMAGE_ACCESS => Some("ROBUST_IMAGE_ACCESS"), Self::ROBUST_IMAGE_ACCESS_2 => Some("ROBUST_IMAGE_ACCESS_2"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PipelineShaderStageCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE.0, "ALLOW_VARYING_SUBGROUP_SIZE", ), ( PipelineShaderStageCreateFlags::REQUIRE_FULL_SUBGROUPS.0, "REQUIRE_FULL_SUBGROUPS", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineStageFlags { fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (PipelineStageFlags::TOP_OF_PIPE.0, "TOP_OF_PIPE"), (PipelineStageFlags::DRAW_INDIRECT.0, "DRAW_INDIRECT"), (PipelineStageFlags::VERTEX_INPUT.0, "VERTEX_INPUT"), (PipelineStageFlags::VERTEX_SHADER.0, "VERTEX_SHADER"), ( PipelineStageFlags::TESSELLATION_CONTROL_SHADER.0, "TESSELLATION_CONTROL_SHADER", ), ( PipelineStageFlags::TESSELLATION_EVALUATION_SHADER.0, "TESSELLATION_EVALUATION_SHADER", ), (PipelineStageFlags::GEOMETRY_SHADER.0, "GEOMETRY_SHADER"), (PipelineStageFlags::FRAGMENT_SHADER.0, "FRAGMENT_SHADER"), ( PipelineStageFlags::EARLY_FRAGMENT_TESTS.0, "EARLY_FRAGMENT_TESTS", ), ( PipelineStageFlags::LATE_FRAGMENT_TESTS.0, "LATE_FRAGMENT_TESTS", ), ( PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT.0, "COLOR_ATTACHMENT_OUTPUT", ), (PipelineStageFlags::COMPUTE_SHADER.0, "COMPUTE_SHADER"), (PipelineStageFlags::TRANSFER.0, "TRANSFER"), (PipelineStageFlags::BOTTOM_OF_PIPE.0, "BOTTOM_OF_PIPE"), (PipelineStageFlags::HOST.0, "HOST"), (PipelineStageFlags::ALL_GRAPHICS.0, "ALL_GRAPHICS"), (PipelineStageFlags::ALL_COMMANDS.0, "ALL_COMMANDS"), ( PipelineStageFlags::TRANSFORM_FEEDBACK_EXT.0, "TRANSFORM_FEEDBACK_EXT", ), ( PipelineStageFlags::CONDITIONAL_RENDERING_EXT.0, "CONDITIONAL_RENDERING_EXT", ), ( PipelineStageFlags::ACCELERATION_STRUCTURE_BUILD_KHR.0, "ACCELERATION_STRUCTURE_BUILD_KHR", ), ( PipelineStageFlags::RAY_TRACING_SHADER_KHR.0, "RAY_TRACING_SHADER_KHR", ), ( PipelineStageFlags::FRAGMENT_DENSITY_PROCESS_EXT.0, "FRAGMENT_DENSITY_PROCESS_EXT", ), ( PipelineStageFlags::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0, "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR", ), ( PipelineStageFlags::COMMAND_PREPROCESS_NV.0, "COMMAND_PREPROCESS_NV", ), (PipelineStageFlags::TASK_SHADER_EXT.0, "TASK_SHADER_EXT"), (PipelineStageFlags::MESH_SHADER_EXT.0, "MESH_SHADER_EXT"), (PipelineStageFlags::NONE.0, "NONE"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineStageFlags2 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags64, &str)] = &[ (PipelineStageFlags2::NONE.0, "NONE"), (PipelineStageFlags2::TOP_OF_PIPE.0, "TOP_OF_PIPE"), (PipelineStageFlags2::DRAW_INDIRECT.0, "DRAW_INDIRECT"), (PipelineStageFlags2::VERTEX_INPUT.0, "VERTEX_INPUT"), (PipelineStageFlags2::VERTEX_SHADER.0, "VERTEX_SHADER"), ( PipelineStageFlags2::TESSELLATION_CONTROL_SHADER.0, "TESSELLATION_CONTROL_SHADER", ), ( PipelineStageFlags2::TESSELLATION_EVALUATION_SHADER.0, "TESSELLATION_EVALUATION_SHADER", ), (PipelineStageFlags2::GEOMETRY_SHADER.0, "GEOMETRY_SHADER"), (PipelineStageFlags2::FRAGMENT_SHADER.0, "FRAGMENT_SHADER"), ( PipelineStageFlags2::EARLY_FRAGMENT_TESTS.0, "EARLY_FRAGMENT_TESTS", ), ( PipelineStageFlags2::LATE_FRAGMENT_TESTS.0, "LATE_FRAGMENT_TESTS", ), ( PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT.0, "COLOR_ATTACHMENT_OUTPUT", ), (PipelineStageFlags2::COMPUTE_SHADER.0, "COMPUTE_SHADER"), (PipelineStageFlags2::ALL_TRANSFER.0, "ALL_TRANSFER"), (PipelineStageFlags2::BOTTOM_OF_PIPE.0, "BOTTOM_OF_PIPE"), (PipelineStageFlags2::HOST.0, "HOST"), (PipelineStageFlags2::ALL_GRAPHICS.0, "ALL_GRAPHICS"), (PipelineStageFlags2::ALL_COMMANDS.0, "ALL_COMMANDS"), (PipelineStageFlags2::COPY.0, "COPY"), (PipelineStageFlags2::RESOLVE.0, "RESOLVE"), (PipelineStageFlags2::BLIT.0, "BLIT"), (PipelineStageFlags2::CLEAR.0, "CLEAR"), (PipelineStageFlags2::INDEX_INPUT.0, "INDEX_INPUT"), ( PipelineStageFlags2::VERTEX_ATTRIBUTE_INPUT.0, "VERTEX_ATTRIBUTE_INPUT", ), ( PipelineStageFlags2::PRE_RASTERIZATION_SHADERS.0, "PRE_RASTERIZATION_SHADERS", ), 
(PipelineStageFlags2::VIDEO_DECODE_KHR.0, "VIDEO_DECODE_KHR"), (PipelineStageFlags2::VIDEO_ENCODE_KHR.0, "VIDEO_ENCODE_KHR"), ( PipelineStageFlags2::TRANSFORM_FEEDBACK_EXT.0, "TRANSFORM_FEEDBACK_EXT", ), ( PipelineStageFlags2::CONDITIONAL_RENDERING_EXT.0, "CONDITIONAL_RENDERING_EXT", ), ( PipelineStageFlags2::COMMAND_PREPROCESS_NV.0, "COMMAND_PREPROCESS_NV", ), ( PipelineStageFlags2::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0, "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR", ), ( PipelineStageFlags2::ACCELERATION_STRUCTURE_BUILD_KHR.0, "ACCELERATION_STRUCTURE_BUILD_KHR", ), ( PipelineStageFlags2::RAY_TRACING_SHADER_KHR.0, "RAY_TRACING_SHADER_KHR", ), ( PipelineStageFlags2::FRAGMENT_DENSITY_PROCESS_EXT.0, "FRAGMENT_DENSITY_PROCESS_EXT", ), (PipelineStageFlags2::TASK_SHADER_EXT.0, "TASK_SHADER_EXT"), (PipelineStageFlags2::MESH_SHADER_EXT.0, "MESH_SHADER_EXT"), ( PipelineStageFlags2::SUBPASS_SHADER_HUAWEI.0, "SUBPASS_SHADER_HUAWEI", ), ( PipelineStageFlags2::INVOCATION_MASK_HUAWEI.0, "INVOCATION_MASK_HUAWEI", ), ( PipelineStageFlags2::ACCELERATION_STRUCTURE_COPY_KHR.0, "ACCELERATION_STRUCTURE_COPY_KHR", ), ( PipelineStageFlags2::MICROMAP_BUILD_EXT.0, "MICROMAP_BUILD_EXT", ), ( PipelineStageFlags2::CLUSTER_CULLING_SHADER_HUAWEI.0, "CLUSTER_CULLING_SHADER_HUAWEI", ), (PipelineStageFlags2::OPTICAL_FLOW_NV.0, "OPTICAL_FLOW_NV"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineTessellationStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineVertexInputStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineViewportStateCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PipelineViewportSwizzleStateCreateFlagsNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PointClippingBehavior { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ALL_CLIP_PLANES => Some("ALL_CLIP_PLANES"), Self::USER_CLIP_PLANES_ONLY => Some("USER_CLIP_PLANES_ONLY"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PolygonMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FILL => Some("FILL"), Self::LINE => Some("LINE"), Self::POINT => Some("POINT"), Self::FILL_RECTANGLE_NV => Some("FILL_RECTANGLE_NV"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PresentGravityFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (PresentGravityFlagsEXT::MIN.0, "MIN"), (PresentGravityFlagsEXT::MAX.0, "MAX"), (PresentGravityFlagsEXT::CENTERED.0, "CENTERED"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PresentModeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::IMMEDIATE => Some("IMMEDIATE"), Self::MAILBOX => Some("MAILBOX"), Self::FIFO => Some("FIFO"), Self::FIFO_RELAXED => Some("FIFO_RELAXED"), Self::SHARED_DEMAND_REFRESH => Some("SHARED_DEMAND_REFRESH"), Self::SHARED_CONTINUOUS_REFRESH => Some("SHARED_CONTINUOUS_REFRESH"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { 
self.0.fmt(f) } } } impl fmt::Debug for PresentScalingFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (PresentScalingFlagsEXT::ONE_TO_ONE.0, "ONE_TO_ONE"), ( PresentScalingFlagsEXT::ASPECT_RATIO_STRETCH.0, "ASPECT_RATIO_STRETCH", ), (PresentScalingFlagsEXT::STRETCH.0, "STRETCH"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for PrimitiveTopology { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::POINT_LIST => Some("POINT_LIST"), Self::LINE_LIST => Some("LINE_LIST"), Self::LINE_STRIP => Some("LINE_STRIP"), Self::TRIANGLE_LIST => Some("TRIANGLE_LIST"), Self::TRIANGLE_STRIP => Some("TRIANGLE_STRIP"), Self::TRIANGLE_FAN => Some("TRIANGLE_FAN"), Self::LINE_LIST_WITH_ADJACENCY => Some("LINE_LIST_WITH_ADJACENCY"), Self::LINE_STRIP_WITH_ADJACENCY => Some("LINE_STRIP_WITH_ADJACENCY"), Self::TRIANGLE_LIST_WITH_ADJACENCY => Some("TRIANGLE_LIST_WITH_ADJACENCY"), Self::TRIANGLE_STRIP_WITH_ADJACENCY => Some("TRIANGLE_STRIP_WITH_ADJACENCY"), Self::PATCH_LIST => Some("PATCH_LIST"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for PrivateDataSlotCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ProvokingVertexModeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::FIRST_VERTEX => Some("FIRST_VERTEX"), Self::LAST_VERTEX => Some("LAST_VERTEX"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for QueryControlFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(QueryControlFlags::PRECISE.0, "PRECISE")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for QueryPipelineStatisticFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( QueryPipelineStatisticFlags::INPUT_ASSEMBLY_VERTICES.0, "INPUT_ASSEMBLY_VERTICES", ), ( QueryPipelineStatisticFlags::INPUT_ASSEMBLY_PRIMITIVES.0, "INPUT_ASSEMBLY_PRIMITIVES", ), ( QueryPipelineStatisticFlags::VERTEX_SHADER_INVOCATIONS.0, "VERTEX_SHADER_INVOCATIONS", ), ( QueryPipelineStatisticFlags::GEOMETRY_SHADER_INVOCATIONS.0, "GEOMETRY_SHADER_INVOCATIONS", ), ( QueryPipelineStatisticFlags::GEOMETRY_SHADER_PRIMITIVES.0, "GEOMETRY_SHADER_PRIMITIVES", ), ( QueryPipelineStatisticFlags::CLIPPING_INVOCATIONS.0, "CLIPPING_INVOCATIONS", ), ( QueryPipelineStatisticFlags::CLIPPING_PRIMITIVES.0, "CLIPPING_PRIMITIVES", ), ( QueryPipelineStatisticFlags::FRAGMENT_SHADER_INVOCATIONS.0, "FRAGMENT_SHADER_INVOCATIONS", ), ( QueryPipelineStatisticFlags::TESSELLATION_CONTROL_SHADER_PATCHES.0, "TESSELLATION_CONTROL_SHADER_PATCHES", ), ( QueryPipelineStatisticFlags::TESSELLATION_EVALUATION_SHADER_INVOCATIONS.0, "TESSELLATION_EVALUATION_SHADER_INVOCATIONS", ), ( QueryPipelineStatisticFlags::COMPUTE_SHADER_INVOCATIONS.0, "COMPUTE_SHADER_INVOCATIONS", ), ( QueryPipelineStatisticFlags::TASK_SHADER_INVOCATIONS_EXT.0, "TASK_SHADER_INVOCATIONS_EXT", ), ( QueryPipelineStatisticFlags::MESH_SHADER_INVOCATIONS_EXT.0, "MESH_SHADER_INVOCATIONS_EXT", ), ( QueryPipelineStatisticFlags::CLUSTER_CULLING_SHADER_INVOCATIONS_HUAWEI.0, "CLUSTER_CULLING_SHADER_INVOCATIONS_HUAWEI", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for QueryPoolCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; 
debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for QueryPoolSamplingModeINTEL { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::MANUAL => Some("MANUAL"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for QueryResultFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (QueryResultFlags::TYPE_64.0, "TYPE_64"), (QueryResultFlags::WAIT.0, "WAIT"), (QueryResultFlags::WITH_AVAILABILITY.0, "WITH_AVAILABILITY"), (QueryResultFlags::PARTIAL.0, "PARTIAL"), (QueryResultFlags::WITH_STATUS_KHR.0, "WITH_STATUS_KHR"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for QueryResultStatusKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ERROR => Some("ERROR"), Self::NOT_READY => Some("NOT_READY"), Self::COMPLETE => Some("COMPLETE"), Self::INSUFFICIENTSTREAM_BUFFER_RANGE => Some("INSUFFICIENTSTREAM_BUFFER_RANGE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for QueryType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::OCCLUSION => Some("OCCLUSION"), Self::PIPELINE_STATISTICS => Some("PIPELINE_STATISTICS"), Self::TIMESTAMP => Some("TIMESTAMP"), Self::RESULT_STATUS_ONLY_KHR => Some("RESULT_STATUS_ONLY_KHR"), Self::TRANSFORM_FEEDBACK_STREAM_EXT => Some("TRANSFORM_FEEDBACK_STREAM_EXT"), Self::PERFORMANCE_QUERY_KHR => Some("PERFORMANCE_QUERY_KHR"), Self::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR => { Some("ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR") } Self::ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR => { Some("ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR") } Self::ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV => { Some("ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV") } Self::PERFORMANCE_QUERY_INTEL => Some("PERFORMANCE_QUERY_INTEL"), Self::VIDEO_ENCODE_FEEDBACK_KHR => Some("VIDEO_ENCODE_FEEDBACK_KHR"), Self::MESH_PRIMITIVES_GENERATED_EXT => Some("MESH_PRIMITIVES_GENERATED_EXT"), Self::PRIMITIVES_GENERATED_EXT => Some("PRIMITIVES_GENERATED_EXT"), Self::ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR => { Some("ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR") } Self::ACCELERATION_STRUCTURE_SIZE_KHR => Some("ACCELERATION_STRUCTURE_SIZE_KHR"), Self::MICROMAP_SERIALIZATION_SIZE_EXT => Some("MICROMAP_SERIALIZATION_SIZE_EXT"), Self::MICROMAP_COMPACTED_SIZE_EXT => Some("MICROMAP_COMPACTED_SIZE_EXT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for QueueFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (QueueFlags::GRAPHICS.0, "GRAPHICS"), (QueueFlags::COMPUTE.0, "COMPUTE"), (QueueFlags::TRANSFER.0, "TRANSFER"), (QueueFlags::SPARSE_BINDING.0, "SPARSE_BINDING"), (QueueFlags::VIDEO_DECODE_KHR.0, "VIDEO_DECODE_KHR"), (QueueFlags::VIDEO_ENCODE_KHR.0, "VIDEO_ENCODE_KHR"), (QueueFlags::OPTICAL_FLOW_NV.0, "OPTICAL_FLOW_NV"), (QueueFlags::PROTECTED.0, "PROTECTED"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for QueueGlobalPriorityKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::LOW => Some("LOW"), Self::MEDIUM => Some("MEDIUM"), Self::HIGH => Some("HIGH"), Self::REALTIME => Some("REALTIME"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for RasterizationOrderAMD { fn fmt(&self, f: &mut fmt::Formatter<'_>) 
-> fmt::Result { let name = match *self { Self::STRICT => Some("STRICT"), Self::RELAXED => Some("RELAXED"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for RayTracingInvocationReorderModeNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NONE => Some("NONE"), Self::REORDER => Some("REORDER"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for RayTracingShaderGroupTypeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::GENERAL => Some("GENERAL"), Self::TRIANGLES_HIT_GROUP => Some("TRIANGLES_HIT_GROUP"), Self::PROCEDURAL_HIT_GROUP => Some("PROCEDURAL_HIT_GROUP"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for RenderPassCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(RenderPassCreateFlags::TRANSFORM_QCOM.0, "TRANSFORM_QCOM")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for RenderingFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( RenderingFlags::CONTENTS_SECONDARY_COMMAND_BUFFERS.0, "CONTENTS_SECONDARY_COMMAND_BUFFERS", ), (RenderingFlags::SUSPENDING.0, "SUSPENDING"), (RenderingFlags::RESUMING.0, "RESUMING"), (RenderingFlags::CONTENTS_INLINE_EXT.0, "CONTENTS_INLINE_EXT"), ( RenderingFlags::ENABLE_LEGACY_DITHERING_EXT.0, "ENABLE_LEGACY_DITHERING_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ResolveModeFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ResolveModeFlags::NONE.0, "NONE"), (ResolveModeFlags::SAMPLE_ZERO.0, "SAMPLE_ZERO"), (ResolveModeFlags::AVERAGE.0, "AVERAGE"), (ResolveModeFlags::MIN.0, "MIN"), (ResolveModeFlags::MAX.0, "MAX"), ( ResolveModeFlags::EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID.0, "EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SampleCountFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (SampleCountFlags::TYPE_1.0, "TYPE_1"), (SampleCountFlags::TYPE_2.0, "TYPE_2"), (SampleCountFlags::TYPE_4.0, "TYPE_4"), (SampleCountFlags::TYPE_8.0, "TYPE_8"), (SampleCountFlags::TYPE_16.0, "TYPE_16"), (SampleCountFlags::TYPE_32.0, "TYPE_32"), (SampleCountFlags::TYPE_64.0, "TYPE_64"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SamplerAddressMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::REPEAT => Some("REPEAT"), Self::MIRRORED_REPEAT => Some("MIRRORED_REPEAT"), Self::CLAMP_TO_EDGE => Some("CLAMP_TO_EDGE"), Self::CLAMP_TO_BORDER => Some("CLAMP_TO_BORDER"), Self::MIRROR_CLAMP_TO_EDGE => Some("MIRROR_CLAMP_TO_EDGE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SamplerCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (SamplerCreateFlags::SUBSAMPLED_EXT.0, "SUBSAMPLED_EXT"), ( SamplerCreateFlags::SUBSAMPLED_COARSE_RECONSTRUCTION_EXT.0, "SUBSAMPLED_COARSE_RECONSTRUCTION_EXT", ), ( SamplerCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0, "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT", ), ( SamplerCreateFlags::NON_SEAMLESS_CUBE_MAP_EXT.0, "NON_SEAMLESS_CUBE_MAP_EXT", ), ( SamplerCreateFlags::IMAGE_PROCESSING_QCOM.0, "IMAGE_PROCESSING_QCOM", ), ]; debug_flags(f, KNOWN, self.0) } } impl 
fmt::Debug for SamplerMipmapMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NEAREST => Some("NEAREST"), Self::LINEAR => Some("LINEAR"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SamplerReductionMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::WEIGHTED_AVERAGE => Some("WEIGHTED_AVERAGE"), Self::MIN => Some("MIN"), Self::MAX => Some("MAX"), Self::WEIGHTED_AVERAGE_RANGECLAMP_QCOM => Some("WEIGHTED_AVERAGE_RANGECLAMP_QCOM"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SamplerYcbcrModelConversion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::RGB_IDENTITY => Some("RGB_IDENTITY"), Self::YCBCR_IDENTITY => Some("YCBCR_IDENTITY"), Self::YCBCR_709 => Some("YCBCR_709"), Self::YCBCR_601 => Some("YCBCR_601"), Self::YCBCR_2020 => Some("YCBCR_2020"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SamplerYcbcrRange { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ITU_FULL => Some("ITU_FULL"), Self::ITU_NARROW => Some("ITU_NARROW"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ScopeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEVICE => Some("DEVICE"), Self::WORKGROUP => Some("WORKGROUP"), Self::SUBGROUP => Some("SUBGROUP"), Self::QUEUE_FAMILY => Some("QUEUE_FAMILY"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ScreenSurfaceCreateFlagsQNX { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SemaphoreCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SemaphoreImportFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(SemaphoreImportFlags::TEMPORARY.0, "TEMPORARY")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SemaphoreType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BINARY => Some("BINARY"), Self::TIMELINE => Some("TIMELINE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SemaphoreWaitFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(SemaphoreWaitFlags::ANY.0, "ANY")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ShaderCodeTypeEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::BINARY => Some("BINARY"), Self::SPIRV => Some("SPIRV"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ShaderCorePropertiesFlagsAMD { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ShaderCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ShaderCreateFlagsEXT::LINK_STAGE.0, "LINK_STAGE"), ( ShaderCreateFlagsEXT::ALLOW_VARYING_SUBGROUP_SIZE.0, "ALLOW_VARYING_SUBGROUP_SIZE", ), ( ShaderCreateFlagsEXT::REQUIRE_FULL_SUBGROUPS.0, 
"REQUIRE_FULL_SUBGROUPS", ), (ShaderCreateFlagsEXT::NO_TASK_SHADER.0, "NO_TASK_SHADER"), (ShaderCreateFlagsEXT::DISPATCH_BASE.0, "DISPATCH_BASE"), ( ShaderCreateFlagsEXT::FRAGMENT_SHADING_RATE_ATTACHMENT.0, "FRAGMENT_SHADING_RATE_ATTACHMENT", ), ( ShaderCreateFlagsEXT::FRAGMENT_DENSITY_MAP_ATTACHMENT.0, "FRAGMENT_DENSITY_MAP_ATTACHMENT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ShaderFloatControlsIndependence { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::TYPE_32_ONLY => Some("TYPE_32_ONLY"), Self::ALL => Some("ALL"), Self::NONE => Some("NONE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ShaderGroupShaderKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::GENERAL => Some("GENERAL"), Self::CLOSEST_HIT => Some("CLOSEST_HIT"), Self::ANY_HIT => Some("ANY_HIT"), Self::INTERSECTION => Some("INTERSECTION"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ShaderInfoTypeAMD { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::STATISTICS => Some("STATISTICS"), Self::BINARY => Some("BINARY"), Self::DISASSEMBLY => Some("DISASSEMBLY"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ShaderModuleCreateFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ShaderStageFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ShaderStageFlags::VERTEX.0, "VERTEX"), ( ShaderStageFlags::TESSELLATION_CONTROL.0, "TESSELLATION_CONTROL", ), ( ShaderStageFlags::TESSELLATION_EVALUATION.0, "TESSELLATION_EVALUATION", ), (ShaderStageFlags::GEOMETRY.0, "GEOMETRY"), (ShaderStageFlags::FRAGMENT.0, "FRAGMENT"), (ShaderStageFlags::COMPUTE.0, "COMPUTE"), (ShaderStageFlags::ALL_GRAPHICS.0, "ALL_GRAPHICS"), (ShaderStageFlags::ALL.0, "ALL"), (ShaderStageFlags::RAYGEN_KHR.0, "RAYGEN_KHR"), (ShaderStageFlags::ANY_HIT_KHR.0, "ANY_HIT_KHR"), (ShaderStageFlags::CLOSEST_HIT_KHR.0, "CLOSEST_HIT_KHR"), (ShaderStageFlags::MISS_KHR.0, "MISS_KHR"), (ShaderStageFlags::INTERSECTION_KHR.0, "INTERSECTION_KHR"), (ShaderStageFlags::CALLABLE_KHR.0, "CALLABLE_KHR"), (ShaderStageFlags::TASK_EXT.0, "TASK_EXT"), (ShaderStageFlags::MESH_EXT.0, "MESH_EXT"), ( ShaderStageFlags::SUBPASS_SHADING_HUAWEI.0, "SUBPASS_SHADING_HUAWEI", ), ( ShaderStageFlags::CLUSTER_CULLING_HUAWEI.0, "CLUSTER_CULLING_HUAWEI", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ShadingRatePaletteEntryNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::NO_INVOCATIONS => Some("NO_INVOCATIONS"), Self::TYPE_16_INVOCATIONS_PER_PIXEL => Some("TYPE_16_INVOCATIONS_PER_PIXEL"), Self::TYPE_8_INVOCATIONS_PER_PIXEL => Some("TYPE_8_INVOCATIONS_PER_PIXEL"), Self::TYPE_4_INVOCATIONS_PER_PIXEL => Some("TYPE_4_INVOCATIONS_PER_PIXEL"), Self::TYPE_2_INVOCATIONS_PER_PIXEL => Some("TYPE_2_INVOCATIONS_PER_PIXEL"), Self::TYPE_1_INVOCATION_PER_PIXEL => Some("TYPE_1_INVOCATION_PER_PIXEL"), Self::TYPE_1_INVOCATION_PER_2X1_PIXELS => Some("TYPE_1_INVOCATION_PER_2X1_PIXELS"), Self::TYPE_1_INVOCATION_PER_1X2_PIXELS => Some("TYPE_1_INVOCATION_PER_1X2_PIXELS"), Self::TYPE_1_INVOCATION_PER_2X2_PIXELS => Some("TYPE_1_INVOCATION_PER_2X2_PIXELS"), Self::TYPE_1_INVOCATION_PER_4X2_PIXELS => 
Some("TYPE_1_INVOCATION_PER_4X2_PIXELS"), Self::TYPE_1_INVOCATION_PER_2X4_PIXELS => Some("TYPE_1_INVOCATION_PER_2X4_PIXELS"), Self::TYPE_1_INVOCATION_PER_4X4_PIXELS => Some("TYPE_1_INVOCATION_PER_4X4_PIXELS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SharingMode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::EXCLUSIVE => Some("EXCLUSIVE"), Self::CONCURRENT => Some("CONCURRENT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SparseImageFormatFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (SparseImageFormatFlags::SINGLE_MIPTAIL.0, "SINGLE_MIPTAIL"), ( SparseImageFormatFlags::ALIGNED_MIP_SIZE.0, "ALIGNED_MIP_SIZE", ), ( SparseImageFormatFlags::NONSTANDARD_BLOCK_SIZE.0, "NONSTANDARD_BLOCK_SIZE", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SparseMemoryBindFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(SparseMemoryBindFlags::METADATA.0, "METADATA")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for StencilFaceFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (StencilFaceFlags::FRONT.0, "FRONT"), (StencilFaceFlags::BACK.0, "BACK"), (StencilFaceFlags::FRONT_AND_BACK.0, "FRONT_AND_BACK"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for StencilOp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::KEEP => Some("KEEP"), Self::ZERO => Some("ZERO"), Self::REPLACE => Some("REPLACE"), Self::INCREMENT_AND_CLAMP => Some("INCREMENT_AND_CLAMP"), Self::DECREMENT_AND_CLAMP => Some("DECREMENT_AND_CLAMP"), Self::INVERT => Some("INVERT"), Self::INCREMENT_AND_WRAP => Some("INCREMENT_AND_WRAP"), Self::DECREMENT_AND_WRAP => Some("DECREMENT_AND_WRAP"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for StreamDescriptorSurfaceCreateFlagsGGP { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for StructureType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::APPLICATION_INFO => Some("APPLICATION_INFO"), Self::INSTANCE_CREATE_INFO => Some("INSTANCE_CREATE_INFO"), Self::DEVICE_QUEUE_CREATE_INFO => Some("DEVICE_QUEUE_CREATE_INFO"), Self::DEVICE_CREATE_INFO => Some("DEVICE_CREATE_INFO"), Self::SUBMIT_INFO => Some("SUBMIT_INFO"), Self::MEMORY_ALLOCATE_INFO => Some("MEMORY_ALLOCATE_INFO"), Self::MAPPED_MEMORY_RANGE => Some("MAPPED_MEMORY_RANGE"), Self::BIND_SPARSE_INFO => Some("BIND_SPARSE_INFO"), Self::FENCE_CREATE_INFO => Some("FENCE_CREATE_INFO"), Self::SEMAPHORE_CREATE_INFO => Some("SEMAPHORE_CREATE_INFO"), Self::EVENT_CREATE_INFO => Some("EVENT_CREATE_INFO"), Self::QUERY_POOL_CREATE_INFO => Some("QUERY_POOL_CREATE_INFO"), Self::BUFFER_CREATE_INFO => Some("BUFFER_CREATE_INFO"), Self::BUFFER_VIEW_CREATE_INFO => Some("BUFFER_VIEW_CREATE_INFO"), Self::IMAGE_CREATE_INFO => Some("IMAGE_CREATE_INFO"), Self::IMAGE_VIEW_CREATE_INFO => Some("IMAGE_VIEW_CREATE_INFO"), Self::SHADER_MODULE_CREATE_INFO => Some("SHADER_MODULE_CREATE_INFO"), Self::PIPELINE_CACHE_CREATE_INFO => Some("PIPELINE_CACHE_CREATE_INFO"), Self::PIPELINE_SHADER_STAGE_CREATE_INFO => Some("PIPELINE_SHADER_STAGE_CREATE_INFO"), Self::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO => { 
Some("PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO") } Self::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO => { Some("PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO") } Self::PIPELINE_TESSELLATION_STATE_CREATE_INFO => { Some("PIPELINE_TESSELLATION_STATE_CREATE_INFO") } Self::PIPELINE_VIEWPORT_STATE_CREATE_INFO => { Some("PIPELINE_VIEWPORT_STATE_CREATE_INFO") } Self::PIPELINE_RASTERIZATION_STATE_CREATE_INFO => { Some("PIPELINE_RASTERIZATION_STATE_CREATE_INFO") } Self::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO => { Some("PIPELINE_MULTISAMPLE_STATE_CREATE_INFO") } Self::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO => { Some("PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO") } Self::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO => { Some("PIPELINE_COLOR_BLEND_STATE_CREATE_INFO") } Self::PIPELINE_DYNAMIC_STATE_CREATE_INFO => Some("PIPELINE_DYNAMIC_STATE_CREATE_INFO"), Self::GRAPHICS_PIPELINE_CREATE_INFO => Some("GRAPHICS_PIPELINE_CREATE_INFO"), Self::COMPUTE_PIPELINE_CREATE_INFO => Some("COMPUTE_PIPELINE_CREATE_INFO"), Self::PIPELINE_LAYOUT_CREATE_INFO => Some("PIPELINE_LAYOUT_CREATE_INFO"), Self::SAMPLER_CREATE_INFO => Some("SAMPLER_CREATE_INFO"), Self::DESCRIPTOR_SET_LAYOUT_CREATE_INFO => Some("DESCRIPTOR_SET_LAYOUT_CREATE_INFO"), Self::DESCRIPTOR_POOL_CREATE_INFO => Some("DESCRIPTOR_POOL_CREATE_INFO"), Self::DESCRIPTOR_SET_ALLOCATE_INFO => Some("DESCRIPTOR_SET_ALLOCATE_INFO"), Self::WRITE_DESCRIPTOR_SET => Some("WRITE_DESCRIPTOR_SET"), Self::COPY_DESCRIPTOR_SET => Some("COPY_DESCRIPTOR_SET"), Self::FRAMEBUFFER_CREATE_INFO => Some("FRAMEBUFFER_CREATE_INFO"), Self::RENDER_PASS_CREATE_INFO => Some("RENDER_PASS_CREATE_INFO"), Self::COMMAND_POOL_CREATE_INFO => Some("COMMAND_POOL_CREATE_INFO"), Self::COMMAND_BUFFER_ALLOCATE_INFO => Some("COMMAND_BUFFER_ALLOCATE_INFO"), Self::COMMAND_BUFFER_INHERITANCE_INFO => Some("COMMAND_BUFFER_INHERITANCE_INFO"), Self::COMMAND_BUFFER_BEGIN_INFO => Some("COMMAND_BUFFER_BEGIN_INFO"), Self::RENDER_PASS_BEGIN_INFO => Some("RENDER_PASS_BEGIN_INFO"), Self::BUFFER_MEMORY_BARRIER => Some("BUFFER_MEMORY_BARRIER"), Self::IMAGE_MEMORY_BARRIER => Some("IMAGE_MEMORY_BARRIER"), Self::MEMORY_BARRIER => Some("MEMORY_BARRIER"), Self::LOADER_INSTANCE_CREATE_INFO => Some("LOADER_INSTANCE_CREATE_INFO"), Self::LOADER_DEVICE_CREATE_INFO => Some("LOADER_DEVICE_CREATE_INFO"), Self::SWAPCHAIN_CREATE_INFO_KHR => Some("SWAPCHAIN_CREATE_INFO_KHR"), Self::PRESENT_INFO_KHR => Some("PRESENT_INFO_KHR"), Self::DEVICE_GROUP_PRESENT_CAPABILITIES_KHR => { Some("DEVICE_GROUP_PRESENT_CAPABILITIES_KHR") } Self::IMAGE_SWAPCHAIN_CREATE_INFO_KHR => Some("IMAGE_SWAPCHAIN_CREATE_INFO_KHR"), Self::BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR => { Some("BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR") } Self::ACQUIRE_NEXT_IMAGE_INFO_KHR => Some("ACQUIRE_NEXT_IMAGE_INFO_KHR"), Self::DEVICE_GROUP_PRESENT_INFO_KHR => Some("DEVICE_GROUP_PRESENT_INFO_KHR"), Self::DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR => { Some("DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR") } Self::DISPLAY_MODE_CREATE_INFO_KHR => Some("DISPLAY_MODE_CREATE_INFO_KHR"), Self::DISPLAY_SURFACE_CREATE_INFO_KHR => Some("DISPLAY_SURFACE_CREATE_INFO_KHR"), Self::DISPLAY_PRESENT_INFO_KHR => Some("DISPLAY_PRESENT_INFO_KHR"), Self::XLIB_SURFACE_CREATE_INFO_KHR => Some("XLIB_SURFACE_CREATE_INFO_KHR"), Self::XCB_SURFACE_CREATE_INFO_KHR => Some("XCB_SURFACE_CREATE_INFO_KHR"), Self::WAYLAND_SURFACE_CREATE_INFO_KHR => Some("WAYLAND_SURFACE_CREATE_INFO_KHR"), Self::ANDROID_SURFACE_CREATE_INFO_KHR => Some("ANDROID_SURFACE_CREATE_INFO_KHR"), Self::WIN32_SURFACE_CREATE_INFO_KHR => Some("WIN32_SURFACE_CREATE_INFO_KHR"), 
Self::NATIVE_BUFFER_ANDROID => Some("NATIVE_BUFFER_ANDROID"), Self::SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID => { Some("SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID") } Self::PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID => { Some("PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID") } Self::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT => { Some("DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT") } Self::PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD => { Some("PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD") } Self::DEBUG_MARKER_OBJECT_NAME_INFO_EXT => Some("DEBUG_MARKER_OBJECT_NAME_INFO_EXT"), Self::DEBUG_MARKER_OBJECT_TAG_INFO_EXT => Some("DEBUG_MARKER_OBJECT_TAG_INFO_EXT"), Self::DEBUG_MARKER_MARKER_INFO_EXT => Some("DEBUG_MARKER_MARKER_INFO_EXT"), Self::VIDEO_PROFILE_INFO_KHR => Some("VIDEO_PROFILE_INFO_KHR"), Self::VIDEO_CAPABILITIES_KHR => Some("VIDEO_CAPABILITIES_KHR"), Self::VIDEO_PICTURE_RESOURCE_INFO_KHR => Some("VIDEO_PICTURE_RESOURCE_INFO_KHR"), Self::VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR => { Some("VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR") } Self::BIND_VIDEO_SESSION_MEMORY_INFO_KHR => Some("BIND_VIDEO_SESSION_MEMORY_INFO_KHR"), Self::VIDEO_SESSION_CREATE_INFO_KHR => Some("VIDEO_SESSION_CREATE_INFO_KHR"), Self::VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR => { Some("VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR") } Self::VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR => { Some("VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR") } Self::VIDEO_BEGIN_CODING_INFO_KHR => Some("VIDEO_BEGIN_CODING_INFO_KHR"), Self::VIDEO_END_CODING_INFO_KHR => Some("VIDEO_END_CODING_INFO_KHR"), Self::VIDEO_CODING_CONTROL_INFO_KHR => Some("VIDEO_CODING_CONTROL_INFO_KHR"), Self::VIDEO_REFERENCE_SLOT_INFO_KHR => Some("VIDEO_REFERENCE_SLOT_INFO_KHR"), Self::QUEUE_FAMILY_VIDEO_PROPERTIES_KHR => Some("QUEUE_FAMILY_VIDEO_PROPERTIES_KHR"), Self::VIDEO_PROFILE_LIST_INFO_KHR => Some("VIDEO_PROFILE_LIST_INFO_KHR"), Self::PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR => { Some("PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR") } Self::VIDEO_FORMAT_PROPERTIES_KHR => Some("VIDEO_FORMAT_PROPERTIES_KHR"), Self::QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR => { Some("QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR") } Self::VIDEO_DECODE_INFO_KHR => Some("VIDEO_DECODE_INFO_KHR"), Self::VIDEO_DECODE_CAPABILITIES_KHR => Some("VIDEO_DECODE_CAPABILITIES_KHR"), Self::VIDEO_DECODE_USAGE_INFO_KHR => Some("VIDEO_DECODE_USAGE_INFO_KHR"), Self::DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV => { Some("DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV") } Self::DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV => { Some("DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV") } Self::DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV => { Some("DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV") } Self::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT => { Some("PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT") } Self::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT") } Self::PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT => { Some("PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT") } Self::CU_MODULE_CREATE_INFO_NVX => Some("CU_MODULE_CREATE_INFO_NVX"), Self::CU_FUNCTION_CREATE_INFO_NVX => Some("CU_FUNCTION_CREATE_INFO_NVX"), Self::CU_LAUNCH_INFO_NVX => Some("CU_LAUNCH_INFO_NVX"), Self::IMAGE_VIEW_HANDLE_INFO_NVX => Some("IMAGE_VIEW_HANDLE_INFO_NVX"), Self::IMAGE_VIEW_ADDRESS_PROPERTIES_NVX => Some("IMAGE_VIEW_ADDRESS_PROPERTIES_NVX"), Self::VIDEO_ENCODE_H264_CAPABILITIES_KHR => Some("VIDEO_ENCODE_H264_CAPABILITIES_KHR"), 
Self::VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR => { Some("VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR") } Self::VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR => { Some("VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR") } Self::VIDEO_ENCODE_H264_PICTURE_INFO_KHR => Some("VIDEO_ENCODE_H264_PICTURE_INFO_KHR"), Self::VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR => { Some("VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR") } Self::VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR => { Some("VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR") } Self::VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR => { Some("VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR") } Self::VIDEO_ENCODE_H264_PROFILE_INFO_KHR => Some("VIDEO_ENCODE_H264_PROFILE_INFO_KHR"), Self::VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR => { Some("VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR") } Self::VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR => { Some("VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR") } Self::VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR => { Some("VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR") } Self::VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR => { Some("VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR") } Self::VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR => { Some("VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR") } Self::VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR => { Some("VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR") } Self::VIDEO_ENCODE_H265_CAPABILITIES_KHR => Some("VIDEO_ENCODE_H265_CAPABILITIES_KHR"), Self::VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR => { Some("VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR") } Self::VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR => { Some("VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR") } Self::VIDEO_ENCODE_H265_PICTURE_INFO_KHR => Some("VIDEO_ENCODE_H265_PICTURE_INFO_KHR"), Self::VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR => { Some("VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR") } Self::VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR => { Some("VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR") } Self::VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR => { Some("VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR") } Self::VIDEO_ENCODE_H265_PROFILE_INFO_KHR => Some("VIDEO_ENCODE_H265_PROFILE_INFO_KHR"), Self::VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR => { Some("VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR") } Self::VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR => { Some("VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR") } Self::VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR => { Some("VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR") } Self::VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR => { Some("VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR") } Self::VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR => { Some("VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR") } Self::VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR => { Some("VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR") } Self::VIDEO_DECODE_H264_CAPABILITIES_KHR => Some("VIDEO_DECODE_H264_CAPABILITIES_KHR"), Self::VIDEO_DECODE_H264_PICTURE_INFO_KHR => Some("VIDEO_DECODE_H264_PICTURE_INFO_KHR"), Self::VIDEO_DECODE_H264_PROFILE_INFO_KHR => Some("VIDEO_DECODE_H264_PROFILE_INFO_KHR"), Self::VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR => { Some("VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR") } Self::VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR => { Some("VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR") } Self::VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR => { Some("VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR") } 
Self::TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD => { Some("TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD") } Self::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR => { Some("RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR") } Self::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT => { Some("RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT") } Self::ATTACHMENT_SAMPLE_COUNT_INFO_AMD => Some("ATTACHMENT_SAMPLE_COUNT_INFO_AMD"), Self::MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX => { Some("MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX") } Self::STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP => { Some("STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP") } Self::PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV => { Some("PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV") } Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV => { Some("EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV") } Self::EXPORT_MEMORY_ALLOCATE_INFO_NV => Some("EXPORT_MEMORY_ALLOCATE_INFO_NV"), Self::IMPORT_MEMORY_WIN32_HANDLE_INFO_NV => Some("IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"), Self::EXPORT_MEMORY_WIN32_HANDLE_INFO_NV => Some("EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"), Self::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV => { Some("WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV") } Self::VALIDATION_FLAGS_EXT => Some("VALIDATION_FLAGS_EXT"), Self::VI_SURFACE_CREATE_INFO_NN => Some("VI_SURFACE_CREATE_INFO_NN"), Self::IMAGE_VIEW_ASTC_DECODE_MODE_EXT => Some("IMAGE_VIEW_ASTC_DECODE_MODE_EXT"), Self::PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT") } Self::PIPELINE_ROBUSTNESS_CREATE_INFO_EXT => { Some("PIPELINE_ROBUSTNESS_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT") } Self::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT") } Self::IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR => { Some("IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR") } Self::EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR => { Some("EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR") } Self::MEMORY_WIN32_HANDLE_PROPERTIES_KHR => Some("MEMORY_WIN32_HANDLE_PROPERTIES_KHR"), Self::MEMORY_GET_WIN32_HANDLE_INFO_KHR => Some("MEMORY_GET_WIN32_HANDLE_INFO_KHR"), Self::IMPORT_MEMORY_FD_INFO_KHR => Some("IMPORT_MEMORY_FD_INFO_KHR"), Self::MEMORY_FD_PROPERTIES_KHR => Some("MEMORY_FD_PROPERTIES_KHR"), Self::MEMORY_GET_FD_INFO_KHR => Some("MEMORY_GET_FD_INFO_KHR"), Self::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR => { Some("WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR") } Self::IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR => { Some("IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR") } Self::EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR => { Some("EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR") } Self::D3D12_FENCE_SUBMIT_INFO_KHR => Some("D3D12_FENCE_SUBMIT_INFO_KHR"), Self::SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR => { Some("SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR") } Self::IMPORT_SEMAPHORE_FD_INFO_KHR => Some("IMPORT_SEMAPHORE_FD_INFO_KHR"), Self::SEMAPHORE_GET_FD_INFO_KHR => Some("SEMAPHORE_GET_FD_INFO_KHR"), Self::PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR") } Self::COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT => { Some("COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT") } Self::PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT => { Some("PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT") } Self::CONDITIONAL_RENDERING_BEGIN_INFO_EXT => { Some("CONDITIONAL_RENDERING_BEGIN_INFO_EXT") } Self::PRESENT_REGIONS_KHR => 
Some("PRESENT_REGIONS_KHR"), Self::PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV => { Some("PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV") } Self::SURFACE_CAPABILITIES_2_EXT => Some("SURFACE_CAPABILITIES_2_EXT"), Self::DISPLAY_POWER_INFO_EXT => Some("DISPLAY_POWER_INFO_EXT"), Self::DEVICE_EVENT_INFO_EXT => Some("DEVICE_EVENT_INFO_EXT"), Self::DISPLAY_EVENT_INFO_EXT => Some("DISPLAY_EVENT_INFO_EXT"), Self::SWAPCHAIN_COUNTER_CREATE_INFO_EXT => Some("SWAPCHAIN_COUNTER_CREATE_INFO_EXT"), Self::PRESENT_TIMES_INFO_GOOGLE => Some("PRESENT_TIMES_INFO_GOOGLE"), Self::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX => { Some("PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX") } Self::PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV => { Some("PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV") } Self::PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT") } Self::PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT => { Some("PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT") } Self::PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT => { Some("PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT") } Self::PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT => { Some("PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT") } Self::HDR_METADATA_EXT => Some("HDR_METADATA_EXT"), Self::PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG => { Some("PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG") } Self::SHARED_PRESENT_SURFACE_CAPABILITIES_KHR => { Some("SHARED_PRESENT_SURFACE_CAPABILITIES_KHR") } Self::IMPORT_FENCE_WIN32_HANDLE_INFO_KHR => Some("IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"), Self::EXPORT_FENCE_WIN32_HANDLE_INFO_KHR => Some("EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"), Self::FENCE_GET_WIN32_HANDLE_INFO_KHR => Some("FENCE_GET_WIN32_HANDLE_INFO_KHR"), Self::IMPORT_FENCE_FD_INFO_KHR => Some("IMPORT_FENCE_FD_INFO_KHR"), Self::FENCE_GET_FD_INFO_KHR => Some("FENCE_GET_FD_INFO_KHR"), Self::PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR => { Some("PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR") } Self::PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR") } Self::QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR => { Some("QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR") } Self::PERFORMANCE_QUERY_SUBMIT_INFO_KHR => Some("PERFORMANCE_QUERY_SUBMIT_INFO_KHR"), Self::ACQUIRE_PROFILING_LOCK_INFO_KHR => Some("ACQUIRE_PROFILING_LOCK_INFO_KHR"), Self::PERFORMANCE_COUNTER_KHR => Some("PERFORMANCE_COUNTER_KHR"), Self::PERFORMANCE_COUNTER_DESCRIPTION_KHR => { Some("PERFORMANCE_COUNTER_DESCRIPTION_KHR") } Self::PHYSICAL_DEVICE_SURFACE_INFO_2_KHR => Some("PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"), Self::SURFACE_CAPABILITIES_2_KHR => Some("SURFACE_CAPABILITIES_2_KHR"), Self::SURFACE_FORMAT_2_KHR => Some("SURFACE_FORMAT_2_KHR"), Self::DISPLAY_PROPERTIES_2_KHR => Some("DISPLAY_PROPERTIES_2_KHR"), Self::DISPLAY_PLANE_PROPERTIES_2_KHR => Some("DISPLAY_PLANE_PROPERTIES_2_KHR"), Self::DISPLAY_MODE_PROPERTIES_2_KHR => Some("DISPLAY_MODE_PROPERTIES_2_KHR"), Self::DISPLAY_PLANE_INFO_2_KHR => Some("DISPLAY_PLANE_INFO_2_KHR"), Self::DISPLAY_PLANE_CAPABILITIES_2_KHR => Some("DISPLAY_PLANE_CAPABILITIES_2_KHR"), Self::IOS_SURFACE_CREATE_INFO_MVK => 
Some("IOS_SURFACE_CREATE_INFO_MVK"), Self::MACOS_SURFACE_CREATE_INFO_MVK => Some("MACOS_SURFACE_CREATE_INFO_MVK"), Self::DEBUG_UTILS_OBJECT_NAME_INFO_EXT => Some("DEBUG_UTILS_OBJECT_NAME_INFO_EXT"), Self::DEBUG_UTILS_OBJECT_TAG_INFO_EXT => Some("DEBUG_UTILS_OBJECT_TAG_INFO_EXT"), Self::DEBUG_UTILS_LABEL_EXT => Some("DEBUG_UTILS_LABEL_EXT"), Self::DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT => { Some("DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT") } Self::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT => { Some("DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT") } Self::ANDROID_HARDWARE_BUFFER_USAGE_ANDROID => { Some("ANDROID_HARDWARE_BUFFER_USAGE_ANDROID") } Self::ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID => { Some("ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID") } Self::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID => { Some("ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID") } Self::IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID => { Some("IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID") } Self::MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID => { Some("MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID") } Self::EXTERNAL_FORMAT_ANDROID => Some("EXTERNAL_FORMAT_ANDROID"), Self::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID => { Some("ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID") } Self::PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX => { Some("PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX") } Self::PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX => { Some("PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX") } Self::EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX => { Some("EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX") } Self::EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX => { Some("EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX") } Self::PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX => { Some("PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX") } Self::SAMPLE_LOCATIONS_INFO_EXT => Some("SAMPLE_LOCATIONS_INFO_EXT"), Self::RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT => { Some("RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT") } Self::PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT => { Some("PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT") } Self::MULTISAMPLE_PROPERTIES_EXT => Some("MULTISAMPLE_PROPERTIES_EXT"), Self::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT => { Some("PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT") } Self::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT") } Self::PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT => { Some("PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT") } Self::PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV => { Some("PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV") } Self::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR => { Some("WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR") } Self::ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR => { Some("ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR") } Self::ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR => { Some("ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR") } Self::ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR => { Some("ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR") } Self::ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR => { Some("ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR") } Self::ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR => { Some("ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR") } 
Self::ACCELERATION_STRUCTURE_GEOMETRY_KHR => { Some("ACCELERATION_STRUCTURE_GEOMETRY_KHR") } Self::ACCELERATION_STRUCTURE_VERSION_INFO_KHR => { Some("ACCELERATION_STRUCTURE_VERSION_INFO_KHR") } Self::COPY_ACCELERATION_STRUCTURE_INFO_KHR => { Some("COPY_ACCELERATION_STRUCTURE_INFO_KHR") } Self::COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR => { Some("COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR") } Self::COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR => { Some("COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR") } Self::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR => { Some("PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR") } Self::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR") } Self::ACCELERATION_STRUCTURE_CREATE_INFO_KHR => { Some("ACCELERATION_STRUCTURE_CREATE_INFO_KHR") } Self::ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR => { Some("ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR") } Self::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR => { Some("PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR") } Self::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR") } Self::RAY_TRACING_PIPELINE_CREATE_INFO_KHR => { Some("RAY_TRACING_PIPELINE_CREATE_INFO_KHR") } Self::RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR => { Some("RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR") } Self::RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR => { Some("RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR => { Some("PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR") } Self::PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV => { Some("PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV") } Self::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV => { Some("PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV") } Self::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV") } Self::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT => { Some("DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT") } Self::PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT => { Some("PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT") } Self::IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT => { Some("IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT") } Self::IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT => { Some("IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT") } Self::IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT => { Some("IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT") } Self::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT => { Some("DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT") } Self::VALIDATION_CACHE_CREATE_INFO_EXT => Some("VALIDATION_CACHE_CREATE_INFO_EXT"), Self::SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT => { Some("SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR => { Some("PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR") } Self::PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR") } Self::PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV => { Some("PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV") } Self::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV => { Some("PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV") } Self::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV") } Self::PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV => { 
Some("PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV") } Self::RAY_TRACING_PIPELINE_CREATE_INFO_NV => { Some("RAY_TRACING_PIPELINE_CREATE_INFO_NV") } Self::ACCELERATION_STRUCTURE_CREATE_INFO_NV => { Some("ACCELERATION_STRUCTURE_CREATE_INFO_NV") } Self::GEOMETRY_NV => Some("GEOMETRY_NV"), Self::GEOMETRY_TRIANGLES_NV => Some("GEOMETRY_TRIANGLES_NV"), Self::GEOMETRY_AABB_NV => Some("GEOMETRY_AABB_NV"), Self::BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV => { Some("BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV") } Self::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV => { Some("WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV") } Self::ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV => { Some("ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV") } Self::PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV") } Self::RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV => { Some("RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV") } Self::ACCELERATION_STRUCTURE_INFO_NV => Some("ACCELERATION_STRUCTURE_INFO_NV"), Self::PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV => { Some("PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV") } Self::PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV => { Some("PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV") } Self::PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT => { Some("PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT") } Self::FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT => { Some("FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT") } Self::IMPORT_MEMORY_HOST_POINTER_INFO_EXT => { Some("IMPORT_MEMORY_HOST_POINTER_INFO_EXT") } Self::MEMORY_HOST_POINTER_PROPERTIES_EXT => Some("MEMORY_HOST_POINTER_PROPERTIES_EXT"), Self::PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR") } Self::PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD => { Some("PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD") } Self::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD => { Some("PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD") } Self::VIDEO_DECODE_H265_CAPABILITIES_KHR => Some("VIDEO_DECODE_H265_CAPABILITIES_KHR"), Self::VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR => { Some("VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR") } Self::VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR => { Some("VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR") } Self::VIDEO_DECODE_H265_PROFILE_INFO_KHR => Some("VIDEO_DECODE_H265_PROFILE_INFO_KHR"), Self::VIDEO_DECODE_H265_PICTURE_INFO_KHR => Some("VIDEO_DECODE_H265_PICTURE_INFO_KHR"), Self::VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR => { Some("VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR") } Self::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR => { Some("DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR => { Some("PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR") } Self::QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR => { Some("QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR") } Self::DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD => { Some("DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD") } Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT") } Self::PRESENT_FRAME_TOKEN_GGP => Some("PRESENT_FRAME_TOKEN_GGP"), Self::PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV => { 
Some("PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV") } Self::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV => { Some("PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV") } Self::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV => { Some("PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV") } Self::PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV => { Some("PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV") } Self::PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV => { Some("PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV") } Self::CHECKPOINT_DATA_NV => Some("CHECKPOINT_DATA_NV"), Self::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV => { Some("QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL => { Some("PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL") } Self::QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL => { Some("QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL") } Self::INITIALIZE_PERFORMANCE_API_INFO_INTEL => { Some("INITIALIZE_PERFORMANCE_API_INFO_INTEL") } Self::PERFORMANCE_MARKER_INFO_INTEL => Some("PERFORMANCE_MARKER_INFO_INTEL"), Self::PERFORMANCE_STREAM_MARKER_INFO_INTEL => { Some("PERFORMANCE_STREAM_MARKER_INFO_INTEL") } Self::PERFORMANCE_OVERRIDE_INFO_INTEL => Some("PERFORMANCE_OVERRIDE_INFO_INTEL"), Self::PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL => { Some("PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL") } Self::PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT") } Self::DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD => { Some("DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD") } Self::SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD => { Some("SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD") } Self::IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA => { Some("IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA") } Self::METAL_SURFACE_CREATE_INFO_EXT => Some("METAL_SURFACE_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT => { Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT") } Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT") } Self::RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT => { Some("RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT") } Self::FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR => { Some("FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR") } Self::PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR => { Some("PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR") } Self::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD => { Some("PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD") } Self::PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD => { Some("PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD") } Self::PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR => { Some("PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR") } Self::RENDERING_ATTACHMENT_LOCATION_INFO_KHR => { Some("RENDERING_ATTACHMENT_LOCATION_INFO_KHR") } Self::RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR => { Some("RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR") } 
Self::PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT") } Self::PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR") } Self::PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT => { Some("PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT") } Self::MEMORY_PRIORITY_ALLOCATE_INFO_EXT => Some("MEMORY_PRIORITY_ALLOCATE_INFO_EXT"), Self::SURFACE_PROTECTED_CAPABILITIES_KHR => Some("SURFACE_PROTECTED_CAPABILITIES_KHR"), Self::PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV => { Some("PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV") } Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT") } Self::BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT => { Some("BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT") } Self::VALIDATION_FEATURES_EXT => Some("VALIDATION_FEATURES_EXT"), Self::PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR => { Some("PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR") } Self::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV => { Some("PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV") } Self::COOPERATIVE_MATRIX_PROPERTIES_NV => Some("COOPERATIVE_MATRIX_PROPERTIES_NV"), Self::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV => { Some("PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV") } Self::PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV => { Some("PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV") } Self::FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV => { Some("FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT") } Self::PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT") } Self::PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT") } Self::PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT => { Some("PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT") } Self::SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT => { Some("SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT") } Self::SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT => { Some("SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT") } Self::SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT => { Some("SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT") } Self::HEADLESS_SURFACE_CREATE_INFO_EXT => Some("HEADLESS_SURFACE_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT") } Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT") } Self::PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR => { Some("PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR") } Self::PIPELINE_INFO_KHR => Some("PIPELINE_INFO_KHR"), Self::PIPELINE_EXECUTABLE_PROPERTIES_KHR => Some("PIPELINE_EXECUTABLE_PROPERTIES_KHR"), Self::PIPELINE_EXECUTABLE_INFO_KHR => Some("PIPELINE_EXECUTABLE_INFO_KHR"), Self::PIPELINE_EXECUTABLE_STATISTIC_KHR => 
Some("PIPELINE_EXECUTABLE_STATISTIC_KHR"), Self::PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR => { Some("PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR") } Self::PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT => { Some("PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT") } Self::PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT") } Self::MEMORY_TO_IMAGE_COPY_EXT => Some("MEMORY_TO_IMAGE_COPY_EXT"), Self::IMAGE_TO_MEMORY_COPY_EXT => Some("IMAGE_TO_MEMORY_COPY_EXT"), Self::COPY_IMAGE_TO_MEMORY_INFO_EXT => Some("COPY_IMAGE_TO_MEMORY_INFO_EXT"), Self::COPY_MEMORY_TO_IMAGE_INFO_EXT => Some("COPY_MEMORY_TO_IMAGE_INFO_EXT"), Self::HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT => { Some("HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT") } Self::COPY_IMAGE_TO_IMAGE_INFO_EXT => Some("COPY_IMAGE_TO_IMAGE_INFO_EXT"), Self::SUBRESOURCE_HOST_MEMCPY_SIZE_EXT => Some("SUBRESOURCE_HOST_MEMCPY_SIZE_EXT"), Self::HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT => { Some("HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT") } Self::MEMORY_MAP_INFO_KHR => Some("MEMORY_MAP_INFO_KHR"), Self::MEMORY_UNMAP_INFO_KHR => Some("MEMORY_UNMAP_INFO_KHR"), Self::PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT => { Some("PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT") } Self::PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT") } Self::MEMORY_MAP_PLACED_INFO_EXT => Some("MEMORY_MAP_PLACED_INFO_EXT"), Self::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT") } Self::SURFACE_PRESENT_MODE_EXT => Some("SURFACE_PRESENT_MODE_EXT"), Self::SURFACE_PRESENT_SCALING_CAPABILITIES_EXT => { Some("SURFACE_PRESENT_SCALING_CAPABILITIES_EXT") } Self::SURFACE_PRESENT_MODE_COMPATIBILITY_EXT => { Some("SURFACE_PRESENT_MODE_COMPATIBILITY_EXT") } Self::PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT") } Self::SWAPCHAIN_PRESENT_FENCE_INFO_EXT => Some("SWAPCHAIN_PRESENT_FENCE_INFO_EXT"), Self::SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT => { Some("SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT") } Self::SWAPCHAIN_PRESENT_MODE_INFO_EXT => Some("SWAPCHAIN_PRESENT_MODE_INFO_EXT"), Self::SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT => { Some("SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT") } Self::RELEASE_SWAPCHAIN_IMAGES_INFO_EXT => Some("RELEASE_SWAPCHAIN_IMAGES_INFO_EXT"), Self::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV") } Self::GRAPHICS_SHADER_GROUP_CREATE_INFO_NV => { Some("GRAPHICS_SHADER_GROUP_CREATE_INFO_NV") } Self::GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV => { Some("GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV") } Self::INDIRECT_COMMANDS_LAYOUT_TOKEN_NV => Some("INDIRECT_COMMANDS_LAYOUT_TOKEN_NV"), Self::INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV => { Some("INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV") } Self::GENERATED_COMMANDS_INFO_NV => Some("GENERATED_COMMANDS_INFO_NV"), Self::GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV => { Some("GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV") } Self::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV => { Some("PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV") } Self::PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV => { Some("PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV") } Self::COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV => { Some("COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV") } 
Self::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT => { Some("PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT") } Self::COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM => { Some("COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM") } Self::RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM => { Some("RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM") } Self::PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT => { Some("PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT") } Self::DEPTH_BIAS_INFO_EXT => Some("DEPTH_BIAS_INFO_EXT"), Self::DEPTH_BIAS_REPRESENTATION_INFO_EXT => Some("DEPTH_BIAS_REPRESENTATION_INFO_EXT"), Self::PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT => { Some("PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT") } Self::DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT => { Some("DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT") } Self::DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT => { Some("DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT") } Self::PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT => { Some("PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT") } Self::PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT") } Self::SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT => { Some("SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT => { Some("PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT") } Self::PIPELINE_LIBRARY_CREATE_INFO_KHR => Some("PIPELINE_LIBRARY_CREATE_INFO_KHR"), Self::PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV => { Some("PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV") } Self::SURFACE_CAPABILITIES_PRESENT_BARRIER_NV => { Some("SURFACE_CAPABILITIES_PRESENT_BARRIER_NV") } Self::SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV => { Some("SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV") } Self::PRESENT_ID_KHR => Some("PRESENT_ID_KHR"), Self::PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR => { Some("PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR") } Self::VIDEO_ENCODE_INFO_KHR => Some("VIDEO_ENCODE_INFO_KHR"), Self::VIDEO_ENCODE_RATE_CONTROL_INFO_KHR => Some("VIDEO_ENCODE_RATE_CONTROL_INFO_KHR"), Self::VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR => { Some("VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR") } Self::VIDEO_ENCODE_CAPABILITIES_KHR => Some("VIDEO_ENCODE_CAPABILITIES_KHR"), Self::VIDEO_ENCODE_USAGE_INFO_KHR => Some("VIDEO_ENCODE_USAGE_INFO_KHR"), Self::QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR => { Some("QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR => { Some("PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR") } Self::VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR => { Some("VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR") } Self::VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR => { Some("VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR") } Self::VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR => { Some("VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR") } Self::VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR => { Some("VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR") } Self::PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV => { Some("PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV") } Self::DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV => { Some("DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV") } Self::CUDA_MODULE_CREATE_INFO_NV => Some("CUDA_MODULE_CREATE_INFO_NV"), Self::CUDA_FUNCTION_CREATE_INFO_NV => Some("CUDA_FUNCTION_CREATE_INFO_NV"), Self::CUDA_LAUNCH_INFO_NV => 
Some("CUDA_LAUNCH_INFO_NV"), Self::PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV => { Some("PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV") } Self::PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV") } Self::QUERY_LOW_LATENCY_SUPPORT_NV => Some("QUERY_LOW_LATENCY_SUPPORT_NV"), Self::EXPORT_METAL_OBJECT_CREATE_INFO_EXT => { Some("EXPORT_METAL_OBJECT_CREATE_INFO_EXT") } Self::EXPORT_METAL_OBJECTS_INFO_EXT => Some("EXPORT_METAL_OBJECTS_INFO_EXT"), Self::EXPORT_METAL_DEVICE_INFO_EXT => Some("EXPORT_METAL_DEVICE_INFO_EXT"), Self::EXPORT_METAL_COMMAND_QUEUE_INFO_EXT => { Some("EXPORT_METAL_COMMAND_QUEUE_INFO_EXT") } Self::EXPORT_METAL_BUFFER_INFO_EXT => Some("EXPORT_METAL_BUFFER_INFO_EXT"), Self::IMPORT_METAL_BUFFER_INFO_EXT => Some("IMPORT_METAL_BUFFER_INFO_EXT"), Self::EXPORT_METAL_TEXTURE_INFO_EXT => Some("EXPORT_METAL_TEXTURE_INFO_EXT"), Self::IMPORT_METAL_TEXTURE_INFO_EXT => Some("IMPORT_METAL_TEXTURE_INFO_EXT"), Self::EXPORT_METAL_IO_SURFACE_INFO_EXT => Some("EXPORT_METAL_IO_SURFACE_INFO_EXT"), Self::IMPORT_METAL_IO_SURFACE_INFO_EXT => Some("IMPORT_METAL_IO_SURFACE_INFO_EXT"), Self::EXPORT_METAL_SHARED_EVENT_INFO_EXT => Some("EXPORT_METAL_SHARED_EVENT_INFO_EXT"), Self::IMPORT_METAL_SHARED_EVENT_INFO_EXT => Some("IMPORT_METAL_SHARED_EVENT_INFO_EXT"), Self::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV => { Some("QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV") } Self::CHECKPOINT_DATA_2_NV => Some("CHECKPOINT_DATA_2_NV"), Self::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT => { Some("PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT") } Self::DESCRIPTOR_ADDRESS_INFO_EXT => Some("DESCRIPTOR_ADDRESS_INFO_EXT"), Self::DESCRIPTOR_GET_INFO_EXT => Some("DESCRIPTOR_GET_INFO_EXT"), Self::BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => { Some("BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT") } Self::IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => { Some("IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT") } Self::IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => { Some("IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT") } Self::SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => { Some("SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT") } Self::OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT => { Some("OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT") } Self::DESCRIPTOR_BUFFER_BINDING_INFO_EXT => Some("DESCRIPTOR_BUFFER_BINDING_INFO_EXT"), Self::DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT => { Some("DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT") } Self::ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => { Some("ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT") } Self::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT => { Some("PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT") } Self::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT") } Self::GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT => { Some("GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD => { Some("PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR") 
} Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR") } Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV => { Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV") } Self::PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV => { Some("PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV") } Self::ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV => { Some("ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV") } Self::PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV => { Some("PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV") } Self::ACCELERATION_STRUCTURE_MOTION_INFO_NV => { Some("ACCELERATION_STRUCTURE_MOTION_INFO_NV") } Self::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT => { Some("PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT") } Self::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT") } Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT => { Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT") } Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT") } Self::COPY_COMMAND_TRANSFORM_INFO_QCOM => Some("COPY_COMMAND_TRANSFORM_INFO_QCOM"), Self::PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR => { Some("PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR") } Self::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT => { Some("PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT") } Self::IMAGE_COMPRESSION_CONTROL_EXT => Some("IMAGE_COMPRESSION_CONTROL_EXT"), Self::IMAGE_COMPRESSION_PROPERTIES_EXT => Some("IMAGE_COMPRESSION_PROPERTIES_EXT"), Self::PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT => { Some("PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT") } Self::PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT") } Self::PHYSICAL_DEVICE_FAULT_FEATURES_EXT => Some("PHYSICAL_DEVICE_FAULT_FEATURES_EXT"), Self::DEVICE_FAULT_COUNTS_EXT => Some("DEVICE_FAULT_COUNTS_EXT"), Self::DEVICE_FAULT_INFO_EXT => Some("DEVICE_FAULT_INFO_EXT"), Self::PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT") } Self::DIRECTFB_SURFACE_CREATE_INFO_EXT => Some("DIRECTFB_SURFACE_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT") } Self::VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT => { Some("VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT") } Self::VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT => { Some("VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT") } Self::PHYSICAL_DEVICE_DRM_PROPERTIES_EXT => Some("PHYSICAL_DEVICE_DRM_PROPERTIES_EXT"), Self::PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT => { Some("PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT") } Self::DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT => { Some("DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT") } Self::PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT => { 
Some("PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT") } Self::PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT => { Some("PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT") } Self::IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA => { Some("IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA") } Self::MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA => { Some("MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA") } Self::MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA => { Some("MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA") } Self::IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA => { Some("IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA") } Self::SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA => { Some("SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA") } Self::BUFFER_COLLECTION_CREATE_INFO_FUCHSIA => { Some("BUFFER_COLLECTION_CREATE_INFO_FUCHSIA") } Self::IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA => { Some("IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA") } Self::BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA => { Some("BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA") } Self::BUFFER_COLLECTION_PROPERTIES_FUCHSIA => { Some("BUFFER_COLLECTION_PROPERTIES_FUCHSIA") } Self::BUFFER_CONSTRAINTS_INFO_FUCHSIA => Some("BUFFER_CONSTRAINTS_INFO_FUCHSIA"), Self::BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA => { Some("BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA") } Self::IMAGE_CONSTRAINTS_INFO_FUCHSIA => Some("IMAGE_CONSTRAINTS_INFO_FUCHSIA"), Self::IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA => { Some("IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA") } Self::SYSMEM_COLOR_SPACE_FUCHSIA => Some("SYSMEM_COLOR_SPACE_FUCHSIA"), Self::BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA => { Some("BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA") } Self::SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI => { Some("SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI") } Self::PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI => { Some("PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI") } Self::PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI => { Some("PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI") } Self::PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI => { Some("PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI") } Self::MEMORY_GET_REMOTE_ADDRESS_INFO_NV => Some("MEMORY_GET_REMOTE_ADDRESS_INFO_NV"), Self::PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV => { Some("PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV") } Self::PIPELINE_PROPERTIES_IDENTIFIER_EXT => Some("PIPELINE_PROPERTIES_IDENTIFIER_EXT"), Self::PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT") } Self::PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT => { Some("PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT") } Self::FRAME_BOUNDARY_EXT => Some("FRAME_BOUNDARY_EXT"), Self::PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT => { Some("PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT") } Self::SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT => { Some("SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT") } Self::MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT => { Some("MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT") } Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT => { Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT") } Self::SCREEN_SURFACE_CREATE_INFO_QNX => Some("SCREEN_SURFACE_CREATE_INFO_QNX"), Self::PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT") } 
Self::PIPELINE_COLOR_WRITE_CREATE_INFO_EXT => { Some("PIPELINE_COLOR_WRITE_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT") } Self::PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR => { Some("PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR") } Self::PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT => { Some("PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT") } Self::IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT => Some("IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT => { Some("PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT") } Self::PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT => { Some("PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT") } Self::PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT") } Self::PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT") } Self::MICROMAP_BUILD_INFO_EXT => Some("MICROMAP_BUILD_INFO_EXT"), Self::MICROMAP_VERSION_INFO_EXT => Some("MICROMAP_VERSION_INFO_EXT"), Self::COPY_MICROMAP_INFO_EXT => Some("COPY_MICROMAP_INFO_EXT"), Self::COPY_MICROMAP_TO_MEMORY_INFO_EXT => Some("COPY_MICROMAP_TO_MEMORY_INFO_EXT"), Self::COPY_MEMORY_TO_MICROMAP_INFO_EXT => Some("COPY_MEMORY_TO_MICROMAP_INFO_EXT"), Self::PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT => { Some("PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT") } Self::PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT") } Self::MICROMAP_CREATE_INFO_EXT => Some("MICROMAP_CREATE_INFO_EXT"), Self::MICROMAP_BUILD_SIZES_INFO_EXT => Some("MICROMAP_BUILD_SIZES_INFO_EXT"), Self::ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT => { Some("ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT") } Self::PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV => { Some("PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV") } Self::PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV") } Self::ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV => { Some("ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV") } Self::PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI => { Some("PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI") } Self::PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI => { Some("PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI") } Self::PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI => { Some("PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI") } Self::PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT") } Self::SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT => { Some("SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT") } Self::PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT") } Self::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM => { Some("PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM") } Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR") } Self::DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM => { Some("DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM") } 
Self::PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM => { Some("PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM") } Self::PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM => { Some("PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM") } Self::PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT => { Some("PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT") } Self::IMAGE_VIEW_SLICED_CREATE_INFO_EXT => Some("IMAGE_VIEW_SLICED_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE => { Some("PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE") } Self::DESCRIPTOR_SET_BINDING_REFERENCE_VALVE => { Some("DESCRIPTOR_SET_BINDING_REFERENCE_VALVE") } Self::DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE => { Some("DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE") } Self::PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT") } Self::PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT => { Some("PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT") } Self::PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM => { Some("PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM") } Self::PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM => { Some("PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM") } Self::RENDER_PASS_STRIPE_BEGIN_INFO_ARM => Some("RENDER_PASS_STRIPE_BEGIN_INFO_ARM"), Self::RENDER_PASS_STRIPE_INFO_ARM => Some("RENDER_PASS_STRIPE_INFO_ARM"), Self::RENDER_PASS_STRIPE_SUBMIT_INFO_ARM => Some("RENDER_PASS_STRIPE_SUBMIT_INFO_ARM"), Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM") } Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM => { Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM") } Self::SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM => { Some("SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM") } Self::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV => { Some("PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV") } Self::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV => { Some("PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV") } Self::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV => { Some("PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV") } Self::COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV => { Some("COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV") } Self::PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV => { Some("PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV") } Self::PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV => { Some("PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV") } Self::PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR") } Self::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT => { Some("PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT") } Self::PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM") } Self::PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM => { Some("PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM") } Self::IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM => { Some("IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM") } 
Self::PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT => { Some("PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT") } Self::PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT") } Self::EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT => { Some("EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT") } Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT => { Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT") } Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT") } Self::PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT") } Self::RENDER_PASS_CREATION_CONTROL_EXT => Some("RENDER_PASS_CREATION_CONTROL_EXT"), Self::RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT => { Some("RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT") } Self::RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT => { Some("RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT") } Self::DIRECT_DRIVER_LOADING_INFO_LUNARG => Some("DIRECT_DRIVER_LOADING_INFO_LUNARG"), Self::DIRECT_DRIVER_LOADING_LIST_LUNARG => Some("DIRECT_DRIVER_LOADING_LIST_LUNARG"), Self::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT") } Self::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT") } Self::PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT => { Some("PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT") } Self::SHADER_MODULE_IDENTIFIER_EXT => Some("SHADER_MODULE_IDENTIFIER_EXT"), Self::PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT") } Self::PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV => { Some("PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV") } Self::PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV") } Self::OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV => Some("OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV"), Self::OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV => { Some("OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV") } Self::OPTICAL_FLOW_SESSION_CREATE_INFO_NV => { Some("OPTICAL_FLOW_SESSION_CREATE_INFO_NV") } Self::OPTICAL_FLOW_EXECUTE_INFO_NV => Some("OPTICAL_FLOW_EXECUTE_INFO_NV"), Self::OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV => { Some("OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV") } Self::PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT => { Some("PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT") } Self::PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT") } Self::PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID => { Some("PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID") } Self::PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID => { Some("PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID") } Self::ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID => { Some("ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID") } Self::PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR => { Some("PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR") } Self::PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR") } Self::RENDERING_AREA_INFO_KHR => Some("RENDERING_AREA_INFO_KHR"), Self::DEVICE_IMAGE_SUBRESOURCE_INFO_KHR => Some("DEVICE_IMAGE_SUBRESOURCE_INFO_KHR"), 
Self::SUBRESOURCE_LAYOUT_2_KHR => Some("SUBRESOURCE_LAYOUT_2_KHR"), Self::IMAGE_SUBRESOURCE_2_KHR => Some("IMAGE_SUBRESOURCE_2_KHR"), Self::PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR => { Some("PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR") } Self::BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR => { Some("BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR => { Some("PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR") } Self::PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT => { Some("PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT") } Self::PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT => { Some("PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT") } Self::SHADER_CREATE_INFO_EXT => Some("SHADER_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM") } Self::TILE_PROPERTIES_QCOM => Some("TILE_PROPERTIES_QCOM"), Self::PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC => { Some("PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC") } Self::AMIGO_PROFILING_SUBMIT_INFO_SEC => Some("AMIGO_PROFILING_SUBMIT_INFO_SEC"), Self::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM") } Self::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV => { Some("PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV") } Self::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV => { Some("PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV") } Self::PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV => { Some("PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV") } Self::PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT") } Self::MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT => { Some("MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT") } Self::LAYER_SETTINGS_CREATE_INFO_EXT => Some("LAYER_SETTINGS_CREATE_INFO_EXT"), Self::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM => { Some("PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM") } Self::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM => { Some("PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM") } Self::PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT => { Some("PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT") } Self::PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT => { Some("PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT") } Self::LATENCY_SLEEP_MODE_INFO_NV => Some("LATENCY_SLEEP_MODE_INFO_NV"), Self::LATENCY_SLEEP_INFO_NV => Some("LATENCY_SLEEP_INFO_NV"), Self::SET_LATENCY_MARKER_INFO_NV => Some("SET_LATENCY_MARKER_INFO_NV"), Self::GET_LATENCY_MARKER_INFO_NV => Some("GET_LATENCY_MARKER_INFO_NV"), Self::LATENCY_TIMINGS_FRAME_REPORT_NV => Some("LATENCY_TIMINGS_FRAME_REPORT_NV"), Self::LATENCY_SUBMISSION_PRESENT_ID_NV => Some("LATENCY_SUBMISSION_PRESENT_ID_NV"), Self::OUT_OF_BAND_QUEUE_TYPE_INFO_NV => Some("OUT_OF_BAND_QUEUE_TYPE_INFO_NV"), Self::SWAPCHAIN_LATENCY_CREATE_INFO_NV => Some("SWAPCHAIN_LATENCY_CREATE_INFO_NV"), Self::LATENCY_SURFACE_CAPABILITIES_NV => Some("LATENCY_SURFACE_CAPABILITIES_NV"), Self::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR => { Some("PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR") } Self::COOPERATIVE_MATRIX_PROPERTIES_KHR => Some("COOPERATIVE_MATRIX_PROPERTIES_KHR"), 
Self::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR") } Self::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM") } Self::MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM => { Some("MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM") } Self::VIDEO_DECODE_AV1_CAPABILITIES_KHR => Some("VIDEO_DECODE_AV1_CAPABILITIES_KHR"), Self::VIDEO_DECODE_AV1_PICTURE_INFO_KHR => Some("VIDEO_DECODE_AV1_PICTURE_INFO_KHR"), Self::VIDEO_DECODE_AV1_PROFILE_INFO_KHR => Some("VIDEO_DECODE_AV1_PROFILE_INFO_KHR"), Self::VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR => { Some("VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR") } Self::VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR => Some("VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR"), Self::PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR => { Some("PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR") } Self::VIDEO_INLINE_QUERY_INFO_KHR => Some("VIDEO_INLINE_QUERY_INFO_KHR"), Self::PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV => { Some("PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV") } Self::PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM") } Self::PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM => { Some("PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM") } Self::SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM => { Some("SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM") } Self::SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM => { Some("SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM") } Self::PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM") } Self::BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM => Some("BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM"), Self::PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM") } Self::SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM => { Some("SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM") } Self::PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM => { Some("PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM") } Self::PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT => { Some("PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT") } Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR") } Self::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR => { Some("PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR => { Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR") } Self::PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR") } Self::SCREEN_BUFFER_PROPERTIES_QNX => Some("SCREEN_BUFFER_PROPERTIES_QNX"), Self::SCREEN_BUFFER_FORMAT_PROPERTIES_QNX => { Some("SCREEN_BUFFER_FORMAT_PROPERTIES_QNX") } Self::IMPORT_SCREEN_BUFFER_INFO_QNX => Some("IMPORT_SCREEN_BUFFER_INFO_QNX"), Self::EXTERNAL_FORMAT_QNX => Some("EXTERNAL_FORMAT_QNX"), Self::PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX => { Some("PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX") } Self::PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT => { Some("PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT") } Self::PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR => { Some("PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR") } 
Self::PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR => { Some("PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR") } Self::PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR => { Some("PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR") } Self::PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR") } Self::CALIBRATED_TIMESTAMP_INFO_KHR => Some("CALIBRATED_TIMESTAMP_INFO_KHR"), Self::PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR => { Some("PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR") } Self::PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR => { Some("PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR") } Self::PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR => { Some("PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR") } Self::BIND_MEMORY_STATUS_KHR => Some("BIND_MEMORY_STATUS_KHR"), Self::BIND_DESCRIPTOR_SETS_INFO_KHR => Some("BIND_DESCRIPTOR_SETS_INFO_KHR"), Self::PUSH_CONSTANTS_INFO_KHR => Some("PUSH_CONSTANTS_INFO_KHR"), Self::PUSH_DESCRIPTOR_SET_INFO_KHR => Some("PUSH_DESCRIPTOR_SET_INFO_KHR"), Self::PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR => { Some("PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR") } Self::SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT => { Some("SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT") } Self::BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT => { Some("BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT") } Self::PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV => { Some("PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV") } Self::PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV => { Some("PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV") } Self::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV => { Some("PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV") } Self::PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV => { Some("PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV") } Self::PHYSICAL_DEVICE_SUBGROUP_PROPERTIES => { Some("PHYSICAL_DEVICE_SUBGROUP_PROPERTIES") } Self::BIND_BUFFER_MEMORY_INFO => Some("BIND_BUFFER_MEMORY_INFO"), Self::BIND_IMAGE_MEMORY_INFO => Some("BIND_IMAGE_MEMORY_INFO"), Self::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES => { Some("PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES") } Self::MEMORY_DEDICATED_REQUIREMENTS => Some("MEMORY_DEDICATED_REQUIREMENTS"), Self::MEMORY_DEDICATED_ALLOCATE_INFO => Some("MEMORY_DEDICATED_ALLOCATE_INFO"), Self::MEMORY_ALLOCATE_FLAGS_INFO => Some("MEMORY_ALLOCATE_FLAGS_INFO"), Self::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO => { Some("DEVICE_GROUP_RENDER_PASS_BEGIN_INFO") } Self::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO => { Some("DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO") } Self::DEVICE_GROUP_SUBMIT_INFO => Some("DEVICE_GROUP_SUBMIT_INFO"), Self::DEVICE_GROUP_BIND_SPARSE_INFO => Some("DEVICE_GROUP_BIND_SPARSE_INFO"), Self::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO => { Some("BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO") } Self::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO => { Some("BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO") } Self::PHYSICAL_DEVICE_GROUP_PROPERTIES => Some("PHYSICAL_DEVICE_GROUP_PROPERTIES"), Self::DEVICE_GROUP_DEVICE_CREATE_INFO => Some("DEVICE_GROUP_DEVICE_CREATE_INFO"), Self::BUFFER_MEMORY_REQUIREMENTS_INFO_2 => Some("BUFFER_MEMORY_REQUIREMENTS_INFO_2"), Self::IMAGE_MEMORY_REQUIREMENTS_INFO_2 => Some("IMAGE_MEMORY_REQUIREMENTS_INFO_2"), Self::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 => { Some("IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2") } Self::MEMORY_REQUIREMENTS_2 => Some("MEMORY_REQUIREMENTS_2"), Self::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 => 
Some("SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"), Self::PHYSICAL_DEVICE_FEATURES_2 => Some("PHYSICAL_DEVICE_FEATURES_2"), Self::PHYSICAL_DEVICE_PROPERTIES_2 => Some("PHYSICAL_DEVICE_PROPERTIES_2"), Self::FORMAT_PROPERTIES_2 => Some("FORMAT_PROPERTIES_2"), Self::IMAGE_FORMAT_PROPERTIES_2 => Some("IMAGE_FORMAT_PROPERTIES_2"), Self::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 => { Some("PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2") } Self::QUEUE_FAMILY_PROPERTIES_2 => Some("QUEUE_FAMILY_PROPERTIES_2"), Self::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 => { Some("PHYSICAL_DEVICE_MEMORY_PROPERTIES_2") } Self::SPARSE_IMAGE_FORMAT_PROPERTIES_2 => Some("SPARSE_IMAGE_FORMAT_PROPERTIES_2"), Self::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 => { Some("PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2") } Self::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES => { Some("PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES") } Self::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO => { Some("RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO") } Self::IMAGE_VIEW_USAGE_CREATE_INFO => Some("IMAGE_VIEW_USAGE_CREATE_INFO"), Self::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO => { Some("PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO") } Self::RENDER_PASS_MULTIVIEW_CREATE_INFO => Some("RENDER_PASS_MULTIVIEW_CREATE_INFO"), Self::PHYSICAL_DEVICE_MULTIVIEW_FEATURES => Some("PHYSICAL_DEVICE_MULTIVIEW_FEATURES"), Self::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES => { Some("PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES") } Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES => { Some("PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES") } Self::PROTECTED_SUBMIT_INFO => Some("PROTECTED_SUBMIT_INFO"), Self::PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES => { Some("PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES") } Self::PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES => { Some("PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES") } Self::DEVICE_QUEUE_INFO_2 => Some("DEVICE_QUEUE_INFO_2"), Self::SAMPLER_YCBCR_CONVERSION_CREATE_INFO => { Some("SAMPLER_YCBCR_CONVERSION_CREATE_INFO") } Self::SAMPLER_YCBCR_CONVERSION_INFO => Some("SAMPLER_YCBCR_CONVERSION_INFO"), Self::BIND_IMAGE_PLANE_MEMORY_INFO => Some("BIND_IMAGE_PLANE_MEMORY_INFO"), Self::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO => { Some("IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO") } Self::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES => { Some("PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES") } Self::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES => { Some("SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES") } Self::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO => { Some("DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO") } Self::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO => { Some("PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO") } Self::EXTERNAL_IMAGE_FORMAT_PROPERTIES => Some("EXTERNAL_IMAGE_FORMAT_PROPERTIES"), Self::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO => { Some("PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO") } Self::EXTERNAL_BUFFER_PROPERTIES => Some("EXTERNAL_BUFFER_PROPERTIES"), Self::PHYSICAL_DEVICE_ID_PROPERTIES => Some("PHYSICAL_DEVICE_ID_PROPERTIES"), Self::EXTERNAL_MEMORY_BUFFER_CREATE_INFO => Some("EXTERNAL_MEMORY_BUFFER_CREATE_INFO"), Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO => Some("EXTERNAL_MEMORY_IMAGE_CREATE_INFO"), Self::EXPORT_MEMORY_ALLOCATE_INFO => Some("EXPORT_MEMORY_ALLOCATE_INFO"), Self::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO => { Some("PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO") } Self::EXTERNAL_FENCE_PROPERTIES => Some("EXTERNAL_FENCE_PROPERTIES"), Self::EXPORT_FENCE_CREATE_INFO => Some("EXPORT_FENCE_CREATE_INFO"), Self::EXPORT_SEMAPHORE_CREATE_INFO => 
Some("EXPORT_SEMAPHORE_CREATE_INFO"), Self::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO => { Some("PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO") } Self::EXTERNAL_SEMAPHORE_PROPERTIES => Some("EXTERNAL_SEMAPHORE_PROPERTIES"), Self::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES => { Some("PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES") } Self::DESCRIPTOR_SET_LAYOUT_SUPPORT => Some("DESCRIPTOR_SET_LAYOUT_SUPPORT"), Self::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES") } Self::PHYSICAL_DEVICE_VULKAN_1_1_FEATURES => { Some("PHYSICAL_DEVICE_VULKAN_1_1_FEATURES") } Self::PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES => { Some("PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES") } Self::PHYSICAL_DEVICE_VULKAN_1_2_FEATURES => { Some("PHYSICAL_DEVICE_VULKAN_1_2_FEATURES") } Self::PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES => { Some("PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES") } Self::IMAGE_FORMAT_LIST_CREATE_INFO => Some("IMAGE_FORMAT_LIST_CREATE_INFO"), Self::ATTACHMENT_DESCRIPTION_2 => Some("ATTACHMENT_DESCRIPTION_2"), Self::ATTACHMENT_REFERENCE_2 => Some("ATTACHMENT_REFERENCE_2"), Self::SUBPASS_DESCRIPTION_2 => Some("SUBPASS_DESCRIPTION_2"), Self::SUBPASS_DEPENDENCY_2 => Some("SUBPASS_DEPENDENCY_2"), Self::RENDER_PASS_CREATE_INFO_2 => Some("RENDER_PASS_CREATE_INFO_2"), Self::SUBPASS_BEGIN_INFO => Some("SUBPASS_BEGIN_INFO"), Self::SUBPASS_END_INFO => Some("SUBPASS_END_INFO"), Self::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES => { Some("PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES") } Self::PHYSICAL_DEVICE_DRIVER_PROPERTIES => Some("PHYSICAL_DEVICE_DRIVER_PROPERTIES"), Self::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES") } Self::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES") } Self::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES => { Some("PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES") } Self::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO => { Some("DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO") } Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES => { Some("PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES") } Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES => { Some("PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES") } Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO => { Some("DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO") } Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT => { Some("DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT") } Self::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES => { Some("PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES") } Self::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE => { Some("SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE") } Self::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES => { Some("PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES") } Self::IMAGE_STENCIL_USAGE_CREATE_INFO => Some("IMAGE_STENCIL_USAGE_CREATE_INFO"), Self::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES => { Some("PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES") } Self::SAMPLER_REDUCTION_MODE_CREATE_INFO => Some("SAMPLER_REDUCTION_MODE_CREATE_INFO"), Self::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES => { Some("PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES") } Self::PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES => { Some("PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES") } Self::FRAMEBUFFER_ATTACHMENTS_CREATE_INFO => { Some("FRAMEBUFFER_ATTACHMENTS_CREATE_INFO") } Self::FRAMEBUFFER_ATTACHMENT_IMAGE_INFO => Some("FRAMEBUFFER_ATTACHMENT_IMAGE_INFO"), 
Self::RENDER_PASS_ATTACHMENT_BEGIN_INFO => Some("RENDER_PASS_ATTACHMENT_BEGIN_INFO"), Self::PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES => { Some("PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES") } Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES") } Self::PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES => { Some("PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES") } Self::ATTACHMENT_REFERENCE_STENCIL_LAYOUT => { Some("ATTACHMENT_REFERENCE_STENCIL_LAYOUT") } Self::ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT => { Some("ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT") } Self::PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES => { Some("PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES") } Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES => { Some("PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES") } Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES => { Some("PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES") } Self::SEMAPHORE_TYPE_CREATE_INFO => Some("SEMAPHORE_TYPE_CREATE_INFO"), Self::TIMELINE_SEMAPHORE_SUBMIT_INFO => Some("TIMELINE_SEMAPHORE_SUBMIT_INFO"), Self::SEMAPHORE_WAIT_INFO => Some("SEMAPHORE_WAIT_INFO"), Self::SEMAPHORE_SIGNAL_INFO => Some("SEMAPHORE_SIGNAL_INFO"), Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES => { Some("PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES") } Self::BUFFER_DEVICE_ADDRESS_INFO => Some("BUFFER_DEVICE_ADDRESS_INFO"), Self::BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO => { Some("BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO") } Self::MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO => { Some("MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO") } Self::DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO => { Some("DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO") } Self::PHYSICAL_DEVICE_VULKAN_1_3_FEATURES => { Some("PHYSICAL_DEVICE_VULKAN_1_3_FEATURES") } Self::PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES => { Some("PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES") } Self::PIPELINE_CREATION_FEEDBACK_CREATE_INFO => { Some("PIPELINE_CREATION_FEEDBACK_CREATE_INFO") } Self::PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES") } Self::PHYSICAL_DEVICE_TOOL_PROPERTIES => Some("PHYSICAL_DEVICE_TOOL_PROPERTIES"), Self::PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES") } Self::PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES => { Some("PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES") } Self::DEVICE_PRIVATE_DATA_CREATE_INFO => Some("DEVICE_PRIVATE_DATA_CREATE_INFO"), Self::PRIVATE_DATA_SLOT_CREATE_INFO => Some("PRIVATE_DATA_SLOT_CREATE_INFO"), Self::PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES => { Some("PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES") } Self::MEMORY_BARRIER_2 => Some("MEMORY_BARRIER_2"), Self::BUFFER_MEMORY_BARRIER_2 => Some("BUFFER_MEMORY_BARRIER_2"), Self::IMAGE_MEMORY_BARRIER_2 => Some("IMAGE_MEMORY_BARRIER_2"), Self::DEPENDENCY_INFO => Some("DEPENDENCY_INFO"), Self::SUBMIT_INFO_2 => Some("SUBMIT_INFO_2"), Self::SEMAPHORE_SUBMIT_INFO => Some("SEMAPHORE_SUBMIT_INFO"), Self::COMMAND_BUFFER_SUBMIT_INFO => Some("COMMAND_BUFFER_SUBMIT_INFO"), Self::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES => { Some("PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES") } Self::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES => { Some("PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES") } Self::PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES => { 
Some("PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES") } Self::COPY_BUFFER_INFO_2 => Some("COPY_BUFFER_INFO_2"), Self::COPY_IMAGE_INFO_2 => Some("COPY_IMAGE_INFO_2"), Self::COPY_BUFFER_TO_IMAGE_INFO_2 => Some("COPY_BUFFER_TO_IMAGE_INFO_2"), Self::COPY_IMAGE_TO_BUFFER_INFO_2 => Some("COPY_IMAGE_TO_BUFFER_INFO_2"), Self::BLIT_IMAGE_INFO_2 => Some("BLIT_IMAGE_INFO_2"), Self::RESOLVE_IMAGE_INFO_2 => Some("RESOLVE_IMAGE_INFO_2"), Self::BUFFER_COPY_2 => Some("BUFFER_COPY_2"), Self::IMAGE_COPY_2 => Some("IMAGE_COPY_2"), Self::IMAGE_BLIT_2 => Some("IMAGE_BLIT_2"), Self::BUFFER_IMAGE_COPY_2 => Some("BUFFER_IMAGE_COPY_2"), Self::IMAGE_RESOLVE_2 => Some("IMAGE_RESOLVE_2"), Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES => { Some("PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES") } Self::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO => { Some("PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO") } Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES => { Some("PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES") } Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES => { Some("PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES") } Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES => { Some("PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES") } Self::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK => { Some("WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK") } Self::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO => { Some("DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO") } Self::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES => { Some("PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES") } Self::RENDERING_INFO => Some("RENDERING_INFO"), Self::RENDERING_ATTACHMENT_INFO => Some("RENDERING_ATTACHMENT_INFO"), Self::PIPELINE_RENDERING_CREATE_INFO => Some("PIPELINE_RENDERING_CREATE_INFO"), Self::PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES => { Some("PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES") } Self::COMMAND_BUFFER_INHERITANCE_RENDERING_INFO => { Some("COMMAND_BUFFER_INHERITANCE_RENDERING_INFO") } Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES => { Some("PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES") } Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES => { Some("PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES") } Self::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES => { Some("PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES") } Self::FORMAT_PROPERTIES_3 => Some("FORMAT_PROPERTIES_3"), Self::PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES => { Some("PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES") } Self::PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES => { Some("PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES") } Self::DEVICE_BUFFER_MEMORY_REQUIREMENTS => Some("DEVICE_BUFFER_MEMORY_REQUIREMENTS"), Self::DEVICE_IMAGE_MEMORY_REQUIREMENTS => Some("DEVICE_IMAGE_MEMORY_REQUIREMENTS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SubgroupFeatureFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (SubgroupFeatureFlags::BASIC.0, "BASIC"), (SubgroupFeatureFlags::VOTE.0, "VOTE"), (SubgroupFeatureFlags::ARITHMETIC.0, "ARITHMETIC"), (SubgroupFeatureFlags::BALLOT.0, "BALLOT"), (SubgroupFeatureFlags::SHUFFLE.0, "SHUFFLE"), (SubgroupFeatureFlags::SHUFFLE_RELATIVE.0, "SHUFFLE_RELATIVE"), (SubgroupFeatureFlags::CLUSTERED.0, "CLUSTERED"), (SubgroupFeatureFlags::QUAD.0, "QUAD"), (SubgroupFeatureFlags::PARTITIONED_NV.0, "PARTITIONED_NV"), (SubgroupFeatureFlags::ROTATE_KHR.0, "ROTATE_KHR"), ( 
SubgroupFeatureFlags::ROTATE_CLUSTERED_KHR.0, "ROTATE_CLUSTERED_KHR", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SubmitFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(SubmitFlags::PROTECTED.0, "PROTECTED")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SubpassContents { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::INLINE => Some("INLINE"), Self::SECONDARY_COMMAND_BUFFERS => Some("SECONDARY_COMMAND_BUFFERS"), Self::INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT => { Some("INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT") } _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SubpassDescriptionFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( SubpassDescriptionFlags::PER_VIEW_ATTRIBUTES_NVX.0, "PER_VIEW_ATTRIBUTES_NVX", ), ( SubpassDescriptionFlags::PER_VIEW_POSITION_X_ONLY_NVX.0, "PER_VIEW_POSITION_X_ONLY_NVX", ), ( SubpassDescriptionFlags::FRAGMENT_REGION_QCOM.0, "FRAGMENT_REGION_QCOM", ), ( SubpassDescriptionFlags::SHADER_RESOLVE_QCOM.0, "SHADER_RESOLVE_QCOM", ), ( SubpassDescriptionFlags::RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT.0, "RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT", ), ( SubpassDescriptionFlags::RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT.0, "RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT", ), ( SubpassDescriptionFlags::RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT.0, "RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT", ), ( SubpassDescriptionFlags::ENABLE_LEGACY_DITHERING_EXT.0, "ENABLE_LEGACY_DITHERING_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SubpassMergeStatusEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::MERGED => Some("MERGED"), Self::DISALLOWED => Some("DISALLOWED"), Self::NOT_MERGED_SIDE_EFFECTS => Some("NOT_MERGED_SIDE_EFFECTS"), Self::NOT_MERGED_SAMPLES_MISMATCH => Some("NOT_MERGED_SAMPLES_MISMATCH"), Self::NOT_MERGED_VIEWS_MISMATCH => Some("NOT_MERGED_VIEWS_MISMATCH"), Self::NOT_MERGED_ALIASING => Some("NOT_MERGED_ALIASING"), Self::NOT_MERGED_DEPENDENCIES => Some("NOT_MERGED_DEPENDENCIES"), Self::NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT => { Some("NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT") } Self::NOT_MERGED_TOO_MANY_ATTACHMENTS => Some("NOT_MERGED_TOO_MANY_ATTACHMENTS"), Self::NOT_MERGED_INSUFFICIENT_STORAGE => Some("NOT_MERGED_INSUFFICIENT_STORAGE"), Self::NOT_MERGED_DEPTH_STENCIL_COUNT => Some("NOT_MERGED_DEPTH_STENCIL_COUNT"), Self::NOT_MERGED_RESOLVE_ATTACHMENT_REUSE => { Some("NOT_MERGED_RESOLVE_ATTACHMENT_REUSE") } Self::NOT_MERGED_SINGLE_SUBPASS => Some("NOT_MERGED_SINGLE_SUBPASS"), Self::NOT_MERGED_UNSPECIFIED => Some("NOT_MERGED_UNSPECIFIED"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for SurfaceCounterFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(SurfaceCounterFlagsEXT::VBLANK.0, "VBLANK")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SurfaceTransformFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (SurfaceTransformFlagsKHR::IDENTITY.0, "IDENTITY"), (SurfaceTransformFlagsKHR::ROTATE_90.0, "ROTATE_90"), (SurfaceTransformFlagsKHR::ROTATE_180.0, "ROTATE_180"), (SurfaceTransformFlagsKHR::ROTATE_270.0, "ROTATE_270"), ( SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR.0, 
"HORIZONTAL_MIRROR", ), ( SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_90.0, "HORIZONTAL_MIRROR_ROTATE_90", ), ( SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_180.0, "HORIZONTAL_MIRROR_ROTATE_180", ), ( SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_270.0, "HORIZONTAL_MIRROR_ROTATE_270", ), (SurfaceTransformFlagsKHR::INHERIT.0, "INHERIT"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SwapchainCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( SwapchainCreateFlagsKHR::SPLIT_INSTANCE_BIND_REGIONS.0, "SPLIT_INSTANCE_BIND_REGIONS", ), (SwapchainCreateFlagsKHR::PROTECTED.0, "PROTECTED"), (SwapchainCreateFlagsKHR::MUTABLE_FORMAT.0, "MUTABLE_FORMAT"), ( SwapchainCreateFlagsKHR::DEFERRED_MEMORY_ALLOCATION_EXT.0, "DEFERRED_MEMORY_ALLOCATION_EXT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SwapchainImageUsageFlagsANDROID { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[(SwapchainImageUsageFlagsANDROID::SHARED.0, "SHARED")]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for SystemAllocationScope { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::COMMAND => Some("COMMAND"), Self::OBJECT => Some("OBJECT"), Self::CACHE => Some("CACHE"), Self::DEVICE => Some("DEVICE"), Self::INSTANCE => Some("INSTANCE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for TessellationDomainOrigin { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UPPER_LEFT => Some("UPPER_LEFT"), Self::LOWER_LEFT => Some("LOWER_LEFT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for TimeDomainKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEVICE => Some("DEVICE"), Self::CLOCK_MONOTONIC => Some("CLOCK_MONOTONIC"), Self::CLOCK_MONOTONIC_RAW => Some("CLOCK_MONOTONIC_RAW"), Self::QUERY_PERFORMANCE_COUNTER => Some("QUERY_PERFORMANCE_COUNTER"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ToolPurposeFlags { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (ToolPurposeFlags::VALIDATION.0, "VALIDATION"), (ToolPurposeFlags::PROFILING.0, "PROFILING"), (ToolPurposeFlags::TRACING.0, "TRACING"), ( ToolPurposeFlags::ADDITIONAL_FEATURES.0, "ADDITIONAL_FEATURES", ), (ToolPurposeFlags::MODIFYING_FEATURES.0, "MODIFYING_FEATURES"), ( ToolPurposeFlags::DEBUG_REPORTING_EXT.0, "DEBUG_REPORTING_EXT", ), (ToolPurposeFlags::DEBUG_MARKERS_EXT.0, "DEBUG_MARKERS_EXT"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ValidationCacheCreateFlagsEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ValidationCacheHeaderVersionEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ONE => Some("ONE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ValidationCheckEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::ALL => Some("ALL"), Self::SHADERS => Some("SHADERS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ValidationFeatureDisableEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> 
fmt::Result { let name = match *self { Self::ALL => Some("ALL"), Self::SHADERS => Some("SHADERS"), Self::THREAD_SAFETY => Some("THREAD_SAFETY"), Self::API_PARAMETERS => Some("API_PARAMETERS"), Self::OBJECT_LIFETIMES => Some("OBJECT_LIFETIMES"), Self::CORE_CHECKS => Some("CORE_CHECKS"), Self::UNIQUE_HANDLES => Some("UNIQUE_HANDLES"), Self::SHADER_VALIDATION_CACHE => Some("SHADER_VALIDATION_CACHE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ValidationFeatureEnableEXT { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::GPU_ASSISTED => Some("GPU_ASSISTED"), Self::GPU_ASSISTED_RESERVE_BINDING_SLOT => Some("GPU_ASSISTED_RESERVE_BINDING_SLOT"), Self::BEST_PRACTICES => Some("BEST_PRACTICES"), Self::DEBUG_PRINTF => Some("DEBUG_PRINTF"), Self::SYNCHRONIZATION_VALIDATION => Some("SYNCHRONIZATION_VALIDATION"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for VendorId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::VIV => Some("VIV"), Self::VSI => Some("VSI"), Self::KAZAN => Some("KAZAN"), Self::CODEPLAY => Some("CODEPLAY"), Self::MESA => Some("MESA"), Self::POCL => Some("POCL"), Self::MOBILEYE => Some("MOBILEYE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for VertexInputRate { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::VERTEX => Some("VERTEX"), Self::INSTANCE => Some("INSTANCE"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for ViSurfaceCreateFlagsNN { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoBeginCodingFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoCapabilityFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoCapabilityFlagsKHR::PROTECTED_CONTENT.0, "PROTECTED_CONTENT", ), ( VideoCapabilityFlagsKHR::SEPARATE_REFERENCE_IMAGES.0, "SEPARATE_REFERENCE_IMAGES", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoChromaSubsamplingFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoChromaSubsamplingFlagsKHR::INVALID.0, "INVALID"), (VideoChromaSubsamplingFlagsKHR::MONOCHROME.0, "MONOCHROME"), (VideoChromaSubsamplingFlagsKHR::TYPE_420.0, "TYPE_420"), (VideoChromaSubsamplingFlagsKHR::TYPE_422.0, "TYPE_422"), (VideoChromaSubsamplingFlagsKHR::TYPE_444.0, "TYPE_444"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoCodecOperationFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoCodecOperationFlagsKHR::NONE.0, "NONE"), (VideoCodecOperationFlagsKHR::ENCODE_H264.0, "ENCODE_H264"), (VideoCodecOperationFlagsKHR::ENCODE_H265.0, "ENCODE_H265"), (VideoCodecOperationFlagsKHR::DECODE_H264.0, "DECODE_H264"), (VideoCodecOperationFlagsKHR::DECODE_H265.0, "DECODE_H265"), (VideoCodecOperationFlagsKHR::DECODE_AV1.0, "DECODE_AV1"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoCodingControlFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoCodingControlFlagsKHR::RESET.0, 
"RESET"), ( VideoCodingControlFlagsKHR::ENCODE_RATE_CONTROL.0, "ENCODE_RATE_CONTROL", ), ( VideoCodingControlFlagsKHR::ENCODE_QUALITY_LEVEL.0, "ENCODE_QUALITY_LEVEL", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoComponentBitDepthFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoComponentBitDepthFlagsKHR::INVALID.0, "INVALID"), (VideoComponentBitDepthFlagsKHR::TYPE_8.0, "TYPE_8"), (VideoComponentBitDepthFlagsKHR::TYPE_10.0, "TYPE_10"), (VideoComponentBitDepthFlagsKHR::TYPE_12.0, "TYPE_12"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoDecodeCapabilityFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoDecodeCapabilityFlagsKHR::DPB_AND_OUTPUT_COINCIDE.0, "DPB_AND_OUTPUT_COINCIDE", ), ( VideoDecodeCapabilityFlagsKHR::DPB_AND_OUTPUT_DISTINCT.0, "DPB_AND_OUTPUT_DISTINCT", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoDecodeFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoDecodeH264PictureLayoutFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoDecodeH264PictureLayoutFlagsKHR::PROGRESSIVE.0, "PROGRESSIVE", ), ( VideoDecodeH264PictureLayoutFlagsKHR::INTERLACED_INTERLEAVED_LINES.0, "INTERLACED_INTERLEAVED_LINES", ), ( VideoDecodeH264PictureLayoutFlagsKHR::INTERLACED_SEPARATE_PLANES.0, "INTERLACED_SEPARATE_PLANES", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoDecodeUsageFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoDecodeUsageFlagsKHR::DEFAULT.0, "DEFAULT"), (VideoDecodeUsageFlagsKHR::TRANSCODING.0, "TRANSCODING"), (VideoDecodeUsageFlagsKHR::OFFLINE.0, "OFFLINE"), (VideoDecodeUsageFlagsKHR::STREAMING.0, "STREAMING"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeCapabilityFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeCapabilityFlagsKHR::PRECEDING_EXTERNALLY_ENCODED_BYTES.0, "PRECEDING_EXTERNALLY_ENCODED_BYTES", ), ( VideoEncodeCapabilityFlagsKHR::INSUFFICIENTSTREAM_BUFFER_RANGE_DETECTION.0, "INSUFFICIENTSTREAM_BUFFER_RANGE_DETECTION", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeContentFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoEncodeContentFlagsKHR::DEFAULT.0, "DEFAULT"), (VideoEncodeContentFlagsKHR::CAMERA.0, "CAMERA"), (VideoEncodeContentFlagsKHR::DESKTOP.0, "DESKTOP"), (VideoEncodeContentFlagsKHR::RENDERED.0, "RENDERED"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeFeedbackFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeFeedbackFlagsKHR::BITSTREAM_BUFFER_OFFSET.0, "BITSTREAM_BUFFER_OFFSET", ), ( VideoEncodeFeedbackFlagsKHR::BITSTREAM_BYTES_WRITTEN.0, "BITSTREAM_BYTES_WRITTEN", ), ( VideoEncodeFeedbackFlagsKHR::BITSTREAM_HAS_OVERRIDES.0, "BITSTREAM_HAS_OVERRIDES", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH264CapabilityFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 
const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH264CapabilityFlagsKHR::HRD_COMPLIANCE.0, "HRD_COMPLIANCE", ), ( VideoEncodeH264CapabilityFlagsKHR::PREDICTION_WEIGHT_TABLE_GENERATED.0, "PREDICTION_WEIGHT_TABLE_GENERATED", ), ( VideoEncodeH264CapabilityFlagsKHR::ROW_UNALIGNED_SLICE.0, "ROW_UNALIGNED_SLICE", ), ( VideoEncodeH264CapabilityFlagsKHR::DIFFERENT_SLICE_TYPE.0, "DIFFERENT_SLICE_TYPE", ), ( VideoEncodeH264CapabilityFlagsKHR::B_FRAME_IN_L0_LIST.0, "B_FRAME_IN_L0_LIST", ), ( VideoEncodeH264CapabilityFlagsKHR::B_FRAME_IN_L1_LIST.0, "B_FRAME_IN_L1_LIST", ), ( VideoEncodeH264CapabilityFlagsKHR::PER_PICTURE_TYPE_MIN_MAX_QP.0, "PER_PICTURE_TYPE_MIN_MAX_QP", ), ( VideoEncodeH264CapabilityFlagsKHR::PER_SLICE_CONSTANT_QP.0, "PER_SLICE_CONSTANT_QP", ), ( VideoEncodeH264CapabilityFlagsKHR::GENERATE_PREFIX_NALU.0, "GENERATE_PREFIX_NALU", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH264RateControlFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH264RateControlFlagsKHR::ATTEMPT_HRD_COMPLIANCE.0, "ATTEMPT_HRD_COMPLIANCE", ), ( VideoEncodeH264RateControlFlagsKHR::REGULAR_GOP.0, "REGULAR_GOP", ), ( VideoEncodeH264RateControlFlagsKHR::REFERENCE_PATTERN_FLAT.0, "REFERENCE_PATTERN_FLAT", ), ( VideoEncodeH264RateControlFlagsKHR::REFERENCE_PATTERN_DYADIC.0, "REFERENCE_PATTERN_DYADIC", ), ( VideoEncodeH264RateControlFlagsKHR::TEMPORAL_LAYER_PATTERN_DYADIC.0, "TEMPORAL_LAYER_PATTERN_DYADIC", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH264StdFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH264StdFlagsKHR::SEPARATE_COLOR_PLANE_FLAG_SET.0, "SEPARATE_COLOR_PLANE_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET.0, "QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::SCALING_MATRIX_PRESENT_FLAG_SET.0, "SCALING_MATRIX_PRESENT_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::CHROMA_QP_INDEX_OFFSET.0, "CHROMA_QP_INDEX_OFFSET", ), ( VideoEncodeH264StdFlagsKHR::SECOND_CHROMA_QP_INDEX_OFFSET.0, "SECOND_CHROMA_QP_INDEX_OFFSET", ), ( VideoEncodeH264StdFlagsKHR::PIC_INIT_QP_MINUS26.0, "PIC_INIT_QP_MINUS26", ), ( VideoEncodeH264StdFlagsKHR::WEIGHTED_PRED_FLAG_SET.0, "WEIGHTED_PRED_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::WEIGHTED_BIPRED_IDC_EXPLICIT.0, "WEIGHTED_BIPRED_IDC_EXPLICIT", ), ( VideoEncodeH264StdFlagsKHR::WEIGHTED_BIPRED_IDC_IMPLICIT.0, "WEIGHTED_BIPRED_IDC_IMPLICIT", ), ( VideoEncodeH264StdFlagsKHR::TRANSFORM_8X8_MODE_FLAG_SET.0, "TRANSFORM_8X8_MODE_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::DIRECT_SPATIAL_MV_PRED_FLAG_UNSET.0, "DIRECT_SPATIAL_MV_PRED_FLAG_UNSET", ), ( VideoEncodeH264StdFlagsKHR::ENTROPY_CODING_MODE_FLAG_UNSET.0, "ENTROPY_CODING_MODE_FLAG_UNSET", ), ( VideoEncodeH264StdFlagsKHR::ENTROPY_CODING_MODE_FLAG_SET.0, "ENTROPY_CODING_MODE_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::DIRECT_8X8_INFERENCE_FLAG_UNSET.0, "DIRECT_8X8_INFERENCE_FLAG_UNSET", ), ( VideoEncodeH264StdFlagsKHR::CONSTRAINED_INTRA_PRED_FLAG_SET.0, "CONSTRAINED_INTRA_PRED_FLAG_SET", ), ( VideoEncodeH264StdFlagsKHR::DEBLOCKING_FILTER_DISABLED.0, "DEBLOCKING_FILTER_DISABLED", ), ( VideoEncodeH264StdFlagsKHR::DEBLOCKING_FILTER_ENABLED.0, "DEBLOCKING_FILTER_ENABLED", ), ( VideoEncodeH264StdFlagsKHR::DEBLOCKING_FILTER_PARTIAL.0, "DEBLOCKING_FILTER_PARTIAL", ), ( VideoEncodeH264StdFlagsKHR::SLICE_QP_DELTA.0, "SLICE_QP_DELTA", ), ( VideoEncodeH264StdFlagsKHR::DIFFERENT_SLICE_QP_DELTA.0, 
"DIFFERENT_SLICE_QP_DELTA", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH265CapabilityFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH265CapabilityFlagsKHR::HRD_COMPLIANCE.0, "HRD_COMPLIANCE", ), ( VideoEncodeH265CapabilityFlagsKHR::PREDICTION_WEIGHT_TABLE_GENERATED.0, "PREDICTION_WEIGHT_TABLE_GENERATED", ), ( VideoEncodeH265CapabilityFlagsKHR::ROW_UNALIGNED_SLICE_SEGMENT.0, "ROW_UNALIGNED_SLICE_SEGMENT", ), ( VideoEncodeH265CapabilityFlagsKHR::DIFFERENT_SLICE_SEGMENT_TYPE.0, "DIFFERENT_SLICE_SEGMENT_TYPE", ), ( VideoEncodeH265CapabilityFlagsKHR::B_FRAME_IN_L0_LIST.0, "B_FRAME_IN_L0_LIST", ), ( VideoEncodeH265CapabilityFlagsKHR::B_FRAME_IN_L1_LIST.0, "B_FRAME_IN_L1_LIST", ), ( VideoEncodeH265CapabilityFlagsKHR::PER_PICTURE_TYPE_MIN_MAX_QP.0, "PER_PICTURE_TYPE_MIN_MAX_QP", ), ( VideoEncodeH265CapabilityFlagsKHR::PER_SLICE_SEGMENT_CONSTANT_QP.0, "PER_SLICE_SEGMENT_CONSTANT_QP", ), ( VideoEncodeH265CapabilityFlagsKHR::MULTIPLE_TILES_PER_SLICE_SEGMENT.0, "MULTIPLE_TILES_PER_SLICE_SEGMENT", ), ( VideoEncodeH265CapabilityFlagsKHR::MULTIPLE_SLICE_SEGMENTS_PER_TILE.0, "MULTIPLE_SLICE_SEGMENTS_PER_TILE", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH265CtbSizeFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoEncodeH265CtbSizeFlagsKHR::TYPE_16.0, "TYPE_16"), (VideoEncodeH265CtbSizeFlagsKHR::TYPE_32.0, "TYPE_32"), (VideoEncodeH265CtbSizeFlagsKHR::TYPE_64.0, "TYPE_64"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH265RateControlFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH265RateControlFlagsKHR::ATTEMPT_HRD_COMPLIANCE.0, "ATTEMPT_HRD_COMPLIANCE", ), ( VideoEncodeH265RateControlFlagsKHR::REGULAR_GOP.0, "REGULAR_GOP", ), ( VideoEncodeH265RateControlFlagsKHR::REFERENCE_PATTERN_FLAT.0, "REFERENCE_PATTERN_FLAT", ), ( VideoEncodeH265RateControlFlagsKHR::REFERENCE_PATTERN_DYADIC.0, "REFERENCE_PATTERN_DYADIC", ), ( VideoEncodeH265RateControlFlagsKHR::TEMPORAL_SUB_LAYER_PATTERN_DYADIC.0, "TEMPORAL_SUB_LAYER_PATTERN_DYADIC", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH265StdFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH265StdFlagsKHR::SEPARATE_COLOR_PLANE_FLAG_SET.0, "SEPARATE_COLOR_PLANE_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET.0, "SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::SCALING_LIST_DATA_PRESENT_FLAG_SET.0, "SCALING_LIST_DATA_PRESENT_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::PCM_ENABLED_FLAG_SET.0, "PCM_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::SPS_TEMPORAL_MVP_ENABLED_FLAG_SET.0, "SPS_TEMPORAL_MVP_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::INIT_QP_MINUS26.0, "INIT_QP_MINUS26", ), ( VideoEncodeH265StdFlagsKHR::WEIGHTED_PRED_FLAG_SET.0, "WEIGHTED_PRED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::WEIGHTED_BIPRED_FLAG_SET.0, "WEIGHTED_BIPRED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::LOG2_PARALLEL_MERGE_LEVEL_MINUS2.0, "LOG2_PARALLEL_MERGE_LEVEL_MINUS2", ), ( VideoEncodeH265StdFlagsKHR::SIGN_DATA_HIDING_ENABLED_FLAG_SET.0, "SIGN_DATA_HIDING_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::TRANSFORM_SKIP_ENABLED_FLAG_SET.0, "TRANSFORM_SKIP_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::TRANSFORM_SKIP_ENABLED_FLAG_UNSET.0, 
"TRANSFORM_SKIP_ENABLED_FLAG_UNSET", ), ( VideoEncodeH265StdFlagsKHR::PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET.0, "PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::TRANSQUANT_BYPASS_ENABLED_FLAG_SET.0, "TRANSQUANT_BYPASS_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::CONSTRAINED_INTRA_PRED_FLAG_SET.0, "CONSTRAINED_INTRA_PRED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::ENTROPY_CODING_SYNC_ENABLED_FLAG_SET.0, "ENTROPY_CODING_SYNC_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET.0, "DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET.0, "DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::DEPENDENT_SLICE_SEGMENT_FLAG_SET.0, "DEPENDENT_SLICE_SEGMENT_FLAG_SET", ), ( VideoEncodeH265StdFlagsKHR::SLICE_QP_DELTA.0, "SLICE_QP_DELTA", ), ( VideoEncodeH265StdFlagsKHR::DIFFERENT_SLICE_QP_DELTA.0, "DIFFERENT_SLICE_QP_DELTA", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeH265TransformBlockSizeFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoEncodeH265TransformBlockSizeFlagsKHR::TYPE_4.0, "TYPE_4", ), ( VideoEncodeH265TransformBlockSizeFlagsKHR::TYPE_8.0, "TYPE_8", ), ( VideoEncodeH265TransformBlockSizeFlagsKHR::TYPE_16.0, "TYPE_16", ), ( VideoEncodeH265TransformBlockSizeFlagsKHR::TYPE_32.0, "TYPE_32", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeRateControlFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeRateControlModeFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoEncodeRateControlModeFlagsKHR::DEFAULT.0, "DEFAULT"), (VideoEncodeRateControlModeFlagsKHR::DISABLED.0, "DISABLED"), (VideoEncodeRateControlModeFlagsKHR::CBR.0, "CBR"), (VideoEncodeRateControlModeFlagsKHR::VBR.0, "VBR"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEncodeTuningModeKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::DEFAULT => Some("DEFAULT"), Self::HIGH_QUALITY => Some("HIGH_QUALITY"), Self::LOW_LATENCY => Some("LOW_LATENCY"), Self::ULTRA_LOW_LATENCY => Some("ULTRA_LOW_LATENCY"), Self::LOSSLESS => Some("LOSSLESS"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for VideoEncodeUsageFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ (VideoEncodeUsageFlagsKHR::DEFAULT.0, "DEFAULT"), (VideoEncodeUsageFlagsKHR::TRANSCODING.0, "TRANSCODING"), (VideoEncodeUsageFlagsKHR::STREAMING.0, "STREAMING"), (VideoEncodeUsageFlagsKHR::RECORDING.0, "RECORDING"), (VideoEncodeUsageFlagsKHR::CONFERENCING.0, "CONFERENCING"), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoEndCodingFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoSessionCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[ ( VideoSessionCreateFlagsKHR::PROTECTED_CONTENT.0, "PROTECTED_CONTENT", ), ( VideoSessionCreateFlagsKHR::ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS.0, "ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS", ), ( VideoSessionCreateFlagsKHR::INLINE_QUERIES.0, 
"INLINE_QUERIES", ), ]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for VideoSessionParametersCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for ViewportCoordinateSwizzleNV { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::POSITIVE_X => Some("POSITIVE_X"), Self::NEGATIVE_X => Some("NEGATIVE_X"), Self::POSITIVE_Y => Some("POSITIVE_Y"), Self::NEGATIVE_Y => Some("NEGATIVE_Y"), Self::POSITIVE_Z => Some("POSITIVE_Z"), Self::NEGATIVE_Z => Some("NEGATIVE_Z"), Self::POSITIVE_W => Some("POSITIVE_W"), Self::NEGATIVE_W => Some("NEGATIVE_W"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for WaylandSurfaceCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for Win32SurfaceCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for XcbSurfaceCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } impl fmt::Debug for XlibSurfaceCreateFlagsKHR { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { const KNOWN: &[(Flags, &str)] = &[]; debug_flags(f, KNOWN, self.0) } } ash-0.38.0+1.3.281/src/vk/constants.rs000064400000000000000000000024751046102023000150760ustar 00000000000000use crate::vk::definitions::*; pub const MAX_PHYSICAL_DEVICE_NAME_SIZE: usize = 256; pub const UUID_SIZE: usize = 16; pub const LUID_SIZE: usize = 8; pub const MAX_EXTENSION_NAME_SIZE: usize = 256; pub const MAX_DESCRIPTION_SIZE: usize = 256; pub const MAX_MEMORY_TYPES: usize = 32; #[doc = "The maximum number of unique memory heaps, each of which supporting 1 or more memory types"] pub const MAX_MEMORY_HEAPS: usize = 16; pub const LOD_CLAMP_NONE: f32 = 1000.00; pub const REMAINING_MIP_LEVELS: u32 = !0; pub const REMAINING_ARRAY_LAYERS: u32 = !0; pub const REMAINING_3D_SLICES_EXT: u32 = !0; pub const WHOLE_SIZE: u64 = !0; pub const ATTACHMENT_UNUSED: u32 = !0; pub const TRUE: Bool32 = 1; pub const FALSE: Bool32 = 0; pub const QUEUE_FAMILY_IGNORED: u32 = !0; pub const QUEUE_FAMILY_EXTERNAL: u32 = !1; pub const QUEUE_FAMILY_FOREIGN_EXT: u32 = !2; pub const SUBPASS_EXTERNAL: u32 = !0; pub const MAX_DEVICE_GROUP_SIZE: usize = 32; pub const MAX_DRIVER_NAME_SIZE: usize = 256; pub const MAX_DRIVER_INFO_SIZE: usize = 256; pub const SHADER_UNUSED_KHR: u32 = !0; pub const MAX_GLOBAL_PRIORITY_SIZE_KHR: usize = 16; pub const MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT: usize = 32; pub const MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR: usize = 7; pub const SHADER_INDEX_UNUSED_AMDX: u32 = !0; pub const SHADER_UNUSED_NV: u32 = SHADER_UNUSED_KHR; ash-0.38.0+1.3.281/src/vk/definitions.rs000064400000000000000000101672651046102023000154070ustar 00000000000000use crate::vk::aliases::*; use crate::vk::bitflags::*; use crate::vk::constants::*; use crate::vk::enums::*; use crate::vk::native::*; use crate::vk::platform_types::*; use crate::vk::prelude::*; use crate::vk::{ptr_chain_iter, Handle}; use core::ffi::*; use core::fmt; use core::marker::PhantomData; #[deprecated = "This define is deprecated. 
VK_MAKE_API_VERSION should be used instead."] #[doc = ""] pub const fn make_version(major: u32, minor: u32, patch: u32) -> u32 { ((major) << 22) | ((minor) << 12) | (patch) } #[deprecated = "This define is deprecated. VK_API_VERSION_MAJOR should be used instead."] #[doc = ""] pub const fn version_major(version: u32) -> u32 { (version) >> 22 } #[deprecated = "This define is deprecated. VK_API_VERSION_MINOR should be used instead."] #[doc = ""] pub const fn version_minor(version: u32) -> u32 { ((version) >> 12) & 0x3ffu32 } #[deprecated = "This define is deprecated. VK_API_VERSION_PATCH should be used instead."] #[doc = ""] pub const fn version_patch(version: u32) -> u32 { (version) & 0xfffu32 } #[doc = ""] pub const fn make_api_version(variant: u32, major: u32, minor: u32, patch: u32) -> u32 { ((variant) << 29) | ((major) << 22) | ((minor) << 12) | (patch) } #[doc = ""] pub const fn api_version_variant(version: u32) -> u32 { (version) >> 29 } #[doc = ""] pub const fn api_version_major(version: u32) -> u32 { ((version) >> 22) & 0x7fu32 } #[doc = ""] pub const fn api_version_minor(version: u32) -> u32 { ((version) >> 12) & 0x3ffu32 } #[doc = ""] pub const fn api_version_patch(version: u32) -> u32 { (version) & 0xfffu32 } #[doc = ""] pub const API_VERSION_1_0: u32 = make_api_version(0, 1, 0, 0); #[doc = ""] pub const API_VERSION_1_1: u32 = make_api_version(0, 1, 1, 0); #[doc = ""] pub const API_VERSION_1_2: u32 = make_api_version(0, 1, 2, 0); #[doc = ""] pub const API_VERSION_1_3: u32 = make_api_version(0, 1, 3, 0); #[doc = ""] pub const HEADER_VERSION: u32 = 281; #[doc = ""] pub const HEADER_VERSION_COMPLETE: u32 = make_api_version(0, 1, 3, HEADER_VERSION); #[doc = ""] pub type SampleMask = u32; #[doc = ""] pub type Bool32 = u32; #[doc = ""] pub type Flags = u32; #[doc = ""] pub type Flags64 = u64; #[doc = ""] pub type DeviceSize = u64; #[doc = ""] pub type DeviceAddress = u64; #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct QueryPoolCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(QueryPoolCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineDynamicStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineDynamicStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineMultisampleStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineMultisampleStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineRasterizationStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineRasterizationStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineViewportStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineViewportStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineTessellationStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineTessellationStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineInputAssemblyStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineInputAssemblyStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub 
struct PipelineVertexInputStateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(PipelineVertexInputStateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct BufferViewCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(BufferViewCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DeviceCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(DeviceCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DescriptorPoolResetFlags(pub(crate) Flags); vk_bitflags_wrapped!(DescriptorPoolResetFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DescriptorUpdateTemplateCreateFlags(pub(crate) Flags); vk_bitflags_wrapped!(DescriptorUpdateTemplateCreateFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AccelerationStructureMotionInfoFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(AccelerationStructureMotionInfoFlagsNV, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AccelerationStructureMotionInstanceFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(AccelerationStructureMotionInstanceFlagsNV, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DirectDriverLoadingFlagsLUNARG(pub(crate) Flags); vk_bitflags_wrapped!(DirectDriverLoadingFlagsLUNARG, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DisplayModeCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(DisplayModeCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DisplaySurfaceCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(DisplaySurfaceCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct AndroidSurfaceCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(AndroidSurfaceCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ViSurfaceCreateFlagsNN(pub(crate) Flags); vk_bitflags_wrapped!(ViSurfaceCreateFlagsNN, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct WaylandSurfaceCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(WaylandSurfaceCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct Win32SurfaceCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(Win32SurfaceCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct XlibSurfaceCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(XlibSurfaceCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct XcbSurfaceCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(XcbSurfaceCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DirectFBSurfaceCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DirectFBSurfaceCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, 
PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct IOSSurfaceCreateFlagsMVK(pub(crate) Flags); vk_bitflags_wrapped!(IOSSurfaceCreateFlagsMVK, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MacOSSurfaceCreateFlagsMVK(pub(crate) Flags); vk_bitflags_wrapped!(MacOSSurfaceCreateFlagsMVK, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct MetalSurfaceCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(MetalSurfaceCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ImagePipeSurfaceCreateFlagsFUCHSIA(pub(crate) Flags); vk_bitflags_wrapped!(ImagePipeSurfaceCreateFlagsFUCHSIA, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct StreamDescriptorSurfaceCreateFlagsGGP(pub(crate) Flags); vk_bitflags_wrapped!(StreamDescriptorSurfaceCreateFlagsGGP, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct HeadlessSurfaceCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(HeadlessSurfaceCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ScreenSurfaceCreateFlagsQNX(pub(crate) Flags); vk_bitflags_wrapped!(ScreenSurfaceCreateFlagsQNX, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct CommandPoolTrimFlags(pub(crate) Flags); vk_bitflags_wrapped!(CommandPoolTrimFlags, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineViewportSwizzleStateCreateFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(PipelineViewportSwizzleStateCreateFlagsNV, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineDiscardRectangleStateCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(PipelineDiscardRectangleStateCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCoverageToColorStateCreateFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCoverageToColorStateCreateFlagsNV, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCoverageModulationStateCreateFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCoverageModulationStateCreateFlagsNV, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineCoverageReductionStateCreateFlagsNV(pub(crate) Flags); vk_bitflags_wrapped!(PipelineCoverageReductionStateCreateFlagsNV, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct ValidationCacheCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(ValidationCacheCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DebugUtilsMessengerCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DebugUtilsMessengerCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DebugUtilsMessengerCallbackDataFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DebugUtilsMessengerCallbackDataFlagsEXT, Flags); #[repr(transparent)] 
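// Like the other `*Flags*` types in this module, the type declared below is a
// `#[repr(transparent)]` newtype over the raw `Flags` (u32) value, and
// `vk_bitflags_wrapped!` supplies the bitwise operators plus helpers such as
// `empty`, `from_raw`, `as_raw` and `contains`. Illustrative sketch only (not
// part of the generated bindings), assuming the crate is in scope as `ash::vk`:
//
//     let flags = vk::SwapchainCreateFlagsKHR::PROTECTED
//         | vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
//     assert!(flags.contains(vk::SwapchainCreateFlagsKHR::PROTECTED));
//     assert_eq!(flags, vk::SwapchainCreateFlagsKHR::from_raw(flags.as_raw()));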
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct DeviceMemoryReportFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(DeviceMemoryReportFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineRasterizationConservativeStateCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(PipelineRasterizationConservativeStateCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineRasterizationStateStreamCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(PipelineRasterizationStateStreamCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct PipelineRasterizationDepthClipStateCreateFlagsEXT(pub(crate) Flags); vk_bitflags_wrapped!(PipelineRasterizationDepthClipStateCreateFlagsEXT, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoSessionParametersCreateFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoSessionParametersCreateFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoBeginCodingFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoBeginCodingFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEndCodingFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEndCodingFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoDecodeFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoDecodeFlagsKHR, Flags); #[repr(transparent)] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[doc = ""] pub struct VideoEncodeRateControlFlagsKHR(pub(crate) Flags); vk_bitflags_wrapped!(VideoEncodeRateControlFlagsKHR, Flags); define_handle!( Instance, INSTANCE, doc = "" ); define_handle!( PhysicalDevice, PHYSICAL_DEVICE, doc = "" ); define_handle!( Device, DEVICE, doc = "" ); define_handle!( Queue, QUEUE, doc = "" ); define_handle!( CommandBuffer, COMMAND_BUFFER, doc = "" ); handle_nondispatchable!( DeviceMemory, DEVICE_MEMORY, doc = "" ); handle_nondispatchable!( CommandPool, COMMAND_POOL, doc = "" ); handle_nondispatchable!( Buffer, BUFFER, doc = "" ); handle_nondispatchable!( BufferView, BUFFER_VIEW, doc = "" ); handle_nondispatchable!( Image, IMAGE, doc = "" ); handle_nondispatchable!( ImageView, IMAGE_VIEW, doc = "" ); handle_nondispatchable!( ShaderModule, SHADER_MODULE, doc = "" ); handle_nondispatchable!( Pipeline, PIPELINE, doc = "" ); handle_nondispatchable!( PipelineLayout, PIPELINE_LAYOUT, doc = "" ); handle_nondispatchable!( Sampler, SAMPLER, doc = "" ); handle_nondispatchable!( DescriptorSet, DESCRIPTOR_SET, doc = "" ); handle_nondispatchable ! (DescriptorSetLayout , DESCRIPTOR_SET_LAYOUT , doc = "") ; handle_nondispatchable!( DescriptorPool, DESCRIPTOR_POOL, doc = "" ); handle_nondispatchable!( Fence, FENCE, doc = "" ); handle_nondispatchable!( Semaphore, SEMAPHORE, doc = "" ); handle_nondispatchable!( Event, EVENT, doc = "" ); handle_nondispatchable!( QueryPool, QUERY_POOL, doc = "" ); handle_nondispatchable!( Framebuffer, FRAMEBUFFER, doc = "" ); handle_nondispatchable!( RenderPass, RENDER_PASS, doc = "" ); handle_nondispatchable!( PipelineCache, PIPELINE_CACHE, doc = "" ); handle_nondispatchable ! 
(IndirectCommandsLayoutNV , INDIRECT_COMMANDS_LAYOUT_NV , doc = "") ; handle_nondispatchable ! (DescriptorUpdateTemplate , DESCRIPTOR_UPDATE_TEMPLATE , doc = "") ; handle_nondispatchable ! (SamplerYcbcrConversion , SAMPLER_YCBCR_CONVERSION , doc = "") ; handle_nondispatchable ! (ValidationCacheEXT , VALIDATION_CACHE_EXT , doc = "") ; handle_nondispatchable ! (AccelerationStructureKHR , ACCELERATION_STRUCTURE_KHR , doc = "") ; handle_nondispatchable ! (AccelerationStructureNV , ACCELERATION_STRUCTURE_NV , doc = "") ; handle_nondispatchable ! (PerformanceConfigurationINTEL , PERFORMANCE_CONFIGURATION_INTEL , doc = "") ; handle_nondispatchable ! (BufferCollectionFUCHSIA , BUFFER_COLLECTION_FUCHSIA , doc = "") ; handle_nondispatchable ! (DeferredOperationKHR , DEFERRED_OPERATION_KHR , doc = "") ; handle_nondispatchable ! (PrivateDataSlot , PRIVATE_DATA_SLOT , doc = "") ; handle_nondispatchable!( CuModuleNVX, CU_MODULE_NVX, doc = "" ); handle_nondispatchable!( CuFunctionNVX, CU_FUNCTION_NVX, doc = "" ); handle_nondispatchable ! (OpticalFlowSessionNV , OPTICAL_FLOW_SESSION_NV , doc = "") ; handle_nondispatchable!( MicromapEXT, MICROMAP_EXT, doc = "" ); handle_nondispatchable!( ShaderEXT, SHADER_EXT, doc = "" ); handle_nondispatchable!( DisplayKHR, DISPLAY_KHR, doc = "" ); handle_nondispatchable!( DisplayModeKHR, DISPLAY_MODE_KHR, doc = "" ); handle_nondispatchable!( SurfaceKHR, SURFACE_KHR, doc = "" ); handle_nondispatchable!( SwapchainKHR, SWAPCHAIN_KHR, doc = "" ); handle_nondispatchable ! (DebugReportCallbackEXT , DEBUG_REPORT_CALLBACK_EXT , doc = "") ; handle_nondispatchable ! (DebugUtilsMessengerEXT , DEBUG_UTILS_MESSENGER_EXT , doc = "") ; handle_nondispatchable ! (VideoSessionKHR , VIDEO_SESSION_KHR , doc = "") ; handle_nondispatchable ! (VideoSessionParametersKHR , VIDEO_SESSION_PARAMETERS_KHR , doc = "") ; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkInternalAllocationNotification = Option< unsafe extern "system" fn( p_user_data: *mut c_void, size: usize, allocation_type: InternalAllocationType, allocation_scope: SystemAllocationScope, ), >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkInternalFreeNotification = Option< unsafe extern "system" fn( p_user_data: *mut c_void, size: usize, allocation_type: InternalAllocationType, allocation_scope: SystemAllocationScope, ), >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkReallocationFunction = Option< unsafe extern "system" fn( p_user_data: *mut c_void, p_original: *mut c_void, size: usize, alignment: usize, allocation_scope: SystemAllocationScope, ) -> *mut c_void, >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkAllocationFunction = Option< unsafe extern "system" fn( p_user_data: *mut c_void, size: usize, alignment: usize, allocation_scope: SystemAllocationScope, ) -> *mut c_void, >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkFreeFunction = Option; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkVoidFunction = Option; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkDebugReportCallbackEXT = Option< unsafe extern "system" fn( flags: DebugReportFlagsEXT, object_type: DebugReportObjectTypeEXT, object: u64, location: usize, message_code: i32, p_layer_prefix: *const c_char, p_message: *const c_char, p_user_data: *mut c_void, ) -> Bool32, >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkDebugUtilsMessengerCallbackEXT = Option< unsafe extern "system" fn( message_severity: DebugUtilsMessageSeverityFlagsEXT, message_types: DebugUtilsMessageTypeFlagsEXT, 
p_callback_data: *const DebugUtilsMessengerCallbackDataEXT<'_>, p_user_data: *mut c_void, ) -> Bool32, >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkDeviceMemoryReportCallbackEXT = Option< unsafe extern "system" fn( p_callback_data: *const DeviceMemoryReportCallbackDataEXT<'_>, p_user_data: *mut c_void, ), >; #[allow(non_camel_case_types)] #[doc = ""] pub type PFN_vkGetInstanceProcAddrLUNARG = Option< unsafe extern "system" fn(instance: Instance, p_name: *const c_char) -> PFN_vkVoidFunction, >; #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BaseOutStructure<'a> { pub s_type: StructureType, pub p_next: *mut Self, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BaseOutStructure<'_> {} unsafe impl Sync for BaseOutStructure<'_> {} impl ::core::default::Default for BaseOutStructure<'_> { #[inline] fn default() -> Self { Self { s_type: unsafe { ::core::mem::zeroed() }, p_next: ::core::ptr::null_mut(), _marker: PhantomData, } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BaseInStructure<'a> { pub s_type: StructureType, pub p_next: *const Self, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BaseInStructure<'_> {} unsafe impl Sync for BaseInStructure<'_> {} impl ::core::default::Default for BaseInStructure<'_> { #[inline] fn default() -> Self { Self { s_type: unsafe { ::core::mem::zeroed() }, p_next: ::core::ptr::null(), _marker: PhantomData, } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct Offset2D { pub x: i32, pub y: i32, } impl Offset2D { #[inline] pub fn x(mut self, x: i32) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: i32) -> Self { self.y = y; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct Offset3D { pub x: i32, pub y: i32, pub z: i32, } impl Offset3D { #[inline] pub fn x(mut self, x: i32) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: i32) -> Self { self.y = y; self } #[inline] pub fn z(mut self, z: i32) -> Self { self.z = z; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct Extent2D { pub width: u32, pub height: u32, } impl Extent2D { #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct Extent3D { pub width: u32, pub height: u32, pub depth: u32, } impl Extent3D { #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } #[inline] pub fn depth(mut self, depth: u32) -> Self { self.depth = depth; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct Viewport { pub x: f32, pub y: f32, pub width: f32, pub height: f32, pub min_depth: f32, pub max_depth: f32, } impl Viewport { #[inline] pub fn x(mut self, x: f32) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: f32) -> Self { self.y = y; self } #[inline] pub fn width(mut self, width: 
f32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: f32) -> Self { self.height = height; self } #[inline] pub fn min_depth(mut self, min_depth: f32) -> Self { self.min_depth = min_depth; self } #[inline] pub fn max_depth(mut self, max_depth: f32) -> Self { self.max_depth = max_depth; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct Rect2D { pub offset: Offset2D, pub extent: Extent2D, } impl Rect2D { #[inline] pub fn offset(mut self, offset: Offset2D) -> Self { self.offset = offset; self } #[inline] pub fn extent(mut self, extent: Extent2D) -> Self { self.extent = extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct ClearRect { pub rect: Rect2D, pub base_array_layer: u32, pub layer_count: u32, } impl ClearRect { #[inline] pub fn rect(mut self, rect: Rect2D) -> Self { self.rect = rect; self } #[inline] pub fn base_array_layer(mut self, base_array_layer: u32) -> Self { self.base_array_layer = base_array_layer; self } #[inline] pub fn layer_count(mut self, layer_count: u32) -> Self { self.layer_count = layer_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ComponentMapping { pub r: ComponentSwizzle, pub g: ComponentSwizzle, pub b: ComponentSwizzle, pub a: ComponentSwizzle, } impl ComponentMapping { #[inline] pub fn r(mut self, r: ComponentSwizzle) -> Self { self.r = r; self } #[inline] pub fn g(mut self, g: ComponentSwizzle) -> Self { self.g = g; self } #[inline] pub fn b(mut self, b: ComponentSwizzle) -> Self { self.b = b; self } #[inline] pub fn a(mut self, a: ComponentSwizzle) -> Self { self.a = a; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceProperties { pub api_version: u32, pub driver_version: u32, pub vendor_id: u32, pub device_id: u32, pub device_type: PhysicalDeviceType, pub device_name: [c_char; MAX_PHYSICAL_DEVICE_NAME_SIZE], pub pipeline_cache_uuid: [u8; UUID_SIZE], pub limits: PhysicalDeviceLimits, pub sparse_properties: PhysicalDeviceSparseProperties, } #[cfg(feature = "debug")] impl fmt::Debug for PhysicalDeviceProperties { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PhysicalDeviceProperties") .field("api_version", &self.api_version) .field("driver_version", &self.driver_version) .field("vendor_id", &self.vendor_id) .field("device_id", &self.device_id) .field("device_type", &self.device_type) .field("device_name", &self.device_name_as_c_str()) .field("pipeline_cache_uuid", &self.pipeline_cache_uuid) .field("limits", &self.limits) .field("sparse_properties", &self.sparse_properties) .finish() } } impl ::core::default::Default for PhysicalDeviceProperties { #[inline] fn default() -> Self { Self { api_version: u32::default(), driver_version: u32::default(), vendor_id: u32::default(), device_id: u32::default(), device_type: PhysicalDeviceType::default(), device_name: unsafe { ::core::mem::zeroed() }, pipeline_cache_uuid: unsafe { ::core::mem::zeroed() }, limits: PhysicalDeviceLimits::default(), sparse_properties: PhysicalDeviceSparseProperties::default(), } } } impl PhysicalDeviceProperties { #[inline] pub fn api_version(mut self, api_version: u32) -> Self { self.api_version = api_version; self } #[inline] pub fn driver_version(mut self, driver_version: u32) -> Self { 
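// Setters like the one below take `self` by value and return it, so struct
// initialization can be written as a chain. Illustrative sketch only (not part
// of the generated bindings), assuming the crate is in scope as `ash::vk`; the
// vendor/device ids are arbitrary example values:
//
//     let props = vk::PhysicalDeviceProperties::default()
//         .api_version(vk::make_api_version(0, 1, 3, 0))
//         .vendor_id(0x10DE)
//         .device_id(0x1234);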
self.driver_version = driver_version; self } #[inline] pub fn vendor_id(mut self, vendor_id: u32) -> Self { self.vendor_id = vendor_id; self } #[inline] pub fn device_id(mut self, device_id: u32) -> Self { self.device_id = device_id; self } #[inline] pub fn device_type(mut self, device_type: PhysicalDeviceType) -> Self { self.device_type = device_type; self } #[inline] pub fn device_name( mut self, device_name: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.device_name, device_name).map(|()| self) } #[inline] pub fn device_name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.device_name) } #[inline] pub fn pipeline_cache_uuid(mut self, pipeline_cache_uuid: [u8; UUID_SIZE]) -> Self { self.pipeline_cache_uuid = pipeline_cache_uuid; self } #[inline] pub fn limits(mut self, limits: PhysicalDeviceLimits) -> Self { self.limits = limits; self } #[inline] pub fn sparse_properties(mut self, sparse_properties: PhysicalDeviceSparseProperties) -> Self { self.sparse_properties = sparse_properties; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExtensionProperties { pub extension_name: [c_char; MAX_EXTENSION_NAME_SIZE], pub spec_version: u32, } #[cfg(feature = "debug")] impl fmt::Debug for ExtensionProperties { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("ExtensionProperties") .field("extension_name", &self.extension_name_as_c_str()) .field("spec_version", &self.spec_version) .finish() } } impl ::core::default::Default for ExtensionProperties { #[inline] fn default() -> Self { Self { extension_name: unsafe { ::core::mem::zeroed() }, spec_version: u32::default(), } } } impl ExtensionProperties { #[inline] pub fn extension_name( mut self, extension_name: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.extension_name, extension_name).map(|()| self) } #[inline] pub fn extension_name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.extension_name) } #[inline] pub fn spec_version(mut self, spec_version: u32) -> Self { self.spec_version = spec_version; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LayerProperties { pub layer_name: [c_char; MAX_EXTENSION_NAME_SIZE], pub spec_version: u32, pub implementation_version: u32, pub description: [c_char; MAX_DESCRIPTION_SIZE], } #[cfg(feature = "debug")] impl fmt::Debug for LayerProperties { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("LayerProperties") .field("layer_name", &self.layer_name_as_c_str()) .field("spec_version", &self.spec_version) .field("implementation_version", &self.implementation_version) .field("description", &self.description_as_c_str()) .finish() } } impl ::core::default::Default for LayerProperties { #[inline] fn default() -> Self { Self { layer_name: unsafe { ::core::mem::zeroed() }, spec_version: u32::default(), implementation_version: u32::default(), description: unsafe { ::core::mem::zeroed() }, } } } impl LayerProperties { #[inline] pub fn layer_name( mut self, layer_name: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.layer_name, layer_name).map(|()| self) } #[inline] pub fn layer_name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.layer_name) } #[inline] pub fn spec_version(mut self, spec_version: u32) -> Self { self.spec_version = spec_version; self } #[inline] pub fn 
implementation_version(mut self, implementation_version: u32) -> Self { self.implementation_version = implementation_version; self } #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ApplicationInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_application_name: *const c_char, pub application_version: u32, pub p_engine_name: *const c_char, pub engine_version: u32, pub api_version: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ApplicationInfo<'_> {} unsafe impl Sync for ApplicationInfo<'_> {} impl ::core::default::Default for ApplicationInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_application_name: ::core::ptr::null(), application_version: u32::default(), p_engine_name: ::core::ptr::null(), engine_version: u32::default(), api_version: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ApplicationInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::APPLICATION_INFO; } impl<'a> ApplicationInfo<'a> { #[inline] pub fn application_name(mut self, application_name: &'a CStr) -> Self { self.p_application_name = application_name.as_ptr(); self } #[inline] pub unsafe fn application_name_as_c_str(&self) -> Option<&CStr> { if self.p_application_name.is_null() { None } else { Some(CStr::from_ptr(self.p_application_name)) } } #[inline] pub fn application_version(mut self, application_version: u32) -> Self { self.application_version = application_version; self } #[inline] pub fn engine_name(mut self, engine_name: &'a CStr) -> Self { self.p_engine_name = engine_name.as_ptr(); self } #[inline] pub unsafe fn engine_name_as_c_str(&self) -> Option<&CStr> { if self.p_engine_name.is_null() { None } else { Some(CStr::from_ptr(self.p_engine_name)) } } #[inline] pub fn engine_version(mut self, engine_version: u32) -> Self { self.engine_version = engine_version; self } #[inline] pub fn api_version(mut self, api_version: u32) -> Self { self.api_version = api_version; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AllocationCallbacks<'a> { pub p_user_data: *mut c_void, pub pfn_allocation: PFN_vkAllocationFunction, pub pfn_reallocation: PFN_vkReallocationFunction, pub pfn_free: PFN_vkFreeFunction, pub pfn_internal_allocation: PFN_vkInternalAllocationNotification, pub pfn_internal_free: PFN_vkInternalFreeNotification, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AllocationCallbacks<'_> {} unsafe impl Sync for AllocationCallbacks<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AllocationCallbacks<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AllocationCallbacks") .field("p_user_data", &self.p_user_data) .field( "pfn_allocation", &(self.pfn_allocation.map(|x| x as *const ())), ) .field( "pfn_reallocation", &(self.pfn_reallocation.map(|x| x as *const ())), ) .field("pfn_free", &(self.pfn_free.map(|x| x as *const ()))) .field( "pfn_internal_allocation", &(self.pfn_internal_allocation.map(|x| x as *const ())), ) .field( "pfn_internal_free", &(self.pfn_internal_free.map(|x| x as *const ())), ) .finish() } } impl 
::core::default::Default for AllocationCallbacks<'_> { #[inline] fn default() -> Self { Self { p_user_data: ::core::ptr::null_mut(), pfn_allocation: PFN_vkAllocationFunction::default(), pfn_reallocation: PFN_vkReallocationFunction::default(), pfn_free: PFN_vkFreeFunction::default(), pfn_internal_allocation: PFN_vkInternalAllocationNotification::default(), pfn_internal_free: PFN_vkInternalFreeNotification::default(), _marker: PhantomData, } } } impl<'a> AllocationCallbacks<'a> { #[inline] pub fn user_data(mut self, user_data: *mut c_void) -> Self { self.p_user_data = user_data; self } #[inline] pub fn pfn_allocation(mut self, pfn_allocation: PFN_vkAllocationFunction) -> Self { self.pfn_allocation = pfn_allocation; self } #[inline] pub fn pfn_reallocation(mut self, pfn_reallocation: PFN_vkReallocationFunction) -> Self { self.pfn_reallocation = pfn_reallocation; self } #[inline] pub fn pfn_free(mut self, pfn_free: PFN_vkFreeFunction) -> Self { self.pfn_free = pfn_free; self } #[inline] pub fn pfn_internal_allocation( mut self, pfn_internal_allocation: PFN_vkInternalAllocationNotification, ) -> Self { self.pfn_internal_allocation = pfn_internal_allocation; self } #[inline] pub fn pfn_internal_free(mut self, pfn_internal_free: PFN_vkInternalFreeNotification) -> Self { self.pfn_internal_free = pfn_internal_free; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceQueueCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DeviceQueueCreateFlags, pub queue_family_index: u32, pub queue_count: u32, pub p_queue_priorities: *const f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceQueueCreateInfo<'_> {} unsafe impl Sync for DeviceQueueCreateInfo<'_> {} impl ::core::default::Default for DeviceQueueCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DeviceQueueCreateFlags::default(), queue_family_index: u32::default(), queue_count: u32::default(), p_queue_priorities: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceQueueCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_QUEUE_CREATE_INFO; } pub unsafe trait ExtendsDeviceQueueCreateInfo {} impl<'a> DeviceQueueCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: DeviceQueueCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn queue_family_index(mut self, queue_family_index: u32) -> Self { self.queue_family_index = queue_family_index; self } #[inline] pub fn queue_priorities(mut self, queue_priorities: &'a [f32]) -> Self { self.queue_count = queue_priorities.len() as _; self.p_queue_priorities = queue_priorities.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DeviceCreateFlags, pub queue_create_info_count: u32, pub p_queue_create_infos: *const DeviceQueueCreateInfo<'a>, #[deprecated = "functionality described by this member no longer operates"] pub enabled_layer_count: u32, #[deprecated = "functionality described by this member no longer operates"] pub pp_enabled_layer_names: *const *const c_char, pub enabled_extension_count: u32, pub pp_enabled_extension_names: *const *const c_char, pub p_enabled_features: *const PhysicalDeviceFeatures, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceCreateInfo<'_> {} unsafe impl Sync for DeviceCreateInfo<'_> {} impl ::core::default::Default for DeviceCreateInfo<'_> { #[inline] fn default() -> Self { #[allow(deprecated)] Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DeviceCreateFlags::default(), queue_create_info_count: u32::default(), p_queue_create_infos: ::core::ptr::null(), enabled_layer_count: u32::default(), pp_enabled_layer_names: ::core::ptr::null(), enabled_extension_count: u32::default(), pp_enabled_extension_names: ::core::ptr::null(), p_enabled_features: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_CREATE_INFO; } pub unsafe trait ExtendsDeviceCreateInfo {} impl<'a> DeviceCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: DeviceCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn queue_create_infos( mut self, queue_create_infos: &'a [DeviceQueueCreateInfo<'a>], ) -> Self { self.queue_create_info_count = queue_create_infos.len() as _; self.p_queue_create_infos = queue_create_infos.as_ptr(); self } #[deprecated = "functionality described by this member no longer operates"] #[allow(deprecated)] #[inline] pub fn enabled_layer_names(mut self, enabled_layer_names: &'a [*const c_char]) -> Self { self.enabled_layer_count = enabled_layer_names.len() as _; self.pp_enabled_layer_names = enabled_layer_names.as_ptr(); self } #[inline] pub fn enabled_extension_names(mut self, enabled_extension_names: &'a [*const c_char]) -> Self { self.enabled_extension_count = enabled_extension_names.len() as _; self.pp_enabled_extension_names = enabled_extension_names.as_ptr(); self } #[inline] pub fn enabled_features(mut self, enabled_features: &'a PhysicalDeviceFeatures) -> Self { self.p_enabled_features = enabled_features; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct InstanceCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: InstanceCreateFlags, pub p_application_info: *const ApplicationInfo<'a>, pub enabled_layer_count: u32, pub pp_enabled_layer_names: *const *const c_char, pub enabled_extension_count: u32, pub pp_enabled_extension_names: *const *const c_char, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for InstanceCreateInfo<'_> {} unsafe impl Sync for InstanceCreateInfo<'_> {} impl ::core::default::Default for InstanceCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: InstanceCreateFlags::default(), p_application_info: ::core::ptr::null(), enabled_layer_count: u32::default(), pp_enabled_layer_names: ::core::ptr::null(), enabled_extension_count: u32::default(), pp_enabled_extension_names: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for InstanceCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::INSTANCE_CREATE_INFO; } pub unsafe trait ExtendsInstanceCreateInfo {} impl<'a> InstanceCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: InstanceCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn application_info(mut self, application_info: &'a ApplicationInfo<'a>) -> Self { self.p_application_info = application_info; self } #[inline] pub fn enabled_layer_names(mut self, enabled_layer_names: &'a [*const c_char]) -> Self { self.enabled_layer_count = enabled_layer_names.len() as _; self.pp_enabled_layer_names = enabled_layer_names.as_ptr(); self } #[inline] pub fn enabled_extension_names(mut self, enabled_extension_names: &'a [*const c_char]) -> Self { self.enabled_extension_count = enabled_extension_names.len() as _; self.pp_enabled_extension_names = enabled_extension_names.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct QueueFamilyProperties { pub queue_flags: QueueFlags, pub queue_count: u32, pub timestamp_valid_bits: u32, pub min_image_transfer_granularity: Extent3D, } impl QueueFamilyProperties { #[inline] pub fn queue_flags(mut self, queue_flags: QueueFlags) -> Self { self.queue_flags = queue_flags; self } #[inline] pub fn queue_count(mut self, queue_count: u32) -> Self { self.queue_count = queue_count; self } #[inline] pub fn timestamp_valid_bits(mut self, timestamp_valid_bits: u32) -> Self { self.timestamp_valid_bits = timestamp_valid_bits; self } #[inline] pub fn min_image_transfer_granularity( mut self, min_image_transfer_granularity: Extent3D, ) -> Self { self.min_image_transfer_granularity = min_image_transfer_granularity; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMemoryProperties { pub memory_type_count: u32, pub memory_types: [MemoryType; MAX_MEMORY_TYPES], pub memory_heap_count: u32, pub memory_heaps: [MemoryHeap; MAX_MEMORY_HEAPS], } #[cfg(feature = "debug")] impl fmt::Debug for PhysicalDeviceMemoryProperties { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PhysicalDeviceMemoryProperties") .field("memory_type_count", &self.memory_type_count) .field("memory_types", &self.memory_types_as_slice()) .field("memory_heap_count", &self.memory_heap_count) .field("memory_heaps", &self.memory_heaps_as_slice()) .finish() } } impl ::core::default::Default for PhysicalDeviceMemoryProperties { #[inline] fn default() -> Self { Self { memory_type_count: u32::default(), memory_types: unsafe { ::core::mem::zeroed() }, memory_heap_count: u32::default(), memory_heaps: unsafe { ::core::mem::zeroed() }, } } } impl PhysicalDeviceMemoryProperties { #[inline] pub fn memory_types(mut self, memory_types: &'_ [MemoryType]) -> Self { self.memory_type_count = memory_types.len() as _; self.memory_types[..memory_types.len()].copy_from_slice(memory_types); self } #[inline] pub fn memory_types_as_slice(&self) -> &[MemoryType] { &self.memory_types[..self.memory_type_count as _] } #[inline] pub fn memory_heaps(mut self, memory_heaps: &'_ [MemoryHeap]) -> Self { self.memory_heap_count = memory_heaps.len() as _; self.memory_heaps[..memory_heaps.len()].copy_from_slice(memory_heaps); self } #[inline] pub fn memory_heaps_as_slice(&self) -> &[MemoryHeap] { &self.memory_heaps[..self.memory_heap_count as _] } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub allocation_size: DeviceSize, pub memory_type_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryAllocateInfo<'_> {} unsafe impl Sync for MemoryAllocateInfo<'_> {} impl ::core::default::Default for MemoryAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), allocation_size: DeviceSize::default(), 
memory_type_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_ALLOCATE_INFO; } pub unsafe trait ExtendsMemoryAllocateInfo {} impl<'a> MemoryAllocateInfo<'a> { #[inline] pub fn allocation_size(mut self, allocation_size: DeviceSize) -> Self { self.allocation_size = allocation_size; self } #[inline] pub fn memory_type_index(mut self, memory_type_index: u32) -> Self { self.memory_type_index = memory_type_index; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsMemoryAllocateInfo + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MemoryRequirements { pub size: DeviceSize, pub alignment: DeviceSize, pub memory_type_bits: u32, } impl MemoryRequirements { #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn alignment(mut self, alignment: DeviceSize) -> Self { self.alignment = alignment; self } #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SparseImageFormatProperties { pub aspect_mask: ImageAspectFlags, pub image_granularity: Extent3D, pub flags: SparseImageFormatFlags, } impl SparseImageFormatProperties { #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } #[inline] pub fn image_granularity(mut self, image_granularity: Extent3D) -> Self { self.image_granularity = image_granularity; self } #[inline] pub fn flags(mut self, flags: SparseImageFormatFlags) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SparseImageMemoryRequirements { pub format_properties: SparseImageFormatProperties, pub image_mip_tail_first_lod: u32, pub image_mip_tail_size: DeviceSize, pub image_mip_tail_offset: DeviceSize, pub image_mip_tail_stride: DeviceSize, } impl SparseImageMemoryRequirements { #[inline] pub fn format_properties(mut self, format_properties: SparseImageFormatProperties) -> Self { self.format_properties = format_properties; self } #[inline] pub fn image_mip_tail_first_lod(mut self, image_mip_tail_first_lod: u32) -> Self { self.image_mip_tail_first_lod = image_mip_tail_first_lod; self } #[inline] pub fn image_mip_tail_size(mut self, image_mip_tail_size: DeviceSize) -> Self { self.image_mip_tail_size = image_mip_tail_size; self } #[inline] pub fn image_mip_tail_offset(mut self, image_mip_tail_offset: DeviceSize) -> Self { self.image_mip_tail_offset = image_mip_tail_offset; self } #[inline] pub fn image_mip_tail_stride(mut self, image_mip_tail_stride: DeviceSize) -> Self { self.image_mip_tail_stride = image_mip_tail_stride; self } }
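// Illustrative usage sketch (not part of the generated Vulkan definitions): the
// builder-style setters above return `Self`, so create-info structs can be filled
// fluently, and `push_next` splices an extension struct directly behind the root of
// the `p_next` chain (`A -> B -> C` becomes `A -> D -> B -> C`). Assumptions: this
// file sits under the crate's `vk` module as usual, `MemoryDedicatedAllocateInfo`
// (defined elsewhere in this module) carries its usual `ExtendsMemoryAllocateInfo`
// impl, and the sizes/indices below are arbitrary example values.
#[cfg(test)]
mod builder_and_pnext_sketch {
    use crate::vk;

    #[test]
    fn fluent_builders_fill_counts_and_s_type() {
        let priorities = [1.0f32];
        // `queue_priorities` stores the slice pointer and derives `queue_count` from its length.
        let queue_info = vk::DeviceQueueCreateInfo::default()
            .queue_family_index(0)
            .queue_priorities(&priorities);
        assert_eq!(queue_info.queue_count, 1);
        assert!(queue_info.s_type == vk::StructureType::DEVICE_QUEUE_CREATE_INFO);
    }

    #[test]
    fn push_next_links_extension_struct_into_chain() {
        let mut dedicated = vk::MemoryDedicatedAllocateInfo::default();
        // `push_next` writes the extension struct's address into the root's `p_next`.
        let alloc_info = vk::MemoryAllocateInfo::default()
            .allocation_size(1024)
            .memory_type_index(0)
            .push_next(&mut dedicated);
        assert!(alloc_info.s_type == vk::StructureType::MEMORY_ALLOCATE_INFO);
        assert!(!alloc_info.p_next.is_null());
    }
}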
#[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MemoryType { pub property_flags: MemoryPropertyFlags, pub heap_index: u32, } impl MemoryType { #[inline] pub fn property_flags(mut self, property_flags: MemoryPropertyFlags) -> Self { self.property_flags = property_flags; self } #[inline] pub fn heap_index(mut self, heap_index: u32) -> Self { self.heap_index = heap_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MemoryHeap { pub size: DeviceSize, pub flags: MemoryHeapFlags, } impl MemoryHeap { #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn flags(mut self, flags: MemoryHeapFlags) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MappedMemoryRange<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub offset: DeviceSize, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MappedMemoryRange<'_> {} unsafe impl Sync for MappedMemoryRange<'_> {} impl ::core::default::Default for MappedMemoryRange<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), offset: DeviceSize::default(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MappedMemoryRange<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MAPPED_MEMORY_RANGE; } impl<'a> MappedMemoryRange<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct FormatProperties { pub linear_tiling_features: FormatFeatureFlags, pub optimal_tiling_features: FormatFeatureFlags, pub buffer_features: FormatFeatureFlags, } impl FormatProperties { #[inline] pub fn linear_tiling_features(mut self, linear_tiling_features: FormatFeatureFlags) -> Self { self.linear_tiling_features = linear_tiling_features; self } #[inline] pub fn optimal_tiling_features(mut self, optimal_tiling_features: FormatFeatureFlags) -> Self { self.optimal_tiling_features = optimal_tiling_features; self } #[inline] pub fn buffer_features(mut self, buffer_features: FormatFeatureFlags) -> Self { self.buffer_features = buffer_features; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ImageFormatProperties { pub max_extent: Extent3D, pub max_mip_levels: u32, pub max_array_layers: u32, pub sample_counts: SampleCountFlags, pub max_resource_size: DeviceSize, } impl ImageFormatProperties { #[inline] pub fn max_extent(mut self, max_extent: Extent3D) -> Self { self.max_extent = max_extent; self } #[inline] pub fn max_mip_levels(mut self, max_mip_levels: u32) -> Self { self.max_mip_levels = max_mip_levels; self } #[inline] pub fn max_array_layers(mut self, max_array_layers: u32) -> Self { self.max_array_layers = max_array_layers; self } #[inline] pub fn sample_counts(mut self, sample_counts: SampleCountFlags) -> Self { self.sample_counts = sample_counts; self } 
#[inline] pub fn max_resource_size(mut self, max_resource_size: DeviceSize) -> Self { self.max_resource_size = max_resource_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DescriptorBufferInfo { pub buffer: Buffer, pub offset: DeviceSize, pub range: DeviceSize, } impl DescriptorBufferInfo { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn range(mut self, range: DeviceSize) -> Self { self.range = range; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DescriptorImageInfo { pub sampler: Sampler, pub image_view: ImageView, pub image_layout: ImageLayout, } impl DescriptorImageInfo { #[inline] pub fn sampler(mut self, sampler: Sampler) -> Self { self.sampler = sampler; self } #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } #[inline] pub fn image_layout(mut self, image_layout: ImageLayout) -> Self { self.image_layout = image_layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct WriteDescriptorSet<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub dst_set: DescriptorSet, pub dst_binding: u32, pub dst_array_element: u32, pub descriptor_count: u32, pub descriptor_type: DescriptorType, pub p_image_info: *const DescriptorImageInfo, pub p_buffer_info: *const DescriptorBufferInfo, pub p_texel_buffer_view: *const BufferView, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for WriteDescriptorSet<'_> {} unsafe impl Sync for WriteDescriptorSet<'_> {} impl ::core::default::Default for WriteDescriptorSet<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), dst_set: DescriptorSet::default(), dst_binding: u32::default(), dst_array_element: u32::default(), descriptor_count: u32::default(), descriptor_type: DescriptorType::default(), p_image_info: ::core::ptr::null(), p_buffer_info: ::core::ptr::null(), p_texel_buffer_view: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for WriteDescriptorSet<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WRITE_DESCRIPTOR_SET; } pub unsafe trait ExtendsWriteDescriptorSet {} impl<'a> WriteDescriptorSet<'a> { #[inline] pub fn dst_set(mut self, dst_set: DescriptorSet) -> Self { self.dst_set = dst_set; self } #[inline] pub fn dst_binding(mut self, dst_binding: u32) -> Self { self.dst_binding = dst_binding; self } #[inline] pub fn dst_array_element(mut self, dst_array_element: u32) -> Self { self.dst_array_element = dst_array_element; self } #[inline] pub fn descriptor_count(mut self, descriptor_count: u32) -> Self { self.descriptor_count = descriptor_count; self } #[inline] pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self { self.descriptor_type = descriptor_type; self } #[inline] pub fn image_info(mut self, image_info: &'a [DescriptorImageInfo]) -> Self { self.descriptor_count = image_info.len() as _; self.p_image_info = image_info.as_ptr(); self } #[inline] pub fn buffer_info(mut self, buffer_info: &'a [DescriptorBufferInfo]) -> Self { self.descriptor_count = buffer_info.len() as _; self.p_buffer_info = buffer_info.as_ptr(); self } #[inline] pub fn texel_buffer_view(mut self, 
texel_buffer_view: &'a [BufferView]) -> Self { self.descriptor_count = texel_buffer_view.len() as _; self.p_texel_buffer_view = texel_buffer_view.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyDescriptorSet<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_set: DescriptorSet, pub src_binding: u32, pub src_array_element: u32, pub dst_set: DescriptorSet, pub dst_binding: u32, pub dst_array_element: u32, pub descriptor_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyDescriptorSet<'_> {} unsafe impl Sync for CopyDescriptorSet<'_> {} impl ::core::default::Default for CopyDescriptorSet<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_set: DescriptorSet::default(), src_binding: u32::default(), src_array_element: u32::default(), dst_set: DescriptorSet::default(), dst_binding: u32::default(), dst_array_element: u32::default(), descriptor_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyDescriptorSet<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_DESCRIPTOR_SET; } impl<'a> CopyDescriptorSet<'a> { #[inline] pub fn src_set(mut self, src_set: DescriptorSet) -> Self { self.src_set = src_set; self } #[inline] pub fn src_binding(mut self, src_binding: u32) -> Self { self.src_binding = src_binding; self } #[inline] pub fn src_array_element(mut self, src_array_element: u32) -> Self { self.src_array_element = src_array_element; self } #[inline] pub fn dst_set(mut self, dst_set: DescriptorSet) -> Self { self.dst_set = dst_set; self } #[inline] pub fn dst_binding(mut self, dst_binding: u32) -> Self { self.dst_binding = dst_binding; self } #[inline] pub fn dst_array_element(mut self, dst_array_element: u32) -> Self { self.dst_array_element = dst_array_element; self } #[inline] pub fn descriptor_count(mut self, descriptor_count: u32) -> Self { self.descriptor_count = descriptor_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferUsageFlags2CreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub usage: BufferUsageFlags2KHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferUsageFlags2CreateInfoKHR<'_> {} unsafe impl Sync for BufferUsageFlags2CreateInfoKHR<'_> {} impl ::core::default::Default for BufferUsageFlags2CreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), usage: BufferUsageFlags2KHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferUsageFlags2CreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR; } unsafe impl ExtendsBufferViewCreateInfo for 
BufferUsageFlags2CreateInfoKHR<'_> {} unsafe impl ExtendsBufferCreateInfo for BufferUsageFlags2CreateInfoKHR<'_> {} unsafe impl ExtendsPhysicalDeviceExternalBufferInfo for BufferUsageFlags2CreateInfoKHR<'_> {} unsafe impl ExtendsDescriptorBufferBindingInfoEXT for BufferUsageFlags2CreateInfoKHR<'_> {} impl<'a> BufferUsageFlags2CreateInfoKHR<'a> { #[inline] pub fn usage(mut self, usage: BufferUsageFlags2KHR) -> Self { self.usage = usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: BufferCreateFlags, pub size: DeviceSize, pub usage: BufferUsageFlags, pub sharing_mode: SharingMode, pub queue_family_index_count: u32, pub p_queue_family_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCreateInfo<'_> {} unsafe impl Sync for BufferCreateInfo<'_> {} impl ::core::default::Default for BufferCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: BufferCreateFlags::default(), size: DeviceSize::default(), usage: BufferUsageFlags::default(), sharing_mode: SharingMode::default(), queue_family_index_count: u32::default(), p_queue_family_indices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_CREATE_INFO; } pub unsafe trait ExtendsBufferCreateInfo {} impl<'a> BufferCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: BufferCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn usage(mut self, usage: BufferUsageFlags) -> Self { self.usage = usage; self } #[inline] pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> Self { self.sharing_mode = sharing_mode; self } #[inline] pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self { self.queue_family_index_count = queue_family_indices.len() as _; self.p_queue_family_indices = queue_family_indices.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferViewCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: BufferViewCreateFlags, pub buffer: Buffer, pub format: Format, pub offset: DeviceSize, pub range: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferViewCreateInfo<'_> {} unsafe impl Sync for BufferViewCreateInfo<'_> {} impl ::core::default::Default for BufferViewCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: BufferViewCreateFlags::default(), buffer: Buffer::default(), format: Format::default(), offset: DeviceSize::default(), range: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferViewCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_VIEW_CREATE_INFO; } pub unsafe trait ExtendsBufferViewCreateInfo {} impl<'a> BufferViewCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: BufferViewCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn range(mut self, range: DeviceSize) -> Self { self.range = range; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ImageSubresource { pub aspect_mask: ImageAspectFlags, pub mip_level: u32, pub array_layer: u32, } impl ImageSubresource { #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } #[inline] pub fn mip_level(mut self, mip_level: u32) -> Self { self.mip_level = mip_level; self } #[inline] pub fn array_layer(mut self, array_layer: u32) -> Self { self.array_layer = array_layer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ImageSubresourceLayers { pub aspect_mask: ImageAspectFlags, pub mip_level: u32, pub base_array_layer: u32, pub layer_count: u32, } impl ImageSubresourceLayers { #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } #[inline] pub fn mip_level(mut self, mip_level: u32) -> Self { self.mip_level = mip_level; self } #[inline] pub fn base_array_layer(mut self, base_array_layer: u32) -> Self { self.base_array_layer = base_array_layer; self } #[inline] pub fn layer_count(mut self, layer_count: u32) -> Self { self.layer_count = layer_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ImageSubresourceRange { pub aspect_mask: ImageAspectFlags, pub base_mip_level: u32, pub level_count: u32, pub base_array_layer: u32, pub layer_count: u32, } impl ImageSubresourceRange { #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } #[inline] pub fn base_mip_level(mut self, base_mip_level: u32) -> Self { self.base_mip_level = base_mip_level; self } #[inline] pub fn level_count(mut self, level_count: u32) -> Self { self.level_count = level_count; self } #[inline] pub fn base_array_layer(mut self, base_array_layer: u32) -> Self { self.base_array_layer = base_array_layer; self } #[inline] pub fn layer_count(mut self, layer_count: u32) -> Self { self.layer_count = layer_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryBarrier<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_access_mask: AccessFlags, pub dst_access_mask: AccessFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryBarrier<'_> {} unsafe impl Sync for MemoryBarrier<'_> {} impl ::core::default::Default for MemoryBarrier<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_access_mask: AccessFlags::default(), dst_access_mask: AccessFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryBarrier<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_BARRIER; } impl<'a> MemoryBarrier<'a> { #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self { self.src_access_mask = 
src_access_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self { self.dst_access_mask = dst_access_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferMemoryBarrier<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_access_mask: AccessFlags, pub dst_access_mask: AccessFlags, pub src_queue_family_index: u32, pub dst_queue_family_index: u32, pub buffer: Buffer, pub offset: DeviceSize, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferMemoryBarrier<'_> {} unsafe impl Sync for BufferMemoryBarrier<'_> {} impl ::core::default::Default for BufferMemoryBarrier<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_access_mask: AccessFlags::default(), dst_access_mask: AccessFlags::default(), src_queue_family_index: u32::default(), dst_queue_family_index: u32::default(), buffer: Buffer::default(), offset: DeviceSize::default(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferMemoryBarrier<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_MEMORY_BARRIER; } pub unsafe trait ExtendsBufferMemoryBarrier {} impl<'a> BufferMemoryBarrier<'a> { #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self { self.dst_access_mask = dst_access_mask; self } #[inline] pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self { self.src_queue_family_index = src_queue_family_index; self } #[inline] pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self { self.dst_queue_family_index = dst_queue_family_index; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageMemoryBarrier<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_access_mask: AccessFlags, pub dst_access_mask: AccessFlags, pub old_layout: ImageLayout, pub new_layout: ImageLayout, pub src_queue_family_index: u32, pub dst_queue_family_index: u32, pub image: Image, pub subresource_range: ImageSubresourceRange, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageMemoryBarrier<'_> {} unsafe impl Sync for ImageMemoryBarrier<'_> {} impl ::core::default::Default for ImageMemoryBarrier<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_access_mask: AccessFlags::default(), dst_access_mask: AccessFlags::default(), old_layout: ImageLayout::default(), new_layout: ImageLayout::default(), src_queue_family_index: u32::default(), dst_queue_family_index: u32::default(), image: Image::default(), subresource_range: ImageSubresourceRange::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageMemoryBarrier<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_MEMORY_BARRIER; } pub unsafe trait ExtendsImageMemoryBarrier {} impl<'a> ImageMemoryBarrier<'a> { #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self { self.dst_access_mask = dst_access_mask; self } #[inline] pub fn old_layout(mut self, old_layout: ImageLayout) -> Self { self.old_layout = old_layout; self } #[inline] pub fn new_layout(mut self, new_layout: ImageLayout) -> Self { self.new_layout = new_layout; self } #[inline] pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self { self.src_queue_family_index = src_queue_family_index; self } #[inline] pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self { self.dst_queue_family_index = dst_queue_family_index; self } #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self { self.subresource_range = subresource_range; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ImageCreateFlags, pub image_type: ImageType, pub format: Format, pub extent: Extent3D, pub mip_levels: u32, pub array_layers: u32, pub samples: SampleCountFlags, pub tiling: ImageTiling, pub usage: ImageUsageFlags, pub sharing_mode: SharingMode, pub queue_family_index_count: u32, pub p_queue_family_indices: *const u32, pub initial_layout: ImageLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageCreateInfo<'_> {} unsafe impl Sync for ImageCreateInfo<'_> {} impl ::core::default::Default for ImageCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ImageCreateFlags::default(), image_type: ImageType::default(), format: Format::default(), extent: Extent3D::default(), mip_levels: u32::default(), array_layers: u32::default(), samples: SampleCountFlags::default(), tiling: ImageTiling::default(), usage: ImageUsageFlags::default(), sharing_mode: SharingMode::default(), queue_family_index_count: u32::default(), p_queue_family_indices: ::core::ptr::null(), initial_layout: ImageLayout::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_CREATE_INFO; } pub unsafe trait ExtendsImageCreateInfo {} impl<'a> ImageCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: ImageCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn image_type(mut self, image_type: ImageType) -> Self { self.image_type = image_type; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn extent(mut self, extent: Extent3D) -> Self { self.extent = extent; self } #[inline] pub fn mip_levels(mut self, mip_levels: u32) -> Self { self.mip_levels = mip_levels; self } #[inline] pub fn array_layers(mut self, array_layers: u32) -> Self { self.array_layers = array_layers; self } #[inline] pub fn samples(mut self, samples: SampleCountFlags) -> Self { self.samples = samples; self } #[inline] pub fn tiling(mut self, tiling: ImageTiling) -> Self { self.tiling = tiling; self } #[inline] pub fn usage(mut self, usage: ImageUsageFlags) -> Self { self.usage = usage; self } #[inline] pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> Self { self.sharing_mode = sharing_mode; self } #[inline] pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self { self.queue_family_index_count = queue_family_indices.len() as _; self.p_queue_family_indices = queue_family_indices.as_ptr(); self } #[inline] pub fn initial_layout(mut self, initial_layout: ImageLayout) -> Self { self.initial_layout = initial_layout; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SubresourceLayout { pub offset: DeviceSize, pub size: DeviceSize, pub row_pitch: DeviceSize, pub array_pitch: DeviceSize, pub depth_pitch: DeviceSize, } impl SubresourceLayout { #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn row_pitch(mut self, row_pitch: DeviceSize) -> Self { self.row_pitch = row_pitch; self } #[inline] pub fn array_pitch(mut self, array_pitch: DeviceSize) -> Self { self.array_pitch = array_pitch; self } #[inline] pub fn depth_pitch(mut self, depth_pitch: DeviceSize) -> Self { self.depth_pitch = depth_pitch; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ImageViewCreateFlags, pub image: Image, pub view_type: ImageViewType, pub format: Format, pub components: ComponentMapping, pub subresource_range: ImageSubresourceRange, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewCreateInfo<'_> {} unsafe impl Sync for ImageViewCreateInfo<'_> {} impl ::core::default::Default for ImageViewCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ImageViewCreateFlags::default(), image: Image::default(), view_type: ImageViewType::default(), format: Format::default(), components: ComponentMapping::default(), subresource_range: ImageSubresourceRange::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_CREATE_INFO; } pub unsafe trait ExtendsImageViewCreateInfo {} impl<'a> ImageViewCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: ImageViewCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn view_type(mut self, view_type: ImageViewType) -> Self { self.view_type = view_type; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn components(mut self, components: ComponentMapping) -> Self { self.components = components; self } #[inline] pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self { self.subresource_range = subresource_range; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct BufferCopy { pub src_offset: DeviceSize, pub dst_offset: DeviceSize, pub size: DeviceSize, } impl BufferCopy { #[inline] pub fn src_offset(mut self, src_offset: DeviceSize) -> Self { self.src_offset = src_offset; self } #[inline] pub fn dst_offset(mut self, dst_offset: DeviceSize) -> Self { self.dst_offset = dst_offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SparseMemoryBind { pub resource_offset: DeviceSize, pub size: DeviceSize, pub memory: DeviceMemory, pub memory_offset: DeviceSize, pub flags: SparseMemoryBindFlags, } impl SparseMemoryBind { #[inline] pub fn resource_offset(mut self, resource_offset: DeviceSize) -> Self { self.resource_offset = resource_offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self { self.memory_offset = memory_offset; self } #[inline] pub fn flags(mut self, flags: SparseMemoryBindFlags) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SparseImageMemoryBind { pub subresource: ImageSubresource, pub offset: Offset3D, pub extent: Extent3D, pub memory: DeviceMemory, pub memory_offset: DeviceSize, pub flags: SparseMemoryBindFlags, } impl SparseImageMemoryBind { #[inline] pub fn subresource(mut self, subresource: ImageSubresource) -> Self { self.subresource = subresource; self } #[inline] pub fn offset(mut self, offset: Offset3D) -> Self { self.offset = offset; self } #[inline] pub fn extent(mut self, extent: Extent3D) -> Self { self.extent = extent; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self { self.memory_offset = memory_offset; self } #[inline] pub fn flags(mut self, flags: SparseMemoryBindFlags) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SparseBufferMemoryBindInfo<'a> { pub buffer: Buffer, pub bind_count: u32, pub p_binds: *const SparseMemoryBind, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SparseBufferMemoryBindInfo<'_> {} unsafe impl Sync for SparseBufferMemoryBindInfo<'_> {} impl ::core::default::Default for SparseBufferMemoryBindInfo<'_> { #[inline] fn default() -> Self { Self { buffer: Buffer::default(), bind_count: u32::default(), p_binds: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> SparseBufferMemoryBindInfo<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn 
binds(mut self, binds: &'a [SparseMemoryBind]) -> Self { self.bind_count = binds.len() as _; self.p_binds = binds.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SparseImageOpaqueMemoryBindInfo<'a> { pub image: Image, pub bind_count: u32, pub p_binds: *const SparseMemoryBind, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SparseImageOpaqueMemoryBindInfo<'_> {} unsafe impl Sync for SparseImageOpaqueMemoryBindInfo<'_> {} impl ::core::default::Default for SparseImageOpaqueMemoryBindInfo<'_> { #[inline] fn default() -> Self { Self { image: Image::default(), bind_count: u32::default(), p_binds: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> SparseImageOpaqueMemoryBindInfo<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn binds(mut self, binds: &'a [SparseMemoryBind]) -> Self { self.bind_count = binds.len() as _; self.p_binds = binds.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SparseImageMemoryBindInfo<'a> { pub image: Image, pub bind_count: u32, pub p_binds: *const SparseImageMemoryBind, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SparseImageMemoryBindInfo<'_> {} unsafe impl Sync for SparseImageMemoryBindInfo<'_> {} impl ::core::default::Default for SparseImageMemoryBindInfo<'_> { #[inline] fn default() -> Self { Self { image: Image::default(), bind_count: u32::default(), p_binds: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> SparseImageMemoryBindInfo<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn binds(mut self, binds: &'a [SparseImageMemoryBind]) -> Self { self.bind_count = binds.len() as _; self.p_binds = binds.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindSparseInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub wait_semaphore_count: u32, pub p_wait_semaphores: *const Semaphore, pub buffer_bind_count: u32, pub p_buffer_binds: *const SparseBufferMemoryBindInfo<'a>, pub image_opaque_bind_count: u32, pub p_image_opaque_binds: *const SparseImageOpaqueMemoryBindInfo<'a>, pub image_bind_count: u32, pub p_image_binds: *const SparseImageMemoryBindInfo<'a>, pub signal_semaphore_count: u32, pub p_signal_semaphores: *const Semaphore, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindSparseInfo<'_> {} unsafe impl Sync for BindSparseInfo<'_> {} impl ::core::default::Default for BindSparseInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), wait_semaphore_count: u32::default(), p_wait_semaphores: ::core::ptr::null(), buffer_bind_count: u32::default(), p_buffer_binds: ::core::ptr::null(), image_opaque_bind_count: u32::default(), p_image_opaque_binds: ::core::ptr::null(), image_bind_count: u32::default(), p_image_binds: ::core::ptr::null(), signal_semaphore_count: u32::default(), p_signal_semaphores: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindSparseInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_SPARSE_INFO; } pub unsafe trait ExtendsBindSparseInfo {} impl<'a> BindSparseInfo<'a> { #[inline] pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> Self { self.wait_semaphore_count = wait_semaphores.len() as _; 
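// Illustrative usage sketch (comment only, not part of the generated bindings):
// the slice-based `binds()`/`buffer_binds()` setters on these sparse-binding
// builders record both the element count and the pointer, so the raw
// `*_count`/`p_*` fields never need to be touched directly. The `memory` and
// `buffer` handles below are assumed to have been created elsewhere.
//
//     let binds = [SparseMemoryBind::default()
//         .resource_offset(0)
//         .size(65536)
//         .memory(memory)
//         .memory_offset(0)];
//     let buffer_binds = [SparseBufferMemoryBindInfo::default()
//         .buffer(buffer)
//         .binds(&binds)];
//     let info = BindSparseInfo::default().buffer_binds(&buffer_binds);
//
// The borrowed slices must outlive `info` (the `'a` lifetime), since only raw
// pointers are stored.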
self.p_wait_semaphores = wait_semaphores.as_ptr(); self } #[inline] pub fn buffer_binds(mut self, buffer_binds: &'a [SparseBufferMemoryBindInfo<'a>]) -> Self { self.buffer_bind_count = buffer_binds.len() as _; self.p_buffer_binds = buffer_binds.as_ptr(); self } #[inline] pub fn image_opaque_binds( mut self, image_opaque_binds: &'a [SparseImageOpaqueMemoryBindInfo<'a>], ) -> Self { self.image_opaque_bind_count = image_opaque_binds.len() as _; self.p_image_opaque_binds = image_opaque_binds.as_ptr(); self } #[inline] pub fn image_binds(mut self, image_binds: &'a [SparseImageMemoryBindInfo<'a>]) -> Self { self.image_bind_count = image_binds.len() as _; self.p_image_binds = image_binds.as_ptr(); self } #[inline] pub fn signal_semaphores(mut self, signal_semaphores: &'a [Semaphore]) -> Self { self.signal_semaphore_count = signal_semaphores.len() as _; self.p_signal_semaphores = signal_semaphores.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ImageCopy { pub src_subresource: ImageSubresourceLayers, pub src_offset: Offset3D, pub dst_subresource: ImageSubresourceLayers, pub dst_offset: Offset3D, pub extent: Extent3D, } impl ImageCopy { #[inline] pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self { self.src_subresource = src_subresource; self } #[inline] pub fn src_offset(mut self, src_offset: Offset3D) -> Self { self.src_offset = src_offset; self } #[inline] pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self { self.dst_subresource = dst_subresource; self } #[inline] pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self { self.dst_offset = dst_offset; self } #[inline] pub fn extent(mut self, extent: Extent3D) -> Self { self.extent = extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageBlit { pub src_subresource: ImageSubresourceLayers, pub src_offsets: [Offset3D; 2], pub dst_subresource: ImageSubresourceLayers, pub dst_offsets: [Offset3D; 2], } impl ::core::default::Default for ImageBlit { #[inline] fn default() -> Self { Self { src_subresource: ImageSubresourceLayers::default(), src_offsets: unsafe { ::core::mem::zeroed() }, dst_subresource: ImageSubresourceLayers::default(), dst_offsets: unsafe { ::core::mem::zeroed() }, } } } impl ImageBlit { #[inline] pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self { self.src_subresource = src_subresource; self } #[inline] pub fn src_offsets(mut self, src_offsets: [Offset3D; 2]) -> Self { self.src_offsets = src_offsets; self } #[inline] pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self { self.dst_subresource = dst_subresource; self } #[inline] pub fn dst_offsets(mut self, dst_offsets: [Offset3D; 2]) -> Self { self.dst_offsets = 
dst_offsets; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct BufferImageCopy { pub buffer_offset: DeviceSize, pub buffer_row_length: u32, pub buffer_image_height: u32, pub image_subresource: ImageSubresourceLayers, pub image_offset: Offset3D, pub image_extent: Extent3D, } impl BufferImageCopy { #[inline] pub fn buffer_offset(mut self, buffer_offset: DeviceSize) -> Self { self.buffer_offset = buffer_offset; self } #[inline] pub fn buffer_row_length(mut self, buffer_row_length: u32) -> Self { self.buffer_row_length = buffer_row_length; self } #[inline] pub fn buffer_image_height(mut self, buffer_image_height: u32) -> Self { self.buffer_image_height = buffer_image_height; self } #[inline] pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self { self.image_subresource = image_subresource; self } #[inline] pub fn image_offset(mut self, image_offset: Offset3D) -> Self { self.image_offset = image_offset; self } #[inline] pub fn image_extent(mut self, image_extent: Extent3D) -> Self { self.image_extent = image_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct CopyMemoryIndirectCommandNV { pub src_address: DeviceAddress, pub dst_address: DeviceAddress, pub size: DeviceSize, } impl CopyMemoryIndirectCommandNV { #[inline] pub fn src_address(mut self, src_address: DeviceAddress) -> Self { self.src_address = src_address; self } #[inline] pub fn dst_address(mut self, dst_address: DeviceAddress) -> Self { self.dst_address = dst_address; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct CopyMemoryToImageIndirectCommandNV { pub src_address: DeviceAddress, pub buffer_row_length: u32, pub buffer_image_height: u32, pub image_subresource: ImageSubresourceLayers, pub image_offset: Offset3D, pub image_extent: Extent3D, } impl CopyMemoryToImageIndirectCommandNV { #[inline] pub fn src_address(mut self, src_address: DeviceAddress) -> Self { self.src_address = src_address; self } #[inline] pub fn buffer_row_length(mut self, buffer_row_length: u32) -> Self { self.buffer_row_length = buffer_row_length; self } #[inline] pub fn buffer_image_height(mut self, buffer_image_height: u32) -> Self { self.buffer_image_height = buffer_image_height; self } #[inline] pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self { self.image_subresource = image_subresource; self } #[inline] pub fn image_offset(mut self, image_offset: Offset3D) -> Self { self.image_offset = image_offset; self } #[inline] pub fn image_extent(mut self, image_extent: Extent3D) -> Self { self.image_extent = image_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ImageResolve { pub src_subresource: ImageSubresourceLayers, pub src_offset: Offset3D, pub dst_subresource: ImageSubresourceLayers, pub dst_offset: Offset3D, pub extent: Extent3D, } impl ImageResolve { #[inline] pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self { self.src_subresource = src_subresource; self } #[inline] pub fn src_offset(mut self, src_offset: Offset3D) -> Self { self.src_offset = src_offset; self } #[inline] pub fn dst_subresource(mut self, dst_subresource: 
ImageSubresourceLayers) -> Self { self.dst_subresource = dst_subresource; self } #[inline] pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self { self.dst_offset = dst_offset; self } #[inline] pub fn extent(mut self, extent: Extent3D) -> Self { self.extent = extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ShaderModuleCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ShaderModuleCreateFlags, pub code_size: usize, pub p_code: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ShaderModuleCreateInfo<'_> {} unsafe impl Sync for ShaderModuleCreateInfo<'_> {} impl ::core::default::Default for ShaderModuleCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ShaderModuleCreateFlags::default(), code_size: usize::default(), p_code: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ShaderModuleCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SHADER_MODULE_CREATE_INFO; } unsafe impl ExtendsPipelineShaderStageCreateInfo for ShaderModuleCreateInfo<'_> {} pub unsafe trait ExtendsShaderModuleCreateInfo {} impl<'a> ShaderModuleCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: ShaderModuleCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn code(mut self, code: &'a [u32]) -> Self { self.code_size = code.len() * 4; self.p_code = code.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetLayoutBinding<'a> { pub binding: u32, pub descriptor_type: DescriptorType, pub descriptor_count: u32, pub stage_flags: ShaderStageFlags, pub p_immutable_samplers: *const Sampler, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetLayoutBinding<'_> {} unsafe impl Sync for DescriptorSetLayoutBinding<'_> {} impl ::core::default::Default for DescriptorSetLayoutBinding<'_> { #[inline] fn default() -> Self { Self { binding: u32::default(), descriptor_type: DescriptorType::default(), descriptor_count: u32::default(), stage_flags: ShaderStageFlags::default(), p_immutable_samplers: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> DescriptorSetLayoutBinding<'a> { #[inline] pub fn binding(mut self, binding: u32) -> Self { self.binding = binding; self } #[inline] pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self { self.descriptor_type = descriptor_type; self } #[inline] pub fn descriptor_count(mut self, descriptor_count: u32) -> Self { self.descriptor_count = descriptor_count; self } #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn immutable_samplers(mut self, immutable_samplers: &'a [Sampler]) -> Self { 
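// Note (descriptive comment, not generated code): the assignment below derives
// `descriptor_count` from the length of `immutable_samplers`, so calling
// `immutable_samplers()` replaces any value previously set through
// `descriptor_count()`. A typical binding without immutable samplers only
// needs the plain setters, e.g.:
//
//     let binding = DescriptorSetLayoutBinding::default()
//         .binding(0)
//         .descriptor_type(DescriptorType::UNIFORM_BUFFER)
//         .descriptor_count(1)
//         .stage_flags(ShaderStageFlags::VERTEX);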
self.descriptor_count = immutable_samplers.len() as _; self.p_immutable_samplers = immutable_samplers.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetLayoutCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DescriptorSetLayoutCreateFlags, pub binding_count: u32, pub p_bindings: *const DescriptorSetLayoutBinding<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetLayoutCreateInfo<'_> {} unsafe impl Sync for DescriptorSetLayoutCreateInfo<'_> {} impl ::core::default::Default for DescriptorSetLayoutCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DescriptorSetLayoutCreateFlags::default(), binding_count: u32::default(), p_bindings: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetLayoutCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_LAYOUT_CREATE_INFO; } pub unsafe trait ExtendsDescriptorSetLayoutCreateInfo {} impl<'a> DescriptorSetLayoutCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: DescriptorSetLayoutCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn bindings(mut self, bindings: &'a [DescriptorSetLayoutBinding<'a>]) -> Self { self.binding_count = bindings.len() as _; self.p_bindings = bindings.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DescriptorPoolSize { pub ty: DescriptorType, pub descriptor_count: u32, } impl DescriptorPoolSize { #[inline] pub fn ty(mut self, ty: DescriptorType) -> Self { self.ty = ty; self } #[inline] pub fn descriptor_count(mut self, descriptor_count: u32) -> Self { self.descriptor_count = descriptor_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorPoolCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DescriptorPoolCreateFlags, pub max_sets: u32, pub pool_size_count: u32, pub p_pool_sizes: *const DescriptorPoolSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorPoolCreateInfo<'_> {} unsafe impl Sync for DescriptorPoolCreateInfo<'_> {} impl ::core::default::Default for DescriptorPoolCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DescriptorPoolCreateFlags::default(), max_sets: u32::default(), pool_size_count: u32::default(), p_pool_sizes: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorPoolCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_POOL_CREATE_INFO; } pub unsafe trait ExtendsDescriptorPoolCreateInfo {} impl<'a> 
DescriptorPoolCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: DescriptorPoolCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn max_sets(mut self, max_sets: u32) -> Self { self.max_sets = max_sets; self } #[inline] pub fn pool_sizes(mut self, pool_sizes: &'a [DescriptorPoolSize]) -> Self { self.pool_size_count = pool_sizes.len() as _; self.p_pool_sizes = pool_sizes.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub descriptor_pool: DescriptorPool, pub descriptor_set_count: u32, pub p_set_layouts: *const DescriptorSetLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetAllocateInfo<'_> {} unsafe impl Sync for DescriptorSetAllocateInfo<'_> {} impl ::core::default::Default for DescriptorSetAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), descriptor_pool: DescriptorPool::default(), descriptor_set_count: u32::default(), p_set_layouts: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_ALLOCATE_INFO; } pub unsafe trait ExtendsDescriptorSetAllocateInfo {} impl<'a> DescriptorSetAllocateInfo<'a> { #[inline] pub fn descriptor_pool(mut self, descriptor_pool: DescriptorPool) -> Self { self.descriptor_pool = descriptor_pool; self } #[inline] pub fn set_layouts(mut self, set_layouts: &'a [DescriptorSetLayout]) -> Self { self.descriptor_set_count = set_layouts.len() as _; self.p_set_layouts = set_layouts.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SpecializationMapEntry { pub constant_id: u32, pub offset: u32, pub size: usize, } impl SpecializationMapEntry { #[inline] pub fn constant_id(mut self, constant_id: u32) -> Self { self.constant_id = constant_id; self } #[inline] pub fn offset(mut self, offset: u32) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: usize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SpecializationInfo<'a> { pub map_entry_count: u32, pub p_map_entries: *const SpecializationMapEntry, pub data_size: usize, pub p_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SpecializationInfo<'_> {} unsafe impl Sync for SpecializationInfo<'_> {} impl ::core::default::Default for SpecializationInfo<'_> { #[inline] fn default() -> Self { Self { map_entry_count: u32::default(), p_map_entries: ::core::ptr::null(), data_size: usize::default(), p_data: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> SpecializationInfo<'a> { #[inline] pub fn map_entries(mut self, map_entries: &'a [SpecializationMapEntry]) -> Self { self.map_entry_count = map_entries.len() as _; self.p_map_entries = map_entries.as_ptr(); self } #[inline] pub fn data(mut self, data: &'a [u8]) -> Self { self.data_size = data.len(); self.p_data = data.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineShaderStageCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineShaderStageCreateFlags, pub stage: ShaderStageFlags, pub module: ShaderModule, pub p_name: *const c_char, pub p_specialization_info: *const SpecializationInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineShaderStageCreateInfo<'_> {} unsafe impl Sync for PipelineShaderStageCreateInfo<'_> {} impl ::core::default::Default for PipelineShaderStageCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineShaderStageCreateFlags::default(), stage: ShaderStageFlags::default(), module: ShaderModule::default(), p_name: ::core::ptr::null(), p_specialization_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineShaderStageCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO; } pub unsafe trait ExtendsPipelineShaderStageCreateInfo {} impl<'a> PipelineShaderStageCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineShaderStageCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn stage(mut self, stage: ShaderStageFlags) -> Self { self.stage = stage; self } #[inline] pub fn module(mut self, module: ShaderModule) -> Self { self.module = module; self } #[inline] pub fn name(mut self, name: &'a CStr) -> Self { self.p_name = name.as_ptr(); 
self } #[inline] pub unsafe fn name_as_c_str(&self) -> Option<&CStr> { if self.p_name.is_null() { None } else { Some(CStr::from_ptr(self.p_name)) } } #[inline] pub fn specialization_info(mut self, specialization_info: &'a SpecializationInfo<'a>) -> Self { self.p_specialization_info = specialization_info; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ComputePipelineCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCreateFlags, pub stage: PipelineShaderStageCreateInfo<'a>, pub layout: PipelineLayout, pub base_pipeline_handle: Pipeline, pub base_pipeline_index: i32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ComputePipelineCreateInfo<'_> {} unsafe impl Sync for ComputePipelineCreateInfo<'_> {} impl ::core::default::Default for ComputePipelineCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCreateFlags::default(), stage: PipelineShaderStageCreateInfo::default(), layout: PipelineLayout::default(), base_pipeline_handle: Pipeline::default(), base_pipeline_index: i32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ComputePipelineCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMPUTE_PIPELINE_CREATE_INFO; } pub unsafe trait ExtendsComputePipelineCreateInfo {} impl<'a> ComputePipelineCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn stage(mut self, stage: PipelineShaderStageCreateInfo<'a>) -> Self { self.stage = stage; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self { self.base_pipeline_handle = base_pipeline_handle; self } #[inline] pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self { self.base_pipeline_index = base_pipeline_index; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ComputePipelineIndirectBufferInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_address: DeviceAddress, pub size: DeviceSize, pub pipeline_device_address_capture_replay: DeviceAddress, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ComputePipelineIndirectBufferInfoNV<'_> {} unsafe impl Sync for ComputePipelineIndirectBufferInfoNV<'_> {} impl ::core::default::Default for ComputePipelineIndirectBufferInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_address: DeviceAddress::default(), size: DeviceSize::default(), pipeline_device_address_capture_replay: DeviceAddress::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ComputePipelineIndirectBufferInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV; } unsafe impl ExtendsComputePipelineCreateInfo for ComputePipelineIndirectBufferInfoNV<'_> {} impl<'a> ComputePipelineIndirectBufferInfoNV<'a> { #[inline] pub fn device_address(mut self, device_address: DeviceAddress) -> Self { self.device_address = device_address; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn pipeline_device_address_capture_replay( mut self, pipeline_device_address_capture_replay: DeviceAddress, ) -> Self { self.pipeline_device_address_capture_replay = pipeline_device_address_capture_replay; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCreateFlags2CreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCreateFlags2KHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCreateFlags2CreateInfoKHR<'_> {} unsafe impl Sync for PipelineCreateFlags2CreateInfoKHR<'_> {} impl ::core::default::Default for PipelineCreateFlags2CreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCreateFlags2KHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCreateFlags2CreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR; } unsafe impl ExtendsComputePipelineCreateInfo for PipelineCreateFlags2CreateInfoKHR<'_> {} unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCreateFlags2CreateInfoKHR<'_> {} unsafe impl ExtendsRayTracingPipelineCreateInfoNV for PipelineCreateFlags2CreateInfoKHR<'_> {} unsafe impl ExtendsRayTracingPipelineCreateInfoKHR for PipelineCreateFlags2CreateInfoKHR<'_> {} impl<'a> PipelineCreateFlags2CreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: PipelineCreateFlags2KHR) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VertexInputBindingDescription { 
pub binding: u32, pub stride: u32, pub input_rate: VertexInputRate, } impl VertexInputBindingDescription { #[inline] pub fn binding(mut self, binding: u32) -> Self { self.binding = binding; self } #[inline] pub fn stride(mut self, stride: u32) -> Self { self.stride = stride; self } #[inline] pub fn input_rate(mut self, input_rate: VertexInputRate) -> Self { self.input_rate = input_rate; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VertexInputAttributeDescription { pub location: u32, pub binding: u32, pub format: Format, pub offset: u32, } impl VertexInputAttributeDescription { #[inline] pub fn location(mut self, location: u32) -> Self { self.location = location; self } #[inline] pub fn binding(mut self, binding: u32) -> Self { self.binding = binding; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn offset(mut self, offset: u32) -> Self { self.offset = offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineVertexInputStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineVertexInputStateCreateFlags, pub vertex_binding_description_count: u32, pub p_vertex_binding_descriptions: *const VertexInputBindingDescription, pub vertex_attribute_description_count: u32, pub p_vertex_attribute_descriptions: *const VertexInputAttributeDescription, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineVertexInputStateCreateInfo<'_> {} unsafe impl Sync for PipelineVertexInputStateCreateInfo<'_> {} impl ::core::default::Default for PipelineVertexInputStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineVertexInputStateCreateFlags::default(), vertex_binding_description_count: u32::default(), p_vertex_binding_descriptions: ::core::ptr::null(), vertex_attribute_description_count: u32::default(), p_vertex_attribute_descriptions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineVertexInputStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; } pub unsafe trait ExtendsPipelineVertexInputStateCreateInfo {} impl<'a> PipelineVertexInputStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineVertexInputStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn vertex_binding_descriptions( mut self, vertex_binding_descriptions: &'a [VertexInputBindingDescription], ) -> Self { self.vertex_binding_description_count = vertex_binding_descriptions.len() as _; self.p_vertex_binding_descriptions = vertex_binding_descriptions.as_ptr(); self } #[inline] pub fn vertex_attribute_descriptions( mut self, vertex_attribute_descriptions: &'a [VertexInputAttributeDescription], ) -> Self { self.vertex_attribute_description_count = vertex_attribute_descriptions.len() as _; self.p_vertex_attribute_descriptions = vertex_attribute_descriptions.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineInputAssemblyStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineInputAssemblyStateCreateFlags, pub topology: PrimitiveTopology, pub primitive_restart_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineInputAssemblyStateCreateInfo<'_> {} unsafe impl Sync for PipelineInputAssemblyStateCreateInfo<'_> {} impl ::core::default::Default for PipelineInputAssemblyStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineInputAssemblyStateCreateFlags::default(), topology: PrimitiveTopology::default(), primitive_restart_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineInputAssemblyStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; } impl<'a> PipelineInputAssemblyStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineInputAssemblyStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn topology(mut self, topology: PrimitiveTopology) -> Self { self.topology = topology; self } #[inline] pub fn primitive_restart_enable(mut self, primitive_restart_enable: bool) -> Self { self.primitive_restart_enable = primitive_restart_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineTessellationStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineTessellationStateCreateFlags, pub patch_control_points: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineTessellationStateCreateInfo<'_> {} unsafe impl Sync for PipelineTessellationStateCreateInfo<'_> {} impl ::core::default::Default for PipelineTessellationStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineTessellationStateCreateFlags::default(), patch_control_points: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineTessellationStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_TESSELLATION_STATE_CREATE_INFO; } pub unsafe trait ExtendsPipelineTessellationStateCreateInfo {} impl<'a> PipelineTessellationStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineTessellationStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn patch_control_points(mut self, patch_control_points: u32) -> Self { self.patch_control_points = patch_control_points; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineViewportStateCreateFlags, pub viewport_count: u32, pub p_viewports: *const Viewport, pub scissor_count: u32, pub p_scissors: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportStateCreateInfo<'_> {} unsafe impl Sync for PipelineViewportStateCreateInfo<'_> {} impl ::core::default::Default for PipelineViewportStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineViewportStateCreateFlags::default(), viewport_count: u32::default(), p_viewports: ::core::ptr::null(), scissor_count: u32::default(), p_scissors: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_STATE_CREATE_INFO; } pub unsafe trait ExtendsPipelineViewportStateCreateInfo {} impl<'a> PipelineViewportStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineViewportStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn viewport_count(mut self, viewport_count: u32) -> Self { self.viewport_count = viewport_count; self } #[inline] pub fn viewports(mut self, viewports: &'a [Viewport]) -> Self { self.viewport_count = viewports.len() as _; self.p_viewports = viewports.as_ptr(); self } #[inline] pub fn scissor_count(mut self, scissor_count: u32) -> Self { self.scissor_count = scissor_count; self } #[inline] pub fn scissors(mut self, scissors: &'a [Rect2D]) -> Self { self.scissor_count = scissors.len() as _; self.p_scissors = scissors.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRasterizationStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineRasterizationStateCreateFlags, pub depth_clamp_enable: Bool32, pub rasterizer_discard_enable: Bool32, pub polygon_mode: PolygonMode, pub cull_mode: CullModeFlags, pub front_face: FrontFace, pub depth_bias_enable: Bool32, pub depth_bias_constant_factor: f32, pub depth_bias_clamp: f32, pub depth_bias_slope_factor: f32, pub line_width: f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationStateCreateInfo<'_> {} unsafe impl Sync for PipelineRasterizationStateCreateInfo<'_> {} impl ::core::default::Default for PipelineRasterizationStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineRasterizationStateCreateFlags::default(), depth_clamp_enable: Bool32::default(), rasterizer_discard_enable: Bool32::default(), polygon_mode: PolygonMode::default(), cull_mode: CullModeFlags::default(), front_face: FrontFace::default(), depth_bias_enable: Bool32::default(), depth_bias_constant_factor: f32::default(), depth_bias_clamp: f32::default(), depth_bias_slope_factor: f32::default(), line_width: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_STATE_CREATE_INFO; } pub unsafe trait ExtendsPipelineRasterizationStateCreateInfo {} impl<'a> PipelineRasterizationStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineRasterizationStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn depth_clamp_enable(mut self, depth_clamp_enable: bool) -> Self { self.depth_clamp_enable = depth_clamp_enable.into(); self } #[inline] pub fn rasterizer_discard_enable(mut self, rasterizer_discard_enable: bool) -> Self { self.rasterizer_discard_enable = rasterizer_discard_enable.into(); self } #[inline] pub fn polygon_mode(mut self, polygon_mode: PolygonMode) -> Self { self.polygon_mode = polygon_mode; self } #[inline] pub fn cull_mode(mut self, cull_mode: CullModeFlags) -> Self { self.cull_mode = cull_mode; self } #[inline] pub fn front_face(mut self, front_face: FrontFace) -> Self { self.front_face = front_face; self } #[inline] pub fn depth_bias_enable(mut self, depth_bias_enable: bool) -> Self { self.depth_bias_enable = depth_bias_enable.into(); self } #[inline] pub fn depth_bias_constant_factor(mut self, depth_bias_constant_factor: f32) -> Self { self.depth_bias_constant_factor = depth_bias_constant_factor; self } #[inline] pub fn depth_bias_clamp(mut self, depth_bias_clamp: f32) -> Self { self.depth_bias_clamp = depth_bias_clamp; self } #[inline] pub fn depth_bias_slope_factor(mut self, depth_bias_slope_factor: f32) -> Self { self.depth_bias_slope_factor = depth_bias_slope_factor; self } #[inline] pub fn line_width(mut self, line_width: f32) -> Self { self.line_width 
= line_width; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineMultisampleStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineMultisampleStateCreateFlags, pub rasterization_samples: SampleCountFlags, pub sample_shading_enable: Bool32, pub min_sample_shading: f32, pub p_sample_mask: *const SampleMask, pub alpha_to_coverage_enable: Bool32, pub alpha_to_one_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineMultisampleStateCreateInfo<'_> {} unsafe impl Sync for PipelineMultisampleStateCreateInfo<'_> {} impl ::core::default::Default for PipelineMultisampleStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineMultisampleStateCreateFlags::default(), rasterization_samples: SampleCountFlags::default(), sample_shading_enable: Bool32::default(), min_sample_shading: f32::default(), p_sample_mask: ::core::ptr::null(), alpha_to_coverage_enable: Bool32::default(), alpha_to_one_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineMultisampleStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; } pub unsafe trait ExtendsPipelineMultisampleStateCreateInfo {} impl<'a> PipelineMultisampleStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineMultisampleStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self { self.rasterization_samples = rasterization_samples; self } #[inline] pub fn sample_shading_enable(mut self, sample_shading_enable: bool) -> Self { self.sample_shading_enable = sample_shading_enable.into(); self } #[inline] pub fn min_sample_shading(mut self, min_sample_shading: f32) -> Self { self.min_sample_shading = min_sample_shading; self } #[doc = r" Sets `p_sample_mask` to `null` if the slice is empty. The mask will"] #[doc = r" be treated as if it has all bits set to `1`."] #[doc = r""] #[doc = r" See "] #[doc = r" for more details."] #[inline] pub fn sample_mask(mut self, sample_mask: &'a [SampleMask]) -> Self { self.p_sample_mask = if sample_mask.is_empty() { core::ptr::null() } else { sample_mask.as_ptr() }; self } #[inline] pub fn alpha_to_coverage_enable(mut self, alpha_to_coverage_enable: bool) -> Self { self.alpha_to_coverage_enable = alpha_to_coverage_enable.into(); self } #[inline] pub fn alpha_to_one_enable(mut self, alpha_to_one_enable: bool) -> Self { self.alpha_to_one_enable = alpha_to_one_enable.into(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PipelineColorBlendAttachmentState { pub blend_enable: Bool32, pub src_color_blend_factor: BlendFactor, pub dst_color_blend_factor: BlendFactor, pub color_blend_op: BlendOp, pub src_alpha_blend_factor: BlendFactor, pub dst_alpha_blend_factor: BlendFactor, pub alpha_blend_op: BlendOp, pub color_write_mask: ColorComponentFlags, } impl PipelineColorBlendAttachmentState { #[inline] pub fn blend_enable(mut self, blend_enable: bool) -> Self { self.blend_enable = blend_enable.into(); self } #[inline] pub fn src_color_blend_factor(mut self, src_color_blend_factor: BlendFactor) -> Self { self.src_color_blend_factor = src_color_blend_factor; self } #[inline] pub fn dst_color_blend_factor(mut self, dst_color_blend_factor: BlendFactor) -> Self { self.dst_color_blend_factor = dst_color_blend_factor; self } #[inline] pub fn color_blend_op(mut self, color_blend_op: BlendOp) -> Self { self.color_blend_op = color_blend_op; self } #[inline] pub fn src_alpha_blend_factor(mut self, src_alpha_blend_factor: BlendFactor) -> Self { self.src_alpha_blend_factor = src_alpha_blend_factor; self } #[inline] pub fn dst_alpha_blend_factor(mut self, dst_alpha_blend_factor: BlendFactor) -> Self { self.dst_alpha_blend_factor = dst_alpha_blend_factor; self } #[inline] pub fn alpha_blend_op(mut self, alpha_blend_op: BlendOp) -> Self { self.alpha_blend_op = alpha_blend_op; self } #[inline] pub fn color_write_mask(mut self, color_write_mask: ColorComponentFlags) -> Self { self.color_write_mask = color_write_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineColorBlendStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineColorBlendStateCreateFlags, pub logic_op_enable: Bool32, pub logic_op: LogicOp, pub attachment_count: u32, pub p_attachments: *const PipelineColorBlendAttachmentState, pub blend_constants: [f32; 4], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineColorBlendStateCreateInfo<'_> {} unsafe impl Sync for PipelineColorBlendStateCreateInfo<'_> {} impl ::core::default::Default for PipelineColorBlendStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineColorBlendStateCreateFlags::default(), logic_op_enable: Bool32::default(), logic_op: LogicOp::default(), attachment_count: u32::default(), p_attachments: ::core::ptr::null(), blend_constants: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineColorBlendStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; } pub unsafe trait ExtendsPipelineColorBlendStateCreateInfo {} impl<'a> PipelineColorBlendStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineColorBlendStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn logic_op_enable(mut 
self, logic_op_enable: bool) -> Self { self.logic_op_enable = logic_op_enable.into(); self } #[inline] pub fn logic_op(mut self, logic_op: LogicOp) -> Self { self.logic_op = logic_op; self } #[inline] pub fn attachments(mut self, attachments: &'a [PipelineColorBlendAttachmentState]) -> Self { self.attachment_count = attachments.len() as _; self.p_attachments = attachments.as_ptr(); self } #[inline] pub fn blend_constants(mut self, blend_constants: [f32; 4]) -> Self { self.blend_constants = blend_constants; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineDynamicStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineDynamicStateCreateFlags, pub dynamic_state_count: u32, pub p_dynamic_states: *const DynamicState, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineDynamicStateCreateInfo<'_> {} unsafe impl Sync for PipelineDynamicStateCreateInfo<'_> {} impl ::core::default::Default for PipelineDynamicStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineDynamicStateCreateFlags::default(), dynamic_state_count: u32::default(), p_dynamic_states: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineDynamicStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_DYNAMIC_STATE_CREATE_INFO; } impl<'a> PipelineDynamicStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineDynamicStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn dynamic_states(mut self, dynamic_states: &'a [DynamicState]) -> Self { self.dynamic_state_count = dynamic_states.len() as _; self.p_dynamic_states = dynamic_states.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct StencilOpState { pub fail_op: StencilOp, pub pass_op: StencilOp, pub depth_fail_op: StencilOp, pub compare_op: CompareOp, pub compare_mask: u32, pub write_mask: u32, pub reference: u32, } impl StencilOpState { #[inline] pub fn fail_op(mut self, fail_op: StencilOp) -> Self { self.fail_op = fail_op; self } #[inline] pub fn pass_op(mut self, pass_op: StencilOp) -> Self { self.pass_op = pass_op; self } #[inline] pub fn depth_fail_op(mut self, depth_fail_op: StencilOp) -> Self { self.depth_fail_op = depth_fail_op; self } #[inline] pub fn compare_op(mut self, compare_op: CompareOp) -> Self { self.compare_op = compare_op; self } #[inline] pub fn compare_mask(mut self, compare_mask: u32) -> Self { self.compare_mask = compare_mask; self } #[inline] pub fn write_mask(mut self, write_mask: u32) -> Self { self.write_mask = write_mask; self } #[inline] pub fn reference(mut self, reference: u32) -> Self { self.reference = reference; self } } #[repr(C)] 
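// Illustrative sketch (comment only, not generated code): `StencilOpState`
// above is embedded by value in the depth-stencil state defined below, so a
// depth-test-only configuration can leave both stencil faces at their
// defaults:
//
//     let depth_stencil = PipelineDepthStencilStateCreateInfo::default()
//         .depth_test_enable(true)
//         .depth_write_enable(true)
//         .depth_compare_op(CompareOp::LESS_OR_EQUAL)
//         .front(StencilOpState::default())
//         .back(StencilOpState::default());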
#[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineDepthStencilStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineDepthStencilStateCreateFlags, pub depth_test_enable: Bool32, pub depth_write_enable: Bool32, pub depth_compare_op: CompareOp, pub depth_bounds_test_enable: Bool32, pub stencil_test_enable: Bool32, pub front: StencilOpState, pub back: StencilOpState, pub min_depth_bounds: f32, pub max_depth_bounds: f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineDepthStencilStateCreateInfo<'_> {} unsafe impl Sync for PipelineDepthStencilStateCreateInfo<'_> {} impl ::core::default::Default for PipelineDepthStencilStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineDepthStencilStateCreateFlags::default(), depth_test_enable: Bool32::default(), depth_write_enable: Bool32::default(), depth_compare_op: CompareOp::default(), depth_bounds_test_enable: Bool32::default(), stencil_test_enable: Bool32::default(), front: StencilOpState::default(), back: StencilOpState::default(), min_depth_bounds: f32::default(), max_depth_bounds: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineDepthStencilStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; } impl<'a> PipelineDepthStencilStateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineDepthStencilStateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn depth_test_enable(mut self, depth_test_enable: bool) -> Self { self.depth_test_enable = depth_test_enable.into(); self } #[inline] pub fn depth_write_enable(mut self, depth_write_enable: bool) -> Self { self.depth_write_enable = depth_write_enable.into(); self } #[inline] pub fn depth_compare_op(mut self, depth_compare_op: CompareOp) -> Self { self.depth_compare_op = depth_compare_op; self } #[inline] pub fn depth_bounds_test_enable(mut self, depth_bounds_test_enable: bool) -> Self { self.depth_bounds_test_enable = depth_bounds_test_enable.into(); self } #[inline] pub fn stencil_test_enable(mut self, stencil_test_enable: bool) -> Self { self.stencil_test_enable = stencil_test_enable.into(); self } #[inline] pub fn front(mut self, front: StencilOpState) -> Self { self.front = front; self } #[inline] pub fn back(mut self, back: StencilOpState) -> Self { self.back = back; self } #[inline] pub fn min_depth_bounds(mut self, min_depth_bounds: f32) -> Self { self.min_depth_bounds = min_depth_bounds; self } #[inline] pub fn max_depth_bounds(mut self, max_depth_bounds: f32) -> Self { self.max_depth_bounds = max_depth_bounds; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GraphicsPipelineCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCreateFlags, pub stage_count: u32, pub p_stages: *const PipelineShaderStageCreateInfo<'a>, pub p_vertex_input_state: *const PipelineVertexInputStateCreateInfo<'a>, pub p_input_assembly_state: *const PipelineInputAssemblyStateCreateInfo<'a>, pub p_tessellation_state: *const PipelineTessellationStateCreateInfo<'a>, pub p_viewport_state: *const PipelineViewportStateCreateInfo<'a>, pub p_rasterization_state: *const PipelineRasterizationStateCreateInfo<'a>, pub p_multisample_state: *const PipelineMultisampleStateCreateInfo<'a>, pub 
p_depth_stencil_state: *const PipelineDepthStencilStateCreateInfo<'a>, pub p_color_blend_state: *const PipelineColorBlendStateCreateInfo<'a>, pub p_dynamic_state: *const PipelineDynamicStateCreateInfo<'a>, pub layout: PipelineLayout, pub render_pass: RenderPass, pub subpass: u32, pub base_pipeline_handle: Pipeline, pub base_pipeline_index: i32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GraphicsPipelineCreateInfo<'_> {} unsafe impl Sync for GraphicsPipelineCreateInfo<'_> {} impl ::core::default::Default for GraphicsPipelineCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCreateFlags::default(), stage_count: u32::default(), p_stages: ::core::ptr::null(), p_vertex_input_state: ::core::ptr::null(), p_input_assembly_state: ::core::ptr::null(), p_tessellation_state: ::core::ptr::null(), p_viewport_state: ::core::ptr::null(), p_rasterization_state: ::core::ptr::null(), p_multisample_state: ::core::ptr::null(), p_depth_stencil_state: ::core::ptr::null(), p_color_blend_state: ::core::ptr::null(), p_dynamic_state: ::core::ptr::null(), layout: PipelineLayout::default(), render_pass: RenderPass::default(), subpass: u32::default(), base_pipeline_handle: Pipeline::default(), base_pipeline_index: i32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GraphicsPipelineCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_PIPELINE_CREATE_INFO; } pub unsafe trait ExtendsGraphicsPipelineCreateInfo {} impl<'a> GraphicsPipelineCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo<'a>]) -> Self { self.stage_count = stages.len() as _; self.p_stages = stages.as_ptr(); self } #[inline] pub fn vertex_input_state( mut self, vertex_input_state: &'a PipelineVertexInputStateCreateInfo<'a>, ) -> Self { self.p_vertex_input_state = vertex_input_state; self } #[inline] pub fn input_assembly_state( mut self, input_assembly_state: &'a PipelineInputAssemblyStateCreateInfo<'a>, ) -> Self { self.p_input_assembly_state = input_assembly_state; self } #[inline] pub fn tessellation_state( mut self, tessellation_state: &'a PipelineTessellationStateCreateInfo<'a>, ) -> Self { self.p_tessellation_state = tessellation_state; self } #[inline] pub fn viewport_state( mut self, viewport_state: &'a PipelineViewportStateCreateInfo<'a>, ) -> Self { self.p_viewport_state = viewport_state; self } #[inline] pub fn rasterization_state( mut self, rasterization_state: &'a PipelineRasterizationStateCreateInfo<'a>, ) -> Self { self.p_rasterization_state = rasterization_state; self } #[inline] pub fn multisample_state( mut self, multisample_state: &'a PipelineMultisampleStateCreateInfo<'a>, ) -> Self { self.p_multisample_state = multisample_state; self } #[inline] pub fn depth_stencil_state( mut self, depth_stencil_state: &'a PipelineDepthStencilStateCreateInfo<'a>, ) -> Self { self.p_depth_stencil_state = depth_stencil_state; self } #[inline] pub fn color_blend_state( mut self, color_blend_state: &'a PipelineColorBlendStateCreateInfo<'a>, ) -> Self { self.p_color_blend_state = color_blend_state; self } #[inline] pub fn dynamic_state(mut self, dynamic_state: &'a PipelineDynamicStateCreateInfo<'a>) -> Self { self.p_dynamic_state = dynamic_state; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn 
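// Sketch (illustrative only) of assembling a GraphicsPipelineCreateInfo from the
// per-stage state structs via the builders above (the remaining builders continue
// just below). `stages`, `vertex_input`, `input_assembly`, `viewport_state`,
// `rasterization`, `multisample`, `depth_stencil`, `color_blend`, `dynamic_state`,
// `layout` and `render_pass` are assumed to have been built or created earlier.
//
//     let pipeline_info = GraphicsPipelineCreateInfo::default()
//         .stages(&stages)
//         .vertex_input_state(&vertex_input)
//         .input_assembly_state(&input_assembly)
//         .viewport_state(&viewport_state)
//         .rasterization_state(&rasterization)
//         .multisample_state(&multisample)
//         .depth_stencil_state(&depth_stencil)
//         .color_blend_state(&color_blend)
//         .dynamic_state(&dynamic_state)
//         .layout(layout)
//         .render_pass(render_pass)
//         .subpass(0);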
render_pass(mut self, render_pass: RenderPass) -> Self { self.render_pass = render_pass; self } #[inline] pub fn subpass(mut self, subpass: u32) -> Self { self.subpass = subpass; self } #[inline] pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self { self.base_pipeline_handle = base_pipeline_handle; self } #[inline] pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self { self.base_pipeline_index = base_pipeline_index; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCacheCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCacheCreateFlags, pub initial_data_size: usize, pub p_initial_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCacheCreateInfo<'_> {} unsafe impl Sync for PipelineCacheCreateInfo<'_> {} impl ::core::default::Default for PipelineCacheCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCacheCreateFlags::default(), initial_data_size: usize::default(), p_initial_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCacheCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_CACHE_CREATE_INFO; } impl<'a> PipelineCacheCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineCacheCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn initial_data(mut self, initial_data: &'a [u8]) -> Self { self.initial_data_size = initial_data.len(); self.p_initial_data = initial_data.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCacheHeaderVersionOne { pub header_size: u32, pub header_version: PipelineCacheHeaderVersion, pub vendor_id: u32, pub device_id: u32, pub pipeline_cache_uuid: [u8; UUID_SIZE], } impl ::core::default::Default for PipelineCacheHeaderVersionOne { #[inline] fn default() -> Self { Self { header_size: u32::default(), header_version: PipelineCacheHeaderVersion::default(), vendor_id: u32::default(), device_id: u32::default(), pipeline_cache_uuid: unsafe { ::core::mem::zeroed() }, } } } impl PipelineCacheHeaderVersionOne { #[inline] pub fn header_size(mut self, header_size: u32) -> Self { self.header_size = header_size; self } #[inline] pub fn header_version(mut self, header_version: PipelineCacheHeaderVersion) -> Self { self.header_version = header_version; self } #[inline] pub fn vendor_id(mut self, vendor_id: u32) -> Self { self.vendor_id = vendor_id; self } #[inline] pub fn device_id(mut self, device_id: u32) -> Self { self.device_id = device_id; self } #[inline] pub fn pipeline_cache_uuid(mut self, pipeline_cache_uuid: [u8; UUID_SIZE]) -> Self { self.pipeline_cache_uuid = 
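// Sketch (illustrative only): seeding a pipeline cache from previously saved data.
// `saved_bytes` is assumed to hold bytes returned by an earlier
// vkGetPipelineCacheData call for a compatible device; the
// PipelineCacheHeaderVersionOne struct defined here describes the header of such a
// blob (vendor_id, device_id, pipeline_cache_uuid) that applications typically
// validate before reusing it.
//
//     let cache_info = PipelineCacheCreateInfo::default().initial_data(&saved_bytes);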
pipeline_cache_uuid; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PushConstantRange { pub stage_flags: ShaderStageFlags, pub offset: u32, pub size: u32, } impl PushConstantRange { #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn offset(mut self, offset: u32) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: u32) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineLayoutCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineLayoutCreateFlags, pub set_layout_count: u32, pub p_set_layouts: *const DescriptorSetLayout, pub push_constant_range_count: u32, pub p_push_constant_ranges: *const PushConstantRange, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineLayoutCreateInfo<'_> {} unsafe impl Sync for PipelineLayoutCreateInfo<'_> {} impl ::core::default::Default for PipelineLayoutCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineLayoutCreateFlags::default(), set_layout_count: u32::default(), p_set_layouts: ::core::ptr::null(), push_constant_range_count: u32::default(), p_push_constant_ranges: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineLayoutCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_LAYOUT_CREATE_INFO; } unsafe impl ExtendsBindDescriptorSetsInfoKHR for PipelineLayoutCreateInfo<'_> {} unsafe impl ExtendsPushConstantsInfoKHR for PipelineLayoutCreateInfo<'_> {} unsafe impl ExtendsPushDescriptorSetInfoKHR for PipelineLayoutCreateInfo<'_> {} unsafe impl ExtendsPushDescriptorSetWithTemplateInfoKHR for PipelineLayoutCreateInfo<'_> {} unsafe impl ExtendsSetDescriptorBufferOffsetsInfoEXT for PipelineLayoutCreateInfo<'_> {} unsafe impl ExtendsBindDescriptorBufferEmbeddedSamplersInfoEXT for PipelineLayoutCreateInfo<'_> {} impl<'a> PipelineLayoutCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PipelineLayoutCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn set_layouts(mut self, set_layouts: &'a [DescriptorSetLayout]) -> Self { self.set_layout_count = set_layouts.len() as _; self.p_set_layouts = set_layouts.as_ptr(); self } #[inline] pub fn push_constant_ranges(mut self, push_constant_ranges: &'a [PushConstantRange]) -> Self { self.push_constant_range_count = push_constant_ranges.len() as _; self.p_push_constant_ranges = push_constant_ranges.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: SamplerCreateFlags, pub mag_filter: Filter, pub min_filter: Filter, pub mipmap_mode: SamplerMipmapMode, pub address_mode_u: SamplerAddressMode, pub address_mode_v: SamplerAddressMode, pub address_mode_w: SamplerAddressMode, pub mip_lod_bias: f32, pub anisotropy_enable: Bool32, pub max_anisotropy: f32, pub compare_enable: Bool32, pub compare_op: CompareOp, pub min_lod: f32, pub max_lod: f32, pub border_color: BorderColor, pub unnormalized_coordinates: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerCreateInfo<'_> {} unsafe impl Sync for SamplerCreateInfo<'_> {} impl 
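// Sketch (illustrative only) for the PushConstantRange and PipelineLayoutCreateInfo
// builders above. `set_layouts` is an assumed slice of DescriptorSetLayout handles
// created earlier; the 64-byte vertex-stage range is just an example.
//
//     let push_constant_ranges = [PushConstantRange::default()
//         .stage_flags(ShaderStageFlags::VERTEX)
//         .offset(0)
//         .size(64)];
//     let layout_info = PipelineLayoutCreateInfo::default()
//         .set_layouts(&set_layouts)
//         .push_constant_ranges(&push_constant_ranges);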
::core::default::Default for SamplerCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: SamplerCreateFlags::default(), mag_filter: Filter::default(), min_filter: Filter::default(), mipmap_mode: SamplerMipmapMode::default(), address_mode_u: SamplerAddressMode::default(), address_mode_v: SamplerAddressMode::default(), address_mode_w: SamplerAddressMode::default(), mip_lod_bias: f32::default(), anisotropy_enable: Bool32::default(), max_anisotropy: f32::default(), compare_enable: Bool32::default(), compare_op: CompareOp::default(), min_lod: f32::default(), max_lod: f32::default(), border_color: BorderColor::default(), unnormalized_coordinates: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_CREATE_INFO; } pub unsafe trait ExtendsSamplerCreateInfo {} impl<'a> SamplerCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: SamplerCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn mag_filter(mut self, mag_filter: Filter) -> Self { self.mag_filter = mag_filter; self } #[inline] pub fn min_filter(mut self, min_filter: Filter) -> Self { self.min_filter = min_filter; self } #[inline] pub fn mipmap_mode(mut self, mipmap_mode: SamplerMipmapMode) -> Self { self.mipmap_mode = mipmap_mode; self } #[inline] pub fn address_mode_u(mut self, address_mode_u: SamplerAddressMode) -> Self { self.address_mode_u = address_mode_u; self } #[inline] pub fn address_mode_v(mut self, address_mode_v: SamplerAddressMode) -> Self { self.address_mode_v = address_mode_v; self } #[inline] pub fn address_mode_w(mut self, address_mode_w: SamplerAddressMode) -> Self { self.address_mode_w = address_mode_w; self } #[inline] pub fn mip_lod_bias(mut self, mip_lod_bias: f32) -> Self { self.mip_lod_bias = mip_lod_bias; self } #[inline] pub fn anisotropy_enable(mut self, anisotropy_enable: bool) -> Self { self.anisotropy_enable = anisotropy_enable.into(); self } #[inline] pub fn max_anisotropy(mut self, max_anisotropy: f32) -> Self { self.max_anisotropy = max_anisotropy; self } #[inline] pub fn compare_enable(mut self, compare_enable: bool) -> Self { self.compare_enable = compare_enable.into(); self } #[inline] pub fn compare_op(mut self, compare_op: CompareOp) -> Self { self.compare_op = compare_op; self } #[inline] pub fn min_lod(mut self, min_lod: f32) -> Self { self.min_lod = min_lod; self } #[inline] pub fn max_lod(mut self, max_lod: f32) -> Self { self.max_lod = max_lod; self } #[inline] pub fn border_color(mut self, border_color: BorderColor) -> Self { self.border_color = border_color; self } #[inline] pub fn unnormalized_coordinates(mut self, unnormalized_coordinates: bool) -> Self { self.unnormalized_coordinates = unnormalized_coordinates.into(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandPoolCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: CommandPoolCreateFlags, pub queue_family_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandPoolCreateInfo<'_> {} unsafe impl Sync for CommandPoolCreateInfo<'_> {} impl ::core::default::Default for CommandPoolCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: CommandPoolCreateFlags::default(), queue_family_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandPoolCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_POOL_CREATE_INFO; } impl<'a> CommandPoolCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: CommandPoolCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn queue_family_index(mut self, queue_family_index: u32) -> Self { self.queue_family_index = queue_family_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub command_pool: CommandPool, pub level: CommandBufferLevel, pub command_buffer_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferAllocateInfo<'_> {} unsafe impl Sync for CommandBufferAllocateInfo<'_> {} impl ::core::default::Default for CommandBufferAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), command_pool: CommandPool::default(), level: CommandBufferLevel::default(), command_buffer_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_ALLOCATE_INFO; } impl<'a> CommandBufferAllocateInfo<'a> { #[inline] pub fn command_pool(mut self, command_pool: CommandPool) -> Self { self.command_pool = command_pool; self } #[inline] pub fn level(mut self, level: CommandBufferLevel) -> Self { self.level = level; self } #[inline] pub fn command_buffer_count(mut self, command_buffer_count: u32) -> Self { self.command_buffer_count = command_buffer_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferInheritanceInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub render_pass: RenderPass, pub subpass: u32, pub framebuffer: Framebuffer, pub occlusion_query_enable: Bool32, pub query_flags: QueryControlFlags, pub pipeline_statistics: QueryPipelineStatisticFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferInheritanceInfo<'_> {} unsafe impl Sync for CommandBufferInheritanceInfo<'_> {} impl ::core::default::Default for CommandBufferInheritanceInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: 
::core::ptr::null(), render_pass: RenderPass::default(), subpass: u32::default(), framebuffer: Framebuffer::default(), occlusion_query_enable: Bool32::default(), query_flags: QueryControlFlags::default(), pipeline_statistics: QueryPipelineStatisticFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferInheritanceInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_INFO; } pub unsafe trait ExtendsCommandBufferInheritanceInfo {} impl<'a> CommandBufferInheritanceInfo<'a> { #[inline] pub fn render_pass(mut self, render_pass: RenderPass) -> Self { self.render_pass = render_pass; self } #[inline] pub fn subpass(mut self, subpass: u32) -> Self { self.subpass = subpass; self } #[inline] pub fn framebuffer(mut self, framebuffer: Framebuffer) -> Self { self.framebuffer = framebuffer; self } #[inline] pub fn occlusion_query_enable(mut self, occlusion_query_enable: bool) -> Self { self.occlusion_query_enable = occlusion_query_enable.into(); self } #[inline] pub fn query_flags(mut self, query_flags: QueryControlFlags) -> Self { self.query_flags = query_flags; self } #[inline] pub fn pipeline_statistics(mut self, pipeline_statistics: QueryPipelineStatisticFlags) -> Self { self.pipeline_statistics = pipeline_statistics; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferBeginInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: CommandBufferUsageFlags, pub p_inheritance_info: *const CommandBufferInheritanceInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferBeginInfo<'_> {} unsafe impl Sync for CommandBufferBeginInfo<'_> {} impl ::core::default::Default for CommandBufferBeginInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: CommandBufferUsageFlags::default(), p_inheritance_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferBeginInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_BEGIN_INFO; } pub unsafe trait ExtendsCommandBufferBeginInfo {} impl<'a> CommandBufferBeginInfo<'a> { #[inline] pub fn flags(mut self, flags: CommandBufferUsageFlags) -> Self { self.flags = flags; self } #[inline] pub fn inheritance_info( mut self, inheritance_info: &'a CommandBufferInheritanceInfo<'a>, ) -> Self { self.p_inheritance_info = inheritance_info; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassBeginInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub render_pass: RenderPass, pub framebuffer: Framebuffer, pub render_area: Rect2D, pub clear_value_count: u32, pub p_clear_values: *const ClearValue, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassBeginInfo<'_> {} unsafe impl Sync for RenderPassBeginInfo<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for RenderPassBeginInfo<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("RenderPassBeginInfo") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("render_pass", &self.render_pass) .field("framebuffer", &self.framebuffer) .field("render_area", &self.render_area) .field("clear_value_count", &self.clear_value_count) .field("p_clear_values", &"union") .finish() } } impl ::core::default::Default for RenderPassBeginInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), render_pass: RenderPass::default(), framebuffer: Framebuffer::default(), render_area: Rect2D::default(), clear_value_count: u32::default(), p_clear_values: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassBeginInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_BEGIN_INFO; } pub unsafe trait ExtendsRenderPassBeginInfo {} impl<'a> RenderPassBeginInfo<'a> { #[inline] pub fn render_pass(mut self, render_pass: RenderPass) -> Self { self.render_pass = render_pass; self } #[inline] pub fn framebuffer(mut self, framebuffer: Framebuffer) -> Self { self.framebuffer = framebuffer; self } #[inline] pub fn render_area(mut self, render_area: Rect2D) -> Self { self.render_area = render_area; self } #[inline] pub fn clear_values(mut self, clear_values: &'a [ClearValue]) -> Self { self.clear_value_count = clear_values.len() as _; self.p_clear_values = clear_values.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union ClearColorValue { pub float32: [f32; 4], pub int32: [i32; 4], pub uint32: [u32; 4], } impl ::core::default::Default for ClearColorValue { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ClearDepthStencilValue { pub depth: f32, pub stencil: u32, } impl ClearDepthStencilValue { #[inline] pub fn depth(mut self, depth: f32) -> Self { self.depth = depth; self } #[inline] pub fn stencil(mut self, stencil: u32) -> Self { self.stencil = stencil; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union ClearValue { pub color: ClearColorValue, pub depth_stencil: ClearDepthStencilValue, } impl ::core::default::Default for ClearValue { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ClearAttachment { pub aspect_mask: ImageAspectFlags, pub color_attachment: u32, pub clear_value: ClearValue, } #[cfg(feature = "debug")] impl fmt::Debug for ClearAttachment { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("ClearAttachment") .field("aspect_mask", &self.aspect_mask) .field("color_attachment", &self.color_attachment) .field("clear_value", &"union") .finish() } } impl ClearAttachment { #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } #[inline] pub fn color_attachment(mut self, color_attachment: u32) -> Self { self.color_attachment = color_attachment; self } #[inline] pub fn clear_value(mut self, clear_value: ClearValue) -> Self { self.clear_value = clear_value; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct AttachmentDescription { pub flags: AttachmentDescriptionFlags, pub format: Format, pub samples: SampleCountFlags, pub load_op: AttachmentLoadOp, pub store_op: AttachmentStoreOp, pub stencil_load_op: AttachmentLoadOp, pub stencil_store_op: AttachmentStoreOp, pub initial_layout: ImageLayout, pub final_layout: ImageLayout, } impl AttachmentDescription { #[inline] pub fn flags(mut self, flags: AttachmentDescriptionFlags) -> Self { self.flags = flags; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn samples(mut self, samples: SampleCountFlags) -> Self { self.samples = samples; self } #[inline] pub fn load_op(mut self, load_op: AttachmentLoadOp) -> Self { self.load_op = load_op; self } #[inline] pub fn store_op(mut self, store_op: AttachmentStoreOp) -> Self { self.store_op = store_op; self } #[inline] pub fn stencil_load_op(mut self, stencil_load_op: AttachmentLoadOp) -> Self { self.stencil_load_op = stencil_load_op; self } #[inline] pub fn stencil_store_op(mut self, stencil_store_op: AttachmentStoreOp) -> Self { self.stencil_store_op = stencil_store_op; self } #[inline] pub fn initial_layout(mut self, 
initial_layout: ImageLayout) -> Self { self.initial_layout = initial_layout; self } #[inline] pub fn final_layout(mut self, final_layout: ImageLayout) -> Self { self.final_layout = final_layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct AttachmentReference { pub attachment: u32, pub layout: ImageLayout, } impl AttachmentReference { #[inline] pub fn attachment(mut self, attachment: u32) -> Self { self.attachment = attachment; self } #[inline] pub fn layout(mut self, layout: ImageLayout) -> Self { self.layout = layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassDescription<'a> { pub flags: SubpassDescriptionFlags, pub pipeline_bind_point: PipelineBindPoint, pub input_attachment_count: u32, pub p_input_attachments: *const AttachmentReference, pub color_attachment_count: u32, pub p_color_attachments: *const AttachmentReference, pub p_resolve_attachments: *const AttachmentReference, pub p_depth_stencil_attachment: *const AttachmentReference, pub preserve_attachment_count: u32, pub p_preserve_attachments: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassDescription<'_> {} unsafe impl Sync for SubpassDescription<'_> {} impl ::core::default::Default for SubpassDescription<'_> { #[inline] fn default() -> Self { Self { flags: SubpassDescriptionFlags::default(), pipeline_bind_point: PipelineBindPoint::default(), input_attachment_count: u32::default(), p_input_attachments: ::core::ptr::null(), color_attachment_count: u32::default(), p_color_attachments: ::core::ptr::null(), p_resolve_attachments: ::core::ptr::null(), p_depth_stencil_attachment: ::core::ptr::null(), preserve_attachment_count: u32::default(), p_preserve_attachments: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> SubpassDescription<'a> { #[inline] pub fn flags(mut self, flags: SubpassDescriptionFlags) -> Self { self.flags = flags; self } #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn input_attachments(mut self, input_attachments: &'a [AttachmentReference]) -> Self { self.input_attachment_count = input_attachments.len() as _; self.p_input_attachments = input_attachments.as_ptr(); self } #[inline] pub fn color_attachments(mut self, color_attachments: &'a [AttachmentReference]) -> Self { self.color_attachment_count = color_attachments.len() as _; self.p_color_attachments = color_attachments.as_ptr(); self } #[inline] pub fn resolve_attachments(mut self, resolve_attachments: &'a [AttachmentReference]) -> Self { self.color_attachment_count = resolve_attachments.len() as _; self.p_resolve_attachments = resolve_attachments.as_ptr(); self } #[inline] pub fn depth_stencil_attachment( mut self, depth_stencil_attachment: &'a AttachmentReference, ) -> Self { self.p_depth_stencil_attachment = depth_stencil_attachment; self } #[inline] pub fn preserve_attachments(mut self, preserve_attachments: &'a [u32]) -> Self { self.preserve_attachment_count = preserve_attachments.len() as _; self.p_preserve_attachments = preserve_attachments.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SubpassDependency { pub src_subpass: u32, pub dst_subpass: u32, pub src_stage_mask: PipelineStageFlags, pub dst_stage_mask: PipelineStageFlags, pub 
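// Sketch (illustrative only) of describing a single color attachment and subpass
// with the builders above; the format and layouts are example values. Together
// with a SubpassDependency these feed the RenderPassCreateInfo builders defined
// further below.
//
//     let color_attachment = AttachmentDescription::default()
//         .format(Format::B8G8R8A8_SRGB)
//         .samples(SampleCountFlags::TYPE_1)
//         .load_op(AttachmentLoadOp::CLEAR)
//         .store_op(AttachmentStoreOp::STORE)
//         .stencil_load_op(AttachmentLoadOp::DONT_CARE)
//         .stencil_store_op(AttachmentStoreOp::DONT_CARE)
//         .initial_layout(ImageLayout::UNDEFINED)
//         .final_layout(ImageLayout::PRESENT_SRC_KHR);
//     let color_refs = [AttachmentReference::default()
//         .attachment(0)
//         .layout(ImageLayout::COLOR_ATTACHMENT_OPTIMAL)];
//     let subpass = SubpassDescription::default()
//         .pipeline_bind_point(PipelineBindPoint::GRAPHICS)
//         .color_attachments(&color_refs);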
src_access_mask: AccessFlags, pub dst_access_mask: AccessFlags, pub dependency_flags: DependencyFlags, } impl SubpassDependency { #[inline] pub fn src_subpass(mut self, src_subpass: u32) -> Self { self.src_subpass = src_subpass; self } #[inline] pub fn dst_subpass(mut self, dst_subpass: u32) -> Self { self.dst_subpass = dst_subpass; self } #[inline] pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags) -> Self { self.src_stage_mask = src_stage_mask; self } #[inline] pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags) -> Self { self.dst_stage_mask = dst_stage_mask; self } #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self { self.dst_access_mask = dst_access_mask; self } #[inline] pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self { self.dependency_flags = dependency_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: RenderPassCreateFlags, pub attachment_count: u32, pub p_attachments: *const AttachmentDescription, pub subpass_count: u32, pub p_subpasses: *const SubpassDescription<'a>, pub dependency_count: u32, pub p_dependencies: *const SubpassDependency, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassCreateInfo<'_> {} unsafe impl Sync for RenderPassCreateInfo<'_> {} impl ::core::default::Default for RenderPassCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: RenderPassCreateFlags::default(), attachment_count: u32::default(), p_attachments: ::core::ptr::null(), subpass_count: u32::default(), p_subpasses: ::core::ptr::null(), dependency_count: u32::default(), p_dependencies: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATE_INFO; } pub unsafe trait ExtendsRenderPassCreateInfo {} impl<'a> RenderPassCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: RenderPassCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn attachments(mut self, attachments: &'a [AttachmentDescription]) -> Self { self.attachment_count = attachments.len() as _; self.p_attachments = attachments.as_ptr(); self } #[inline] pub fn subpasses(mut self, subpasses: &'a [SubpassDescription<'a>]) -> Self { self.subpass_count = subpasses.len() as _; self.p_subpasses = subpasses.as_ptr(); self } #[inline] pub fn dependencies(mut self, dependencies: &'a [SubpassDependency]) -> Self { self.dependency_count = dependencies.len() as _; self.p_dependencies = dependencies.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct EventCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: EventCreateFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for EventCreateInfo<'_> {} unsafe impl Sync for EventCreateInfo<'_> {} impl ::core::default::Default for EventCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: EventCreateFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for EventCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EVENT_CREATE_INFO; } pub unsafe trait ExtendsEventCreateInfo {} impl<'a> EventCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: EventCreateFlags) -> Self { self.flags = flags; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FenceCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: FenceCreateFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FenceCreateInfo<'_> {} unsafe impl Sync for FenceCreateInfo<'_> {} impl ::core::default::Default for FenceCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: FenceCreateFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FenceCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FENCE_CREATE_INFO; } pub unsafe trait ExtendsFenceCreateInfo {} impl<'a> FenceCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: FenceCreateFlags) -> Self { self.flags = flags; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFeatures { pub robust_buffer_access: Bool32, pub full_draw_index_uint32: Bool32, pub image_cube_array: Bool32, pub independent_blend: Bool32, pub geometry_shader: Bool32, pub tessellation_shader: Bool32, pub sample_rate_shading: Bool32, pub dual_src_blend: Bool32, pub logic_op: Bool32, pub multi_draw_indirect: Bool32, pub draw_indirect_first_instance: Bool32, pub depth_clamp: Bool32, pub depth_bias_clamp: Bool32, pub fill_mode_non_solid: Bool32, pub depth_bounds: Bool32, pub wide_lines: Bool32, pub large_points: Bool32, pub alpha_to_one: Bool32, pub multi_viewport: Bool32, pub sampler_anisotropy: Bool32, pub texture_compression_etc2: Bool32, pub texture_compression_astc_ldr: Bool32, pub texture_compression_bc: Bool32, pub occlusion_query_precise: Bool32, pub pipeline_statistics_query: Bool32, pub vertex_pipeline_stores_and_atomics: Bool32, pub fragment_stores_and_atomics: Bool32, pub shader_tessellation_and_geometry_point_size: Bool32, pub shader_image_gather_extended: Bool32, pub shader_storage_image_extended_formats: Bool32, pub shader_storage_image_multisample: Bool32, pub shader_storage_image_read_without_format: Bool32, pub shader_storage_image_write_without_format: Bool32, pub shader_uniform_buffer_array_dynamic_indexing: Bool32, pub shader_sampled_image_array_dynamic_indexing: Bool32, pub shader_storage_buffer_array_dynamic_indexing: Bool32, pub shader_storage_image_array_dynamic_indexing: Bool32, pub shader_clip_distance: Bool32, pub shader_cull_distance: Bool32, pub shader_float64: Bool32, pub shader_int64: Bool32, pub shader_int16: Bool32, pub shader_resource_residency: Bool32, pub shader_resource_min_lod: Bool32, pub sparse_binding: Bool32, pub sparse_residency_buffer: Bool32, pub sparse_residency_image2_d: Bool32, pub sparse_residency_image3_d: Bool32, pub sparse_residency2_samples: Bool32, pub sparse_residency4_samples: Bool32, pub sparse_residency8_samples: Bool32, pub sparse_residency16_samples: Bool32, pub sparse_residency_aliased: Bool32, pub variable_multisample_rate: Bool32, pub inherited_queries: Bool32, } impl PhysicalDeviceFeatures { #[inline] pub fn robust_buffer_access(mut self, robust_buffer_access: bool) -> Self { self.robust_buffer_access = robust_buffer_access.into(); self } #[inline] pub fn full_draw_index_uint32(mut self, full_draw_index_uint32: bool) -> Self { self.full_draw_index_uint32 = full_draw_index_uint32.into(); self } #[inline] pub fn image_cube_array(mut self, image_cube_array: bool) -> Self { self.image_cube_array = image_cube_array.into(); self } #[inline] pub fn independent_blend(mut self, independent_blend: bool) -> Self { self.independent_blend = independent_blend.into(); self } #[inline] pub fn geometry_shader(mut self, geometry_shader: bool) -> Self { self.geometry_shader = geometry_shader.into(); self } #[inline] pub fn tessellation_shader(mut self, tessellation_shader: bool) -> Self { self.tessellation_shader = tessellation_shader.into(); self } #[inline] 
pub fn sample_rate_shading(mut self, sample_rate_shading: bool) -> Self { self.sample_rate_shading = sample_rate_shading.into(); self } #[inline] pub fn dual_src_blend(mut self, dual_src_blend: bool) -> Self { self.dual_src_blend = dual_src_blend.into(); self } #[inline] pub fn logic_op(mut self, logic_op: bool) -> Self { self.logic_op = logic_op.into(); self } #[inline] pub fn multi_draw_indirect(mut self, multi_draw_indirect: bool) -> Self { self.multi_draw_indirect = multi_draw_indirect.into(); self } #[inline] pub fn draw_indirect_first_instance(mut self, draw_indirect_first_instance: bool) -> Self { self.draw_indirect_first_instance = draw_indirect_first_instance.into(); self } #[inline] pub fn depth_clamp(mut self, depth_clamp: bool) -> Self { self.depth_clamp = depth_clamp.into(); self } #[inline] pub fn depth_bias_clamp(mut self, depth_bias_clamp: bool) -> Self { self.depth_bias_clamp = depth_bias_clamp.into(); self } #[inline] pub fn fill_mode_non_solid(mut self, fill_mode_non_solid: bool) -> Self { self.fill_mode_non_solid = fill_mode_non_solid.into(); self } #[inline] pub fn depth_bounds(mut self, depth_bounds: bool) -> Self { self.depth_bounds = depth_bounds.into(); self } #[inline] pub fn wide_lines(mut self, wide_lines: bool) -> Self { self.wide_lines = wide_lines.into(); self } #[inline] pub fn large_points(mut self, large_points: bool) -> Self { self.large_points = large_points.into(); self } #[inline] pub fn alpha_to_one(mut self, alpha_to_one: bool) -> Self { self.alpha_to_one = alpha_to_one.into(); self } #[inline] pub fn multi_viewport(mut self, multi_viewport: bool) -> Self { self.multi_viewport = multi_viewport.into(); self } #[inline] pub fn sampler_anisotropy(mut self, sampler_anisotropy: bool) -> Self { self.sampler_anisotropy = sampler_anisotropy.into(); self } #[inline] pub fn texture_compression_etc2(mut self, texture_compression_etc2: bool) -> Self { self.texture_compression_etc2 = texture_compression_etc2.into(); self } #[inline] pub fn texture_compression_astc_ldr(mut self, texture_compression_astc_ldr: bool) -> Self { self.texture_compression_astc_ldr = texture_compression_astc_ldr.into(); self } #[inline] pub fn texture_compression_bc(mut self, texture_compression_bc: bool) -> Self { self.texture_compression_bc = texture_compression_bc.into(); self } #[inline] pub fn occlusion_query_precise(mut self, occlusion_query_precise: bool) -> Self { self.occlusion_query_precise = occlusion_query_precise.into(); self } #[inline] pub fn pipeline_statistics_query(mut self, pipeline_statistics_query: bool) -> Self { self.pipeline_statistics_query = pipeline_statistics_query.into(); self } #[inline] pub fn vertex_pipeline_stores_and_atomics( mut self, vertex_pipeline_stores_and_atomics: bool, ) -> Self { self.vertex_pipeline_stores_and_atomics = vertex_pipeline_stores_and_atomics.into(); self } #[inline] pub fn fragment_stores_and_atomics(mut self, fragment_stores_and_atomics: bool) -> Self { self.fragment_stores_and_atomics = fragment_stores_and_atomics.into(); self } #[inline] pub fn shader_tessellation_and_geometry_point_size( mut self, shader_tessellation_and_geometry_point_size: bool, ) -> Self { self.shader_tessellation_and_geometry_point_size = shader_tessellation_and_geometry_point_size.into(); self } #[inline] pub fn shader_image_gather_extended(mut self, shader_image_gather_extended: bool) -> Self { self.shader_image_gather_extended = shader_image_gather_extended.into(); self } #[inline] pub fn shader_storage_image_extended_formats( mut self, 
shader_storage_image_extended_formats: bool, ) -> Self { self.shader_storage_image_extended_formats = shader_storage_image_extended_formats.into(); self } #[inline] pub fn shader_storage_image_multisample( mut self, shader_storage_image_multisample: bool, ) -> Self { self.shader_storage_image_multisample = shader_storage_image_multisample.into(); self } #[inline] pub fn shader_storage_image_read_without_format( mut self, shader_storage_image_read_without_format: bool, ) -> Self { self.shader_storage_image_read_without_format = shader_storage_image_read_without_format.into(); self } #[inline] pub fn shader_storage_image_write_without_format( mut self, shader_storage_image_write_without_format: bool, ) -> Self { self.shader_storage_image_write_without_format = shader_storage_image_write_without_format.into(); self } #[inline] pub fn shader_uniform_buffer_array_dynamic_indexing( mut self, shader_uniform_buffer_array_dynamic_indexing: bool, ) -> Self { self.shader_uniform_buffer_array_dynamic_indexing = shader_uniform_buffer_array_dynamic_indexing.into(); self } #[inline] pub fn shader_sampled_image_array_dynamic_indexing( mut self, shader_sampled_image_array_dynamic_indexing: bool, ) -> Self { self.shader_sampled_image_array_dynamic_indexing = shader_sampled_image_array_dynamic_indexing.into(); self } #[inline] pub fn shader_storage_buffer_array_dynamic_indexing( mut self, shader_storage_buffer_array_dynamic_indexing: bool, ) -> Self { self.shader_storage_buffer_array_dynamic_indexing = shader_storage_buffer_array_dynamic_indexing.into(); self } #[inline] pub fn shader_storage_image_array_dynamic_indexing( mut self, shader_storage_image_array_dynamic_indexing: bool, ) -> Self { self.shader_storage_image_array_dynamic_indexing = shader_storage_image_array_dynamic_indexing.into(); self } #[inline] pub fn shader_clip_distance(mut self, shader_clip_distance: bool) -> Self { self.shader_clip_distance = shader_clip_distance.into(); self } #[inline] pub fn shader_cull_distance(mut self, shader_cull_distance: bool) -> Self { self.shader_cull_distance = shader_cull_distance.into(); self } #[inline] pub fn shader_float64(mut self, shader_float64: bool) -> Self { self.shader_float64 = shader_float64.into(); self } #[inline] pub fn shader_int64(mut self, shader_int64: bool) -> Self { self.shader_int64 = shader_int64.into(); self } #[inline] pub fn shader_int16(mut self, shader_int16: bool) -> Self { self.shader_int16 = shader_int16.into(); self } #[inline] pub fn shader_resource_residency(mut self, shader_resource_residency: bool) -> Self { self.shader_resource_residency = shader_resource_residency.into(); self } #[inline] pub fn shader_resource_min_lod(mut self, shader_resource_min_lod: bool) -> Self { self.shader_resource_min_lod = shader_resource_min_lod.into(); self } #[inline] pub fn sparse_binding(mut self, sparse_binding: bool) -> Self { self.sparse_binding = sparse_binding.into(); self } #[inline] pub fn sparse_residency_buffer(mut self, sparse_residency_buffer: bool) -> Self { self.sparse_residency_buffer = sparse_residency_buffer.into(); self } #[inline] pub fn sparse_residency_image2_d(mut self, sparse_residency_image2_d: bool) -> Self { self.sparse_residency_image2_d = sparse_residency_image2_d.into(); self } #[inline] pub fn sparse_residency_image3_d(mut self, sparse_residency_image3_d: bool) -> Self { self.sparse_residency_image3_d = sparse_residency_image3_d.into(); self } #[inline] pub fn sparse_residency2_samples(mut self, sparse_residency2_samples: bool) -> Self { 
self.sparse_residency2_samples = sparse_residency2_samples.into(); self } #[inline] pub fn sparse_residency4_samples(mut self, sparse_residency4_samples: bool) -> Self { self.sparse_residency4_samples = sparse_residency4_samples.into(); self } #[inline] pub fn sparse_residency8_samples(mut self, sparse_residency8_samples: bool) -> Self { self.sparse_residency8_samples = sparse_residency8_samples.into(); self } #[inline] pub fn sparse_residency16_samples(mut self, sparse_residency16_samples: bool) -> Self { self.sparse_residency16_samples = sparse_residency16_samples.into(); self } #[inline] pub fn sparse_residency_aliased(mut self, sparse_residency_aliased: bool) -> Self { self.sparse_residency_aliased = sparse_residency_aliased.into(); self } #[inline] pub fn variable_multisample_rate(mut self, variable_multisample_rate: bool) -> Self { self.variable_multisample_rate = variable_multisample_rate.into(); self } #[inline] pub fn inherited_queries(mut self, inherited_queries: bool) -> Self { self.inherited_queries = inherited_queries.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSparseProperties { pub residency_standard2_d_block_shape: Bool32, pub residency_standard2_d_multisample_block_shape: Bool32, pub residency_standard3_d_block_shape: Bool32, pub residency_aligned_mip_size: Bool32, pub residency_non_resident_strict: Bool32, } impl PhysicalDeviceSparseProperties { #[inline] pub fn residency_standard2_d_block_shape( mut self, residency_standard2_d_block_shape: bool, ) -> Self { self.residency_standard2_d_block_shape = residency_standard2_d_block_shape.into(); self } #[inline] pub fn residency_standard2_d_multisample_block_shape( mut self, residency_standard2_d_multisample_block_shape: bool, ) -> Self { self.residency_standard2_d_multisample_block_shape = residency_standard2_d_multisample_block_shape.into(); self } #[inline] pub fn residency_standard3_d_block_shape( mut self, residency_standard3_d_block_shape: bool, ) -> Self { self.residency_standard3_d_block_shape = residency_standard3_d_block_shape.into(); self } #[inline] pub fn residency_aligned_mip_size(mut self, residency_aligned_mip_size: bool) -> Self { self.residency_aligned_mip_size = residency_aligned_mip_size.into(); self } #[inline] pub fn residency_non_resident_strict(mut self, residency_non_resident_strict: bool) -> Self { self.residency_non_resident_strict = residency_non_resident_strict.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceLimits { pub max_image_dimension1_d: u32, pub max_image_dimension2_d: u32, pub max_image_dimension3_d: u32, pub max_image_dimension_cube: u32, pub max_image_array_layers: u32, pub max_texel_buffer_elements: u32, pub max_uniform_buffer_range: u32, pub max_storage_buffer_range: u32, pub max_push_constants_size: u32, pub max_memory_allocation_count: u32, pub max_sampler_allocation_count: u32, pub buffer_image_granularity: DeviceSize, pub sparse_address_space_size: DeviceSize, pub max_bound_descriptor_sets: u32, pub max_per_stage_descriptor_samplers: u32, pub max_per_stage_descriptor_uniform_buffers: u32, pub max_per_stage_descriptor_storage_buffers: u32, pub max_per_stage_descriptor_sampled_images: u32, pub max_per_stage_descriptor_storage_images: u32, pub max_per_stage_descriptor_input_attachments: u32, pub max_per_stage_resources: u32, pub max_descriptor_set_samplers: u32, pub 
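// Sketch (illustrative only) of opting into a few PhysicalDeviceFeatures with the
// builders above; a struct like this is typically passed on to device creation
// (e.g. via DeviceCreateInfo::enabled_features) after checking that the physical
// device actually reports the features.
//
//     let enabled_features = PhysicalDeviceFeatures::default()
//         .sampler_anisotropy(true)
//         .fill_mode_non_solid(true)
//         .geometry_shader(true);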
max_descriptor_set_uniform_buffers: u32, pub max_descriptor_set_uniform_buffers_dynamic: u32, pub max_descriptor_set_storage_buffers: u32, pub max_descriptor_set_storage_buffers_dynamic: u32, pub max_descriptor_set_sampled_images: u32, pub max_descriptor_set_storage_images: u32, pub max_descriptor_set_input_attachments: u32, pub max_vertex_input_attributes: u32, pub max_vertex_input_bindings: u32, pub max_vertex_input_attribute_offset: u32, pub max_vertex_input_binding_stride: u32, pub max_vertex_output_components: u32, pub max_tessellation_generation_level: u32, pub max_tessellation_patch_size: u32, pub max_tessellation_control_per_vertex_input_components: u32, pub max_tessellation_control_per_vertex_output_components: u32, pub max_tessellation_control_per_patch_output_components: u32, pub max_tessellation_control_total_output_components: u32, pub max_tessellation_evaluation_input_components: u32, pub max_tessellation_evaluation_output_components: u32, pub max_geometry_shader_invocations: u32, pub max_geometry_input_components: u32, pub max_geometry_output_components: u32, pub max_geometry_output_vertices: u32, pub max_geometry_total_output_components: u32, pub max_fragment_input_components: u32, pub max_fragment_output_attachments: u32, pub max_fragment_dual_src_attachments: u32, pub max_fragment_combined_output_resources: u32, pub max_compute_shared_memory_size: u32, pub max_compute_work_group_count: [u32; 3], pub max_compute_work_group_invocations: u32, pub max_compute_work_group_size: [u32; 3], pub sub_pixel_precision_bits: u32, pub sub_texel_precision_bits: u32, pub mipmap_precision_bits: u32, pub max_draw_indexed_index_value: u32, pub max_draw_indirect_count: u32, pub max_sampler_lod_bias: f32, pub max_sampler_anisotropy: f32, pub max_viewports: u32, pub max_viewport_dimensions: [u32; 2], pub viewport_bounds_range: [f32; 2], pub viewport_sub_pixel_bits: u32, pub min_memory_map_alignment: usize, pub min_texel_buffer_offset_alignment: DeviceSize, pub min_uniform_buffer_offset_alignment: DeviceSize, pub min_storage_buffer_offset_alignment: DeviceSize, pub min_texel_offset: i32, pub max_texel_offset: u32, pub min_texel_gather_offset: i32, pub max_texel_gather_offset: u32, pub min_interpolation_offset: f32, pub max_interpolation_offset: f32, pub sub_pixel_interpolation_offset_bits: u32, pub max_framebuffer_width: u32, pub max_framebuffer_height: u32, pub max_framebuffer_layers: u32, pub framebuffer_color_sample_counts: SampleCountFlags, pub framebuffer_depth_sample_counts: SampleCountFlags, pub framebuffer_stencil_sample_counts: SampleCountFlags, pub framebuffer_no_attachments_sample_counts: SampleCountFlags, pub max_color_attachments: u32, pub sampled_image_color_sample_counts: SampleCountFlags, pub sampled_image_integer_sample_counts: SampleCountFlags, pub sampled_image_depth_sample_counts: SampleCountFlags, pub sampled_image_stencil_sample_counts: SampleCountFlags, pub storage_image_sample_counts: SampleCountFlags, pub max_sample_mask_words: u32, pub timestamp_compute_and_graphics: Bool32, pub timestamp_period: f32, pub max_clip_distances: u32, pub max_cull_distances: u32, pub max_combined_clip_and_cull_distances: u32, pub discrete_queue_priorities: u32, pub point_size_range: [f32; 2], pub line_width_range: [f32; 2], pub point_size_granularity: f32, pub line_width_granularity: f32, pub strict_lines: Bool32, pub standard_sample_locations: Bool32, pub optimal_buffer_copy_offset_alignment: DeviceSize, pub optimal_buffer_copy_row_pitch_alignment: DeviceSize, pub non_coherent_atom_size: 
DeviceSize, } impl ::core::default::Default for PhysicalDeviceLimits { #[inline] fn default() -> Self { Self { max_image_dimension1_d: u32::default(), max_image_dimension2_d: u32::default(), max_image_dimension3_d: u32::default(), max_image_dimension_cube: u32::default(), max_image_array_layers: u32::default(), max_texel_buffer_elements: u32::default(), max_uniform_buffer_range: u32::default(), max_storage_buffer_range: u32::default(), max_push_constants_size: u32::default(), max_memory_allocation_count: u32::default(), max_sampler_allocation_count: u32::default(), buffer_image_granularity: DeviceSize::default(), sparse_address_space_size: DeviceSize::default(), max_bound_descriptor_sets: u32::default(), max_per_stage_descriptor_samplers: u32::default(), max_per_stage_descriptor_uniform_buffers: u32::default(), max_per_stage_descriptor_storage_buffers: u32::default(), max_per_stage_descriptor_sampled_images: u32::default(), max_per_stage_descriptor_storage_images: u32::default(), max_per_stage_descriptor_input_attachments: u32::default(), max_per_stage_resources: u32::default(), max_descriptor_set_samplers: u32::default(), max_descriptor_set_uniform_buffers: u32::default(), max_descriptor_set_uniform_buffers_dynamic: u32::default(), max_descriptor_set_storage_buffers: u32::default(), max_descriptor_set_storage_buffers_dynamic: u32::default(), max_descriptor_set_sampled_images: u32::default(), max_descriptor_set_storage_images: u32::default(), max_descriptor_set_input_attachments: u32::default(), max_vertex_input_attributes: u32::default(), max_vertex_input_bindings: u32::default(), max_vertex_input_attribute_offset: u32::default(), max_vertex_input_binding_stride: u32::default(), max_vertex_output_components: u32::default(), max_tessellation_generation_level: u32::default(), max_tessellation_patch_size: u32::default(), max_tessellation_control_per_vertex_input_components: u32::default(), max_tessellation_control_per_vertex_output_components: u32::default(), max_tessellation_control_per_patch_output_components: u32::default(), max_tessellation_control_total_output_components: u32::default(), max_tessellation_evaluation_input_components: u32::default(), max_tessellation_evaluation_output_components: u32::default(), max_geometry_shader_invocations: u32::default(), max_geometry_input_components: u32::default(), max_geometry_output_components: u32::default(), max_geometry_output_vertices: u32::default(), max_geometry_total_output_components: u32::default(), max_fragment_input_components: u32::default(), max_fragment_output_attachments: u32::default(), max_fragment_dual_src_attachments: u32::default(), max_fragment_combined_output_resources: u32::default(), max_compute_shared_memory_size: u32::default(), max_compute_work_group_count: unsafe { ::core::mem::zeroed() }, max_compute_work_group_invocations: u32::default(), max_compute_work_group_size: unsafe { ::core::mem::zeroed() }, sub_pixel_precision_bits: u32::default(), sub_texel_precision_bits: u32::default(), mipmap_precision_bits: u32::default(), max_draw_indexed_index_value: u32::default(), max_draw_indirect_count: u32::default(), max_sampler_lod_bias: f32::default(), max_sampler_anisotropy: f32::default(), max_viewports: u32::default(), max_viewport_dimensions: unsafe { ::core::mem::zeroed() }, viewport_bounds_range: unsafe { ::core::mem::zeroed() }, viewport_sub_pixel_bits: u32::default(), min_memory_map_alignment: usize::default(), min_texel_buffer_offset_alignment: DeviceSize::default(), min_uniform_buffer_offset_alignment: 
DeviceSize::default(), min_storage_buffer_offset_alignment: DeviceSize::default(), min_texel_offset: i32::default(), max_texel_offset: u32::default(), min_texel_gather_offset: i32::default(), max_texel_gather_offset: u32::default(), min_interpolation_offset: f32::default(), max_interpolation_offset: f32::default(), sub_pixel_interpolation_offset_bits: u32::default(), max_framebuffer_width: u32::default(), max_framebuffer_height: u32::default(), max_framebuffer_layers: u32::default(), framebuffer_color_sample_counts: SampleCountFlags::default(), framebuffer_depth_sample_counts: SampleCountFlags::default(), framebuffer_stencil_sample_counts: SampleCountFlags::default(), framebuffer_no_attachments_sample_counts: SampleCountFlags::default(), max_color_attachments: u32::default(), sampled_image_color_sample_counts: SampleCountFlags::default(), sampled_image_integer_sample_counts: SampleCountFlags::default(), sampled_image_depth_sample_counts: SampleCountFlags::default(), sampled_image_stencil_sample_counts: SampleCountFlags::default(), storage_image_sample_counts: SampleCountFlags::default(), max_sample_mask_words: u32::default(), timestamp_compute_and_graphics: Bool32::default(), timestamp_period: f32::default(), max_clip_distances: u32::default(), max_cull_distances: u32::default(), max_combined_clip_and_cull_distances: u32::default(), discrete_queue_priorities: u32::default(), point_size_range: unsafe { ::core::mem::zeroed() }, line_width_range: unsafe { ::core::mem::zeroed() }, point_size_granularity: f32::default(), line_width_granularity: f32::default(), strict_lines: Bool32::default(), standard_sample_locations: Bool32::default(), optimal_buffer_copy_offset_alignment: DeviceSize::default(), optimal_buffer_copy_row_pitch_alignment: DeviceSize::default(), non_coherent_atom_size: DeviceSize::default(), } } } impl PhysicalDeviceLimits { #[inline] pub fn max_image_dimension1_d(mut self, max_image_dimension1_d: u32) -> Self { self.max_image_dimension1_d = max_image_dimension1_d; self } #[inline] pub fn max_image_dimension2_d(mut self, max_image_dimension2_d: u32) -> Self { self.max_image_dimension2_d = max_image_dimension2_d; self } #[inline] pub fn max_image_dimension3_d(mut self, max_image_dimension3_d: u32) -> Self { self.max_image_dimension3_d = max_image_dimension3_d; self } #[inline] pub fn max_image_dimension_cube(mut self, max_image_dimension_cube: u32) -> Self { self.max_image_dimension_cube = max_image_dimension_cube; self } #[inline] pub fn max_image_array_layers(mut self, max_image_array_layers: u32) -> Self { self.max_image_array_layers = max_image_array_layers; self } #[inline] pub fn max_texel_buffer_elements(mut self, max_texel_buffer_elements: u32) -> Self { self.max_texel_buffer_elements = max_texel_buffer_elements; self } #[inline] pub fn max_uniform_buffer_range(mut self, max_uniform_buffer_range: u32) -> Self { self.max_uniform_buffer_range = max_uniform_buffer_range; self } #[inline] pub fn max_storage_buffer_range(mut self, max_storage_buffer_range: u32) -> Self { self.max_storage_buffer_range = max_storage_buffer_range; self } #[inline] pub fn max_push_constants_size(mut self, max_push_constants_size: u32) -> Self { self.max_push_constants_size = max_push_constants_size; self } #[inline] pub fn max_memory_allocation_count(mut self, max_memory_allocation_count: u32) -> Self { self.max_memory_allocation_count = max_memory_allocation_count; self } #[inline] pub fn max_sampler_allocation_count(mut self, max_sampler_allocation_count: u32) -> Self { 
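// Illustrative sketch (not part of the generated bindings): `PhysicalDeviceLimits` is normally
// read back from the driver rather than built by hand. Assuming an `ash::Instance` named
// `instance` and a `vk::PhysicalDevice` named `physical_device` in caller code:
//
//     let limits = unsafe { instance.get_physical_device_properties(physical_device) }.limits;
//     assert!(128 <= limits.max_push_constants_size);
//
// The setters generated below remain available for constructing the struct manually (e.g. in
// tests).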
self.max_sampler_allocation_count = max_sampler_allocation_count; self } #[inline] pub fn buffer_image_granularity(mut self, buffer_image_granularity: DeviceSize) -> Self { self.buffer_image_granularity = buffer_image_granularity; self } #[inline] pub fn sparse_address_space_size(mut self, sparse_address_space_size: DeviceSize) -> Self { self.sparse_address_space_size = sparse_address_space_size; self } #[inline] pub fn max_bound_descriptor_sets(mut self, max_bound_descriptor_sets: u32) -> Self { self.max_bound_descriptor_sets = max_bound_descriptor_sets; self } #[inline] pub fn max_per_stage_descriptor_samplers( mut self, max_per_stage_descriptor_samplers: u32, ) -> Self { self.max_per_stage_descriptor_samplers = max_per_stage_descriptor_samplers; self } #[inline] pub fn max_per_stage_descriptor_uniform_buffers( mut self, max_per_stage_descriptor_uniform_buffers: u32, ) -> Self { self.max_per_stage_descriptor_uniform_buffers = max_per_stage_descriptor_uniform_buffers; self } #[inline] pub fn max_per_stage_descriptor_storage_buffers( mut self, max_per_stage_descriptor_storage_buffers: u32, ) -> Self { self.max_per_stage_descriptor_storage_buffers = max_per_stage_descriptor_storage_buffers; self } #[inline] pub fn max_per_stage_descriptor_sampled_images( mut self, max_per_stage_descriptor_sampled_images: u32, ) -> Self { self.max_per_stage_descriptor_sampled_images = max_per_stage_descriptor_sampled_images; self } #[inline] pub fn max_per_stage_descriptor_storage_images( mut self, max_per_stage_descriptor_storage_images: u32, ) -> Self { self.max_per_stage_descriptor_storage_images = max_per_stage_descriptor_storage_images; self } #[inline] pub fn max_per_stage_descriptor_input_attachments( mut self, max_per_stage_descriptor_input_attachments: u32, ) -> Self { self.max_per_stage_descriptor_input_attachments = max_per_stage_descriptor_input_attachments; self } #[inline] pub fn max_per_stage_resources(mut self, max_per_stage_resources: u32) -> Self { self.max_per_stage_resources = max_per_stage_resources; self } #[inline] pub fn max_descriptor_set_samplers(mut self, max_descriptor_set_samplers: u32) -> Self { self.max_descriptor_set_samplers = max_descriptor_set_samplers; self } #[inline] pub fn max_descriptor_set_uniform_buffers( mut self, max_descriptor_set_uniform_buffers: u32, ) -> Self { self.max_descriptor_set_uniform_buffers = max_descriptor_set_uniform_buffers; self } #[inline] pub fn max_descriptor_set_uniform_buffers_dynamic( mut self, max_descriptor_set_uniform_buffers_dynamic: u32, ) -> Self { self.max_descriptor_set_uniform_buffers_dynamic = max_descriptor_set_uniform_buffers_dynamic; self } #[inline] pub fn max_descriptor_set_storage_buffers( mut self, max_descriptor_set_storage_buffers: u32, ) -> Self { self.max_descriptor_set_storage_buffers = max_descriptor_set_storage_buffers; self } #[inline] pub fn max_descriptor_set_storage_buffers_dynamic( mut self, max_descriptor_set_storage_buffers_dynamic: u32, ) -> Self { self.max_descriptor_set_storage_buffers_dynamic = max_descriptor_set_storage_buffers_dynamic; self } #[inline] pub fn max_descriptor_set_sampled_images( mut self, max_descriptor_set_sampled_images: u32, ) -> Self { self.max_descriptor_set_sampled_images = max_descriptor_set_sampled_images; self } #[inline] pub fn max_descriptor_set_storage_images( mut self, max_descriptor_set_storage_images: u32, ) -> Self { self.max_descriptor_set_storage_images = max_descriptor_set_storage_images; self } #[inline] pub fn max_descriptor_set_input_attachments( mut self, 
max_descriptor_set_input_attachments: u32, ) -> Self { self.max_descriptor_set_input_attachments = max_descriptor_set_input_attachments; self } #[inline] pub fn max_vertex_input_attributes(mut self, max_vertex_input_attributes: u32) -> Self { self.max_vertex_input_attributes = max_vertex_input_attributes; self } #[inline] pub fn max_vertex_input_bindings(mut self, max_vertex_input_bindings: u32) -> Self { self.max_vertex_input_bindings = max_vertex_input_bindings; self } #[inline] pub fn max_vertex_input_attribute_offset( mut self, max_vertex_input_attribute_offset: u32, ) -> Self { self.max_vertex_input_attribute_offset = max_vertex_input_attribute_offset; self } #[inline] pub fn max_vertex_input_binding_stride(mut self, max_vertex_input_binding_stride: u32) -> Self { self.max_vertex_input_binding_stride = max_vertex_input_binding_stride; self } #[inline] pub fn max_vertex_output_components(mut self, max_vertex_output_components: u32) -> Self { self.max_vertex_output_components = max_vertex_output_components; self } #[inline] pub fn max_tessellation_generation_level( mut self, max_tessellation_generation_level: u32, ) -> Self { self.max_tessellation_generation_level = max_tessellation_generation_level; self } #[inline] pub fn max_tessellation_patch_size(mut self, max_tessellation_patch_size: u32) -> Self { self.max_tessellation_patch_size = max_tessellation_patch_size; self } #[inline] pub fn max_tessellation_control_per_vertex_input_components( mut self, max_tessellation_control_per_vertex_input_components: u32, ) -> Self { self.max_tessellation_control_per_vertex_input_components = max_tessellation_control_per_vertex_input_components; self } #[inline] pub fn max_tessellation_control_per_vertex_output_components( mut self, max_tessellation_control_per_vertex_output_components: u32, ) -> Self { self.max_tessellation_control_per_vertex_output_components = max_tessellation_control_per_vertex_output_components; self } #[inline] pub fn max_tessellation_control_per_patch_output_components( mut self, max_tessellation_control_per_patch_output_components: u32, ) -> Self { self.max_tessellation_control_per_patch_output_components = max_tessellation_control_per_patch_output_components; self } #[inline] pub fn max_tessellation_control_total_output_components( mut self, max_tessellation_control_total_output_components: u32, ) -> Self { self.max_tessellation_control_total_output_components = max_tessellation_control_total_output_components; self } #[inline] pub fn max_tessellation_evaluation_input_components( mut self, max_tessellation_evaluation_input_components: u32, ) -> Self { self.max_tessellation_evaluation_input_components = max_tessellation_evaluation_input_components; self } #[inline] pub fn max_tessellation_evaluation_output_components( mut self, max_tessellation_evaluation_output_components: u32, ) -> Self { self.max_tessellation_evaluation_output_components = max_tessellation_evaluation_output_components; self } #[inline] pub fn max_geometry_shader_invocations(mut self, max_geometry_shader_invocations: u32) -> Self { self.max_geometry_shader_invocations = max_geometry_shader_invocations; self } #[inline] pub fn max_geometry_input_components(mut self, max_geometry_input_components: u32) -> Self { self.max_geometry_input_components = max_geometry_input_components; self } #[inline] pub fn max_geometry_output_components(mut self, max_geometry_output_components: u32) -> Self { self.max_geometry_output_components = max_geometry_output_components; self } #[inline] pub fn 
max_geometry_output_vertices(mut self, max_geometry_output_vertices: u32) -> Self { self.max_geometry_output_vertices = max_geometry_output_vertices; self } #[inline] pub fn max_geometry_total_output_components( mut self, max_geometry_total_output_components: u32, ) -> Self { self.max_geometry_total_output_components = max_geometry_total_output_components; self } #[inline] pub fn max_fragment_input_components(mut self, max_fragment_input_components: u32) -> Self { self.max_fragment_input_components = max_fragment_input_components; self } #[inline] pub fn max_fragment_output_attachments(mut self, max_fragment_output_attachments: u32) -> Self { self.max_fragment_output_attachments = max_fragment_output_attachments; self } #[inline] pub fn max_fragment_dual_src_attachments( mut self, max_fragment_dual_src_attachments: u32, ) -> Self { self.max_fragment_dual_src_attachments = max_fragment_dual_src_attachments; self } #[inline] pub fn max_fragment_combined_output_resources( mut self, max_fragment_combined_output_resources: u32, ) -> Self { self.max_fragment_combined_output_resources = max_fragment_combined_output_resources; self } #[inline] pub fn max_compute_shared_memory_size(mut self, max_compute_shared_memory_size: u32) -> Self { self.max_compute_shared_memory_size = max_compute_shared_memory_size; self } #[inline] pub fn max_compute_work_group_count(mut self, max_compute_work_group_count: [u32; 3]) -> Self { self.max_compute_work_group_count = max_compute_work_group_count; self } #[inline] pub fn max_compute_work_group_invocations( mut self, max_compute_work_group_invocations: u32, ) -> Self { self.max_compute_work_group_invocations = max_compute_work_group_invocations; self } #[inline] pub fn max_compute_work_group_size(mut self, max_compute_work_group_size: [u32; 3]) -> Self { self.max_compute_work_group_size = max_compute_work_group_size; self } #[inline] pub fn sub_pixel_precision_bits(mut self, sub_pixel_precision_bits: u32) -> Self { self.sub_pixel_precision_bits = sub_pixel_precision_bits; self } #[inline] pub fn sub_texel_precision_bits(mut self, sub_texel_precision_bits: u32) -> Self { self.sub_texel_precision_bits = sub_texel_precision_bits; self } #[inline] pub fn mipmap_precision_bits(mut self, mipmap_precision_bits: u32) -> Self { self.mipmap_precision_bits = mipmap_precision_bits; self } #[inline] pub fn max_draw_indexed_index_value(mut self, max_draw_indexed_index_value: u32) -> Self { self.max_draw_indexed_index_value = max_draw_indexed_index_value; self } #[inline] pub fn max_draw_indirect_count(mut self, max_draw_indirect_count: u32) -> Self { self.max_draw_indirect_count = max_draw_indirect_count; self } #[inline] pub fn max_sampler_lod_bias(mut self, max_sampler_lod_bias: f32) -> Self { self.max_sampler_lod_bias = max_sampler_lod_bias; self } #[inline] pub fn max_sampler_anisotropy(mut self, max_sampler_anisotropy: f32) -> Self { self.max_sampler_anisotropy = max_sampler_anisotropy; self } #[inline] pub fn max_viewports(mut self, max_viewports: u32) -> Self { self.max_viewports = max_viewports; self } #[inline] pub fn max_viewport_dimensions(mut self, max_viewport_dimensions: [u32; 2]) -> Self { self.max_viewport_dimensions = max_viewport_dimensions; self } #[inline] pub fn viewport_bounds_range(mut self, viewport_bounds_range: [f32; 2]) -> Self { self.viewport_bounds_range = viewport_bounds_range; self } #[inline] pub fn viewport_sub_pixel_bits(mut self, viewport_sub_pixel_bits: u32) -> Self { self.viewport_sub_pixel_bits = viewport_sub_pixel_bits; self } #[inline] pub 
fn min_memory_map_alignment(mut self, min_memory_map_alignment: usize) -> Self { self.min_memory_map_alignment = min_memory_map_alignment; self } #[inline] pub fn min_texel_buffer_offset_alignment( mut self, min_texel_buffer_offset_alignment: DeviceSize, ) -> Self { self.min_texel_buffer_offset_alignment = min_texel_buffer_offset_alignment; self } #[inline] pub fn min_uniform_buffer_offset_alignment( mut self, min_uniform_buffer_offset_alignment: DeviceSize, ) -> Self { self.min_uniform_buffer_offset_alignment = min_uniform_buffer_offset_alignment; self } #[inline] pub fn min_storage_buffer_offset_alignment( mut self, min_storage_buffer_offset_alignment: DeviceSize, ) -> Self { self.min_storage_buffer_offset_alignment = min_storage_buffer_offset_alignment; self } #[inline] pub fn min_texel_offset(mut self, min_texel_offset: i32) -> Self { self.min_texel_offset = min_texel_offset; self } #[inline] pub fn max_texel_offset(mut self, max_texel_offset: u32) -> Self { self.max_texel_offset = max_texel_offset; self } #[inline] pub fn min_texel_gather_offset(mut self, min_texel_gather_offset: i32) -> Self { self.min_texel_gather_offset = min_texel_gather_offset; self } #[inline] pub fn max_texel_gather_offset(mut self, max_texel_gather_offset: u32) -> Self { self.max_texel_gather_offset = max_texel_gather_offset; self } #[inline] pub fn min_interpolation_offset(mut self, min_interpolation_offset: f32) -> Self { self.min_interpolation_offset = min_interpolation_offset; self } #[inline] pub fn max_interpolation_offset(mut self, max_interpolation_offset: f32) -> Self { self.max_interpolation_offset = max_interpolation_offset; self } #[inline] pub fn sub_pixel_interpolation_offset_bits( mut self, sub_pixel_interpolation_offset_bits: u32, ) -> Self { self.sub_pixel_interpolation_offset_bits = sub_pixel_interpolation_offset_bits; self } #[inline] pub fn max_framebuffer_width(mut self, max_framebuffer_width: u32) -> Self { self.max_framebuffer_width = max_framebuffer_width; self } #[inline] pub fn max_framebuffer_height(mut self, max_framebuffer_height: u32) -> Self { self.max_framebuffer_height = max_framebuffer_height; self } #[inline] pub fn max_framebuffer_layers(mut self, max_framebuffer_layers: u32) -> Self { self.max_framebuffer_layers = max_framebuffer_layers; self } #[inline] pub fn framebuffer_color_sample_counts( mut self, framebuffer_color_sample_counts: SampleCountFlags, ) -> Self { self.framebuffer_color_sample_counts = framebuffer_color_sample_counts; self } #[inline] pub fn framebuffer_depth_sample_counts( mut self, framebuffer_depth_sample_counts: SampleCountFlags, ) -> Self { self.framebuffer_depth_sample_counts = framebuffer_depth_sample_counts; self } #[inline] pub fn framebuffer_stencil_sample_counts( mut self, framebuffer_stencil_sample_counts: SampleCountFlags, ) -> Self { self.framebuffer_stencil_sample_counts = framebuffer_stencil_sample_counts; self } #[inline] pub fn framebuffer_no_attachments_sample_counts( mut self, framebuffer_no_attachments_sample_counts: SampleCountFlags, ) -> Self { self.framebuffer_no_attachments_sample_counts = framebuffer_no_attachments_sample_counts; self } #[inline] pub fn max_color_attachments(mut self, max_color_attachments: u32) -> Self { self.max_color_attachments = max_color_attachments; self } #[inline] pub fn sampled_image_color_sample_counts( mut self, sampled_image_color_sample_counts: SampleCountFlags, ) -> Self { self.sampled_image_color_sample_counts = sampled_image_color_sample_counts; self } #[inline] pub fn 
sampled_image_integer_sample_counts( mut self, sampled_image_integer_sample_counts: SampleCountFlags, ) -> Self { self.sampled_image_integer_sample_counts = sampled_image_integer_sample_counts; self } #[inline] pub fn sampled_image_depth_sample_counts( mut self, sampled_image_depth_sample_counts: SampleCountFlags, ) -> Self { self.sampled_image_depth_sample_counts = sampled_image_depth_sample_counts; self } #[inline] pub fn sampled_image_stencil_sample_counts( mut self, sampled_image_stencil_sample_counts: SampleCountFlags, ) -> Self { self.sampled_image_stencil_sample_counts = sampled_image_stencil_sample_counts; self } #[inline] pub fn storage_image_sample_counts( mut self, storage_image_sample_counts: SampleCountFlags, ) -> Self { self.storage_image_sample_counts = storage_image_sample_counts; self } #[inline] pub fn max_sample_mask_words(mut self, max_sample_mask_words: u32) -> Self { self.max_sample_mask_words = max_sample_mask_words; self } #[inline] pub fn timestamp_compute_and_graphics(mut self, timestamp_compute_and_graphics: bool) -> Self { self.timestamp_compute_and_graphics = timestamp_compute_and_graphics.into(); self } #[inline] pub fn timestamp_period(mut self, timestamp_period: f32) -> Self { self.timestamp_period = timestamp_period; self } #[inline] pub fn max_clip_distances(mut self, max_clip_distances: u32) -> Self { self.max_clip_distances = max_clip_distances; self } #[inline] pub fn max_cull_distances(mut self, max_cull_distances: u32) -> Self { self.max_cull_distances = max_cull_distances; self } #[inline] pub fn max_combined_clip_and_cull_distances( mut self, max_combined_clip_and_cull_distances: u32, ) -> Self { self.max_combined_clip_and_cull_distances = max_combined_clip_and_cull_distances; self } #[inline] pub fn discrete_queue_priorities(mut self, discrete_queue_priorities: u32) -> Self { self.discrete_queue_priorities = discrete_queue_priorities; self } #[inline] pub fn point_size_range(mut self, point_size_range: [f32; 2]) -> Self { self.point_size_range = point_size_range; self } #[inline] pub fn line_width_range(mut self, line_width_range: [f32; 2]) -> Self { self.line_width_range = line_width_range; self } #[inline] pub fn point_size_granularity(mut self, point_size_granularity: f32) -> Self { self.point_size_granularity = point_size_granularity; self } #[inline] pub fn line_width_granularity(mut self, line_width_granularity: f32) -> Self { self.line_width_granularity = line_width_granularity; self } #[inline] pub fn strict_lines(mut self, strict_lines: bool) -> Self { self.strict_lines = strict_lines.into(); self } #[inline] pub fn standard_sample_locations(mut self, standard_sample_locations: bool) -> Self { self.standard_sample_locations = standard_sample_locations.into(); self } #[inline] pub fn optimal_buffer_copy_offset_alignment( mut self, optimal_buffer_copy_offset_alignment: DeviceSize, ) -> Self { self.optimal_buffer_copy_offset_alignment = optimal_buffer_copy_offset_alignment; self } #[inline] pub fn optimal_buffer_copy_row_pitch_alignment( mut self, optimal_buffer_copy_row_pitch_alignment: DeviceSize, ) -> Self { self.optimal_buffer_copy_row_pitch_alignment = optimal_buffer_copy_row_pitch_alignment; self } #[inline] pub fn non_coherent_atom_size(mut self, non_coherent_atom_size: DeviceSize) -> Self { self.non_coherent_atom_size = non_coherent_atom_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreCreateInfo<'a> { pub s_type: StructureType, pub p_next: 
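// Illustrative sketch (not part of the generated bindings): a plain binary semaphore needs no
// extra configuration, so the default-initialized create info is usually passed as-is.
// Assuming an `ash::Device` named `device` in caller code:
//
//     let semaphore =
//         unsafe { device.create_semaphore(&vk::SemaphoreCreateInfo::default(), None) }?;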
*const c_void, pub flags: SemaphoreCreateFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreCreateInfo<'_> {} unsafe impl Sync for SemaphoreCreateInfo<'_> {} impl ::core::default::Default for SemaphoreCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: SemaphoreCreateFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_CREATE_INFO; } pub unsafe trait ExtendsSemaphoreCreateInfo {} impl<'a> SemaphoreCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: SemaphoreCreateFlags) -> Self { self.flags = flags; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueryPoolCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: QueryPoolCreateFlags, pub query_type: QueryType, pub query_count: u32, pub pipeline_statistics: QueryPipelineStatisticFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueryPoolCreateInfo<'_> {} unsafe impl Sync for QueryPoolCreateInfo<'_> {} impl ::core::default::Default for QueryPoolCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: QueryPoolCreateFlags::default(), query_type: QueryType::default(), query_count: u32::default(), pipeline_statistics: QueryPipelineStatisticFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueryPoolCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUERY_POOL_CREATE_INFO; } pub unsafe trait ExtendsQueryPoolCreateInfo {} impl<'a> QueryPoolCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: QueryPoolCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn query_type(mut self, query_type: QueryType) -> Self { self.query_type = query_type; self } #[inline] pub fn query_count(mut self, query_count: u32) -> Self { self.query_count = query_count; self } #[inline] pub fn pipeline_statistics(mut self, pipeline_statistics: QueryPipelineStatisticFlags) -> Self { self.pipeline_statistics = pipeline_statistics; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FramebufferCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: FramebufferCreateFlags, pub render_pass: RenderPass, pub attachment_count: u32, pub p_attachments: *const ImageView, pub width: u32, pub height: u32, pub layers: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FramebufferCreateInfo<'_> {} unsafe impl Sync for FramebufferCreateInfo<'_> {} impl ::core::default::Default for FramebufferCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: FramebufferCreateFlags::default(), render_pass: RenderPass::default(), attachment_count: u32::default(), p_attachments: ::core::ptr::null(), width: u32::default(), height: u32::default(), layers: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FramebufferCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_CREATE_INFO; } pub unsafe trait ExtendsFramebufferCreateInfo {} impl<'a> FramebufferCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: FramebufferCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn render_pass(mut self, render_pass: RenderPass) -> Self { self.render_pass = render_pass; self } #[inline] pub fn attachment_count(mut self, attachment_count: u32) -> Self { self.attachment_count = attachment_count; self } #[inline] pub fn attachments(mut self, attachments: &'a [ImageView]) -> Self { self.attachment_count = attachments.len() as _; self.p_attachments = attachments.as_ptr(); self } #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } #[inline] pub fn layers(mut self, layers: u32) -> Self { self.layers = layers; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DrawIndirectCommand { pub vertex_count: u32, pub instance_count: u32, pub first_vertex: u32, pub first_instance: u32, } impl DrawIndirectCommand { #[inline] pub fn vertex_count(mut self, vertex_count: u32) -> Self { self.vertex_count = vertex_count; self } #[inline] pub fn instance_count(mut self, instance_count: u32) -> Self { self.instance_count = instance_count; self } #[inline] pub fn first_vertex(mut self, first_vertex: u32) -> Self { self.first_vertex = first_vertex; self } #[inline] pub fn first_instance(mut self, first_instance: u32) -> Self { self.first_instance = first_instance; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DrawIndexedIndirectCommand { pub index_count: u32, pub instance_count: u32, pub first_index: u32, pub vertex_offset: i32, pub first_instance: u32, } impl DrawIndexedIndirectCommand { #[inline] pub fn index_count(mut self, index_count: u32) -> Self { self.index_count = index_count; self } #[inline] pub fn instance_count(mut self, instance_count: u32) -> Self { self.instance_count = instance_count; self } #[inline] pub fn first_index(mut self, first_index: u32) -> Self { self.first_index = first_index; self } #[inline] pub fn vertex_offset(mut self, vertex_offset: i32) -> Self { self.vertex_offset = vertex_offset; self } #[inline] pub fn first_instance(mut self, first_instance: u32) -> Self { self.first_instance = first_instance; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DispatchIndirectCommand { pub x: u32, pub y: u32, pub z: u32, } impl DispatchIndirectCommand { #[inline] pub fn x(mut self, x: u32) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: u32) -> Self { self.y = y; self } #[inline] pub fn z(mut self, z: u32) -> Self { self.z = z; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MultiDrawInfoEXT { pub first_vertex: u32, pub vertex_count: u32, } impl MultiDrawInfoEXT { #[inline] pub fn first_vertex(mut self, first_vertex: u32) -> Self { self.first_vertex = first_vertex; self } #[inline] pub fn vertex_count(mut self, vertex_count: u32) -> Self { self.vertex_count = vertex_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MultiDrawIndexedInfoEXT { pub first_index: u32, pub index_count: u32, pub vertex_offset: i32, } impl MultiDrawIndexedInfoEXT { #[inline] pub fn first_index(mut self, first_index: u32) -> Self { self.first_index = first_index; self } #[inline] pub fn index_count(mut self, index_count: u32) -> Self { self.index_count = index_count; self } #[inline] pub fn vertex_offset(mut self, vertex_offset: i32) -> Self { self.vertex_offset = vertex_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] 
#[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubmitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub wait_semaphore_count: u32, pub p_wait_semaphores: *const Semaphore, pub p_wait_dst_stage_mask: *const PipelineStageFlags, pub command_buffer_count: u32, pub p_command_buffers: *const CommandBuffer, pub signal_semaphore_count: u32, pub p_signal_semaphores: *const Semaphore, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubmitInfo<'_> {} unsafe impl Sync for SubmitInfo<'_> {} impl ::core::default::Default for SubmitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), wait_semaphore_count: u32::default(), p_wait_semaphores: ::core::ptr::null(), p_wait_dst_stage_mask: ::core::ptr::null(), command_buffer_count: u32::default(), p_command_buffers: ::core::ptr::null(), signal_semaphore_count: u32::default(), p_signal_semaphores: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubmitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBMIT_INFO; } pub unsafe trait ExtendsSubmitInfo {} impl<'a> SubmitInfo<'a> { #[inline] pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> Self { self.wait_semaphore_count = wait_semaphores.len() as _; self.p_wait_semaphores = wait_semaphores.as_ptr(); self } #[inline] pub fn wait_dst_stage_mask(mut self, wait_dst_stage_mask: &'a [PipelineStageFlags]) -> Self { self.wait_semaphore_count = wait_dst_stage_mask.len() as _; self.p_wait_dst_stage_mask = wait_dst_stage_mask.as_ptr(); self } #[inline] pub fn command_buffers(mut self, command_buffers: &'a [CommandBuffer]) -> Self { self.command_buffer_count = command_buffers.len() as _; self.p_command_buffers = command_buffers.as_ptr(); self } #[inline] pub fn signal_semaphores(mut self, signal_semaphores: &'a [Semaphore]) -> Self { self.signal_semaphore_count = signal_semaphores.len() as _; self.p_signal_semaphores = signal_semaphores.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayPropertiesKHR<'a> { pub display: DisplayKHR, pub display_name: *const c_char, pub physical_dimensions: Extent2D, pub physical_resolution: Extent2D, pub supported_transforms: SurfaceTransformFlagsKHR, pub plane_reorder_possible: Bool32, pub persistent_content: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayPropertiesKHR<'_> {} unsafe impl Sync for DisplayPropertiesKHR<'_> {} impl ::core::default::Default for DisplayPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { display: DisplayKHR::default(), display_name: ::core::ptr::null(), physical_dimensions: Extent2D::default(), physical_resolution: Extent2D::default(), supported_transforms: SurfaceTransformFlagsKHR::default(), plane_reorder_possible: Bool32::default(), persistent_content: Bool32::default(), _marker: PhantomData, } } } impl<'a> DisplayPropertiesKHR<'a> { #[inline] pub fn display(mut self, display: DisplayKHR) -> Self { self.display = display; self } #[inline] pub fn display_name(mut self, display_name: &'a CStr) -> Self { self.display_name = display_name.as_ptr(); self } #[inline] pub unsafe fn display_name_as_c_str(&self) -> Option<&CStr> { if self.display_name.is_null() { None } else { Some(CStr::from_ptr(self.display_name)) } } #[inline] pub fn physical_dimensions(mut self, physical_dimensions: Extent2D) -> Self { self.physical_dimensions = physical_dimensions; self } #[inline] pub fn physical_resolution(mut self, physical_resolution: Extent2D) -> Self { self.physical_resolution = physical_resolution; self } #[inline] pub fn supported_transforms(mut self, supported_transforms: SurfaceTransformFlagsKHR) -> Self { self.supported_transforms = supported_transforms; self } #[inline] pub fn plane_reorder_possible(mut self, plane_reorder_possible: bool) -> Self { self.plane_reorder_possible = plane_reorder_possible.into(); self } #[inline] pub fn persistent_content(mut self, persistent_content: bool) -> Self { self.persistent_content = persistent_content.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DisplayPlanePropertiesKHR { pub current_display: DisplayKHR, pub current_stack_index: u32, } impl DisplayPlanePropertiesKHR { #[inline] pub fn current_display(mut self, current_display: DisplayKHR) -> Self { self.current_display = current_display; self } #[inline] pub fn current_stack_index(mut self, current_stack_index: u32) -> Self { self.current_stack_index = current_stack_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DisplayModeParametersKHR { pub visible_region: Extent2D, pub refresh_rate: u32, } impl DisplayModeParametersKHR { #[inline] pub fn visible_region(mut self, visible_region: Extent2D) -> Self { self.visible_region = visible_region; self } #[inline] pub fn refresh_rate(mut self, refresh_rate: u32) -> Self { self.refresh_rate = refresh_rate; 
self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DisplayModePropertiesKHR { pub display_mode: DisplayModeKHR, pub parameters: DisplayModeParametersKHR, } impl DisplayModePropertiesKHR { #[inline] pub fn display_mode(mut self, display_mode: DisplayModeKHR) -> Self { self.display_mode = display_mode; self } #[inline] pub fn parameters(mut self, parameters: DisplayModeParametersKHR) -> Self { self.parameters = parameters; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayModeCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DisplayModeCreateFlagsKHR, pub parameters: DisplayModeParametersKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayModeCreateInfoKHR<'_> {} unsafe impl Sync for DisplayModeCreateInfoKHR<'_> {} impl ::core::default::Default for DisplayModeCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DisplayModeCreateFlagsKHR::default(), parameters: DisplayModeParametersKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayModeCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_MODE_CREATE_INFO_KHR; } impl<'a> DisplayModeCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: DisplayModeCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn parameters(mut self, parameters: DisplayModeParametersKHR) -> Self { self.parameters = parameters; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DisplayPlaneCapabilitiesKHR { pub supported_alpha: DisplayPlaneAlphaFlagsKHR, pub min_src_position: Offset2D, pub max_src_position: Offset2D, pub min_src_extent: Extent2D, pub max_src_extent: Extent2D, pub min_dst_position: Offset2D, pub max_dst_position: Offset2D, pub min_dst_extent: Extent2D, pub max_dst_extent: Extent2D, } impl DisplayPlaneCapabilitiesKHR { #[inline] pub fn supported_alpha(mut self, supported_alpha: DisplayPlaneAlphaFlagsKHR) -> Self { self.supported_alpha = supported_alpha; self } #[inline] pub fn min_src_position(mut self, min_src_position: Offset2D) -> Self { self.min_src_position = min_src_position; self } #[inline] pub fn max_src_position(mut self, max_src_position: Offset2D) -> Self { self.max_src_position = max_src_position; self } #[inline] pub fn min_src_extent(mut self, min_src_extent: Extent2D) -> Self { self.min_src_extent = min_src_extent; self } #[inline] pub fn max_src_extent(mut self, max_src_extent: Extent2D) -> Self { self.max_src_extent = max_src_extent; self } #[inline] pub fn min_dst_position(mut self, min_dst_position: Offset2D) -> Self { self.min_dst_position = min_dst_position; self } #[inline] pub fn max_dst_position(mut self, max_dst_position: Offset2D) -> Self { self.max_dst_position = max_dst_position; self } #[inline] pub fn min_dst_extent(mut self, min_dst_extent: Extent2D) -> Self { self.min_dst_extent = min_dst_extent; self } #[inline] pub fn max_dst_extent(mut self, max_dst_extent: Extent2D) -> Self { self.max_dst_extent = max_dst_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplaySurfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DisplaySurfaceCreateFlagsKHR, pub 
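// Illustrative note (not part of the generated bindings): this struct is consumed by
// `vkCreateDisplayPlaneSurfaceKHR` (exposed through ash's VK_KHR_display extension wrapper) to
// create a `SurfaceKHR` directly on a display plane, pairing a `DisplayModeKHR` with a plane
// index and presentation parameters.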
display_mode: DisplayModeKHR, pub plane_index: u32, pub plane_stack_index: u32, pub transform: SurfaceTransformFlagsKHR, pub global_alpha: f32, pub alpha_mode: DisplayPlaneAlphaFlagsKHR, pub image_extent: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplaySurfaceCreateInfoKHR<'_> {} unsafe impl Sync for DisplaySurfaceCreateInfoKHR<'_> {} impl ::core::default::Default for DisplaySurfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DisplaySurfaceCreateFlagsKHR::default(), display_mode: DisplayModeKHR::default(), plane_index: u32::default(), plane_stack_index: u32::default(), transform: SurfaceTransformFlagsKHR::default(), global_alpha: f32::default(), alpha_mode: DisplayPlaneAlphaFlagsKHR::default(), image_extent: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplaySurfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_SURFACE_CREATE_INFO_KHR; } impl<'a> DisplaySurfaceCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: DisplaySurfaceCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn display_mode(mut self, display_mode: DisplayModeKHR) -> Self { self.display_mode = display_mode; self } #[inline] pub fn plane_index(mut self, plane_index: u32) -> Self { self.plane_index = plane_index; self } #[inline] pub fn plane_stack_index(mut self, plane_stack_index: u32) -> Self { self.plane_stack_index = plane_stack_index; self } #[inline] pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self { self.transform = transform; self } #[inline] pub fn global_alpha(mut self, global_alpha: f32) -> Self { self.global_alpha = global_alpha; self } #[inline] pub fn alpha_mode(mut self, alpha_mode: DisplayPlaneAlphaFlagsKHR) -> Self { self.alpha_mode = alpha_mode; self } #[inline] pub fn image_extent(mut self, image_extent: Extent2D) -> Self { self.image_extent = image_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayPresentInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_rect: Rect2D, pub dst_rect: Rect2D, pub persistent: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayPresentInfoKHR<'_> {} unsafe impl Sync for DisplayPresentInfoKHR<'_> {} impl ::core::default::Default for DisplayPresentInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_rect: Rect2D::default(), dst_rect: Rect2D::default(), persistent: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayPresentInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PRESENT_INFO_KHR; } unsafe impl ExtendsPresentInfoKHR for DisplayPresentInfoKHR<'_> {} impl<'a> DisplayPresentInfoKHR<'a> { #[inline] pub fn src_rect(mut self, src_rect: Rect2D) -> Self { self.src_rect = src_rect; self } #[inline] pub fn dst_rect(mut self, dst_rect: Rect2D) -> Self { self.dst_rect = dst_rect; self } #[inline] pub fn persistent(mut self, persistent: bool) -> Self { self.persistent = persistent.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SurfaceCapabilitiesKHR { pub min_image_count: u32, pub max_image_count: u32, pub current_extent: Extent2D, pub min_image_extent: Extent2D, pub max_image_extent: Extent2D, pub 
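// Illustrative sketch (not part of the generated bindings): `SurfaceCapabilitiesKHR` is
// returned by `vkGetPhysicalDeviceSurfaceCapabilitiesKHR` via ash's VK_KHR_surface extension
// wrapper rather than built by applications. Assuming a surface-extension instance named
// `surface_loader` plus `physical_device` and `surface` in caller code:
//
//     let caps = unsafe {
//         surface_loader.get_physical_device_surface_capabilities(physical_device, surface)
//     }?;
//     let current_extent = caps.current_extent;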
max_image_array_layers: u32, pub supported_transforms: SurfaceTransformFlagsKHR, pub current_transform: SurfaceTransformFlagsKHR, pub supported_composite_alpha: CompositeAlphaFlagsKHR, pub supported_usage_flags: ImageUsageFlags, } impl SurfaceCapabilitiesKHR { #[inline] pub fn min_image_count(mut self, min_image_count: u32) -> Self { self.min_image_count = min_image_count; self } #[inline] pub fn max_image_count(mut self, max_image_count: u32) -> Self { self.max_image_count = max_image_count; self } #[inline] pub fn current_extent(mut self, current_extent: Extent2D) -> Self { self.current_extent = current_extent; self } #[inline] pub fn min_image_extent(mut self, min_image_extent: Extent2D) -> Self { self.min_image_extent = min_image_extent; self } #[inline] pub fn max_image_extent(mut self, max_image_extent: Extent2D) -> Self { self.max_image_extent = max_image_extent; self } #[inline] pub fn max_image_array_layers(mut self, max_image_array_layers: u32) -> Self { self.max_image_array_layers = max_image_array_layers; self } #[inline] pub fn supported_transforms(mut self, supported_transforms: SurfaceTransformFlagsKHR) -> Self { self.supported_transforms = supported_transforms; self } #[inline] pub fn current_transform(mut self, current_transform: SurfaceTransformFlagsKHR) -> Self { self.current_transform = current_transform; self } #[inline] pub fn supported_composite_alpha( mut self, supported_composite_alpha: CompositeAlphaFlagsKHR, ) -> Self { self.supported_composite_alpha = supported_composite_alpha; self } #[inline] pub fn supported_usage_flags(mut self, supported_usage_flags: ImageUsageFlags) -> Self { self.supported_usage_flags = supported_usage_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AndroidSurfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: AndroidSurfaceCreateFlagsKHR, pub window: *mut ANativeWindow, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AndroidSurfaceCreateInfoKHR<'_> {} unsafe impl Sync for AndroidSurfaceCreateInfoKHR<'_> {} impl ::core::default::Default for AndroidSurfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: AndroidSurfaceCreateFlagsKHR::default(), window: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AndroidSurfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_SURFACE_CREATE_INFO_KHR; } impl<'a> AndroidSurfaceCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: AndroidSurfaceCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn window(mut self, window: *mut ANativeWindow) -> Self { self.window = window; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ViSurfaceCreateInfoNN<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ViSurfaceCreateFlagsNN, pub window: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ViSurfaceCreateInfoNN<'_> {} unsafe impl Sync for ViSurfaceCreateInfoNN<'_> {} impl ::core::default::Default for ViSurfaceCreateInfoNN<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ViSurfaceCreateFlagsNN::default(), window: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ViSurfaceCreateInfoNN<'a> { const STRUCTURE_TYPE: 
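// Descriptive note: `TaggedStructure` associates each `sType`-carrying struct with its
// `StructureType` constant; the `Default` impls in this module use it to pre-fill `s_type`, so
// a correctly tagged value is obtained from `::default()` without further setup.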
StructureType = StructureType::VI_SURFACE_CREATE_INFO_NN; } impl<'a> ViSurfaceCreateInfoNN<'a> { #[inline] pub fn flags(mut self, flags: ViSurfaceCreateFlagsNN) -> Self { self.flags = flags; self } #[inline] pub fn window(mut self, window: *mut c_void) -> Self { self.window = window; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct WaylandSurfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: WaylandSurfaceCreateFlagsKHR, pub display: *mut wl_display, pub surface: *mut wl_surface, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for WaylandSurfaceCreateInfoKHR<'_> {} unsafe impl Sync for WaylandSurfaceCreateInfoKHR<'_> {} impl ::core::default::Default for WaylandSurfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: WaylandSurfaceCreateFlagsKHR::default(), display: ::core::ptr::null_mut(), surface: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for WaylandSurfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WAYLAND_SURFACE_CREATE_INFO_KHR; } impl<'a> WaylandSurfaceCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: WaylandSurfaceCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn display(mut self, display: *mut wl_display) -> Self { self.display = display; self } #[inline] pub fn surface(mut self, surface: *mut wl_surface) -> Self { self.surface = surface; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct Win32SurfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: Win32SurfaceCreateFlagsKHR, pub hinstance: HINSTANCE, pub hwnd: HWND, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for Win32SurfaceCreateInfoKHR<'_> {} unsafe impl Sync for Win32SurfaceCreateInfoKHR<'_> {} impl ::core::default::Default for Win32SurfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: Win32SurfaceCreateFlagsKHR::default(), hinstance: unsafe { ::core::mem::zeroed() }, hwnd: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for Win32SurfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WIN32_SURFACE_CREATE_INFO_KHR; } impl<'a> Win32SurfaceCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: Win32SurfaceCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn hinstance(mut self, hinstance: HINSTANCE) -> Self { self.hinstance = hinstance; self } #[inline] pub fn hwnd(mut self, hwnd: HWND) -> Self { self.hwnd = hwnd; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct XlibSurfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: XlibSurfaceCreateFlagsKHR, pub dpy: *mut Display, pub window: Window, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for XlibSurfaceCreateInfoKHR<'_> {} unsafe impl Sync for XlibSurfaceCreateInfoKHR<'_> {} impl ::core::default::Default for XlibSurfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: XlibSurfaceCreateFlagsKHR::default(), dpy: ::core::ptr::null_mut(), window: Window::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure 
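// Illustrative note (not part of the generated bindings): the platform surface create infos in
// this block (Wayland, Win32, Xlib, Xcb, ...) wrap raw window-system handles; in practice they
// are usually filled in by windowing glue such as the `ash-window` crate rather than
// constructed field-by-field.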
for XlibSurfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::XLIB_SURFACE_CREATE_INFO_KHR; } impl<'a> XlibSurfaceCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: XlibSurfaceCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn dpy(mut self, dpy: *mut Display) -> Self { self.dpy = dpy; self } #[inline] pub fn window(mut self, window: Window) -> Self { self.window = window; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct XcbSurfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: XcbSurfaceCreateFlagsKHR, pub connection: *mut xcb_connection_t, pub window: xcb_window_t, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for XcbSurfaceCreateInfoKHR<'_> {} unsafe impl Sync for XcbSurfaceCreateInfoKHR<'_> {} impl ::core::default::Default for XcbSurfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: XcbSurfaceCreateFlagsKHR::default(), connection: ::core::ptr::null_mut(), window: xcb_window_t::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for XcbSurfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::XCB_SURFACE_CREATE_INFO_KHR; } impl<'a> XcbSurfaceCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: XcbSurfaceCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn connection(mut self, connection: *mut xcb_connection_t) -> Self { self.connection = connection; self } #[inline] pub fn window(mut self, window: xcb_window_t) -> Self { self.window = window; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DirectFBSurfaceCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DirectFBSurfaceCreateFlagsEXT, pub dfb: *mut IDirectFB, pub surface: *mut IDirectFBSurface, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DirectFBSurfaceCreateInfoEXT<'_> {} unsafe impl Sync for DirectFBSurfaceCreateInfoEXT<'_> {} impl ::core::default::Default for DirectFBSurfaceCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DirectFBSurfaceCreateFlagsEXT::default(), dfb: ::core::ptr::null_mut(), surface: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DirectFBSurfaceCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DIRECTFB_SURFACE_CREATE_INFO_EXT; } impl<'a> DirectFBSurfaceCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: DirectFBSurfaceCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn dfb(mut self, dfb: *mut IDirectFB) -> Self { self.dfb = dfb; self } #[inline] pub fn surface(mut self, surface: *mut IDirectFBSurface) -> Self { self.surface = surface; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImagePipeSurfaceCreateInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ImagePipeSurfaceCreateFlagsFUCHSIA, pub image_pipe_handle: zx_handle_t, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImagePipeSurfaceCreateInfoFUCHSIA<'_> {} unsafe impl Sync for ImagePipeSurfaceCreateInfoFUCHSIA<'_> {} impl ::core::default::Default for ImagePipeSurfaceCreateInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ImagePipeSurfaceCreateFlagsFUCHSIA::default(), image_pipe_handle: zx_handle_t::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImagePipeSurfaceCreateInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA; } impl<'a> ImagePipeSurfaceCreateInfoFUCHSIA<'a> { #[inline] pub fn flags(mut self, flags: ImagePipeSurfaceCreateFlagsFUCHSIA) -> Self { self.flags = flags; self } #[inline] pub fn image_pipe_handle(mut self, image_pipe_handle: zx_handle_t) -> Self { self.image_pipe_handle = image_pipe_handle; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct StreamDescriptorSurfaceCreateInfoGGP<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: StreamDescriptorSurfaceCreateFlagsGGP, pub stream_descriptor: GgpStreamDescriptor, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for StreamDescriptorSurfaceCreateInfoGGP<'_> {} unsafe impl Sync for StreamDescriptorSurfaceCreateInfoGGP<'_> {} impl ::core::default::Default for StreamDescriptorSurfaceCreateInfoGGP<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: StreamDescriptorSurfaceCreateFlagsGGP::default(), stream_descriptor: GgpStreamDescriptor::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for StreamDescriptorSurfaceCreateInfoGGP<'a> { const STRUCTURE_TYPE: StructureType = StructureType::STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP; } impl<'a> StreamDescriptorSurfaceCreateInfoGGP<'a> { #[inline] pub fn flags(mut self, flags: StreamDescriptorSurfaceCreateFlagsGGP) -> Self { self.flags = flags; self } #[inline] pub fn stream_descriptor(mut self, stream_descriptor: GgpStreamDescriptor) -> Self { self.stream_descriptor = stream_descriptor; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ScreenSurfaceCreateInfoQNX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ScreenSurfaceCreateFlagsQNX, pub context: *mut _screen_context, pub window: *mut _screen_window, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ScreenSurfaceCreateInfoQNX<'_> {} unsafe impl Sync for ScreenSurfaceCreateInfoQNX<'_> {} impl ::core::default::Default for ScreenSurfaceCreateInfoQNX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ScreenSurfaceCreateFlagsQNX::default(), context: ::core::ptr::null_mut(), window: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ScreenSurfaceCreateInfoQNX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SCREEN_SURFACE_CREATE_INFO_QNX; } impl<'a> ScreenSurfaceCreateInfoQNX<'a> { #[inline] pub fn flags(mut self, flags: ScreenSurfaceCreateFlagsQNX) -> Self { self.flags = flags; self } #[inline] pub fn context(mut self, context: &'a mut _screen_context) -> Self { self.context = context; self } #[inline] pub fn window(mut self, window: &'a mut _screen_window) -> Self { self.window = window; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)] #[doc = ""] #[must_use] pub struct SurfaceFormatKHR { pub format: Format, pub color_space: ColorSpaceKHR, } impl SurfaceFormatKHR { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub 
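// Illustrative sketch (not part of the generated bindings): surface formats are queried rather
// than constructed. Assuming a surface-extension instance named `surface_loader` plus
// `physical_device` and `surface` in caller code:
//
//     let formats = unsafe {
//         surface_loader.get_physical_device_surface_formats(physical_device, surface)
//     }?;
//     let chosen = formats
//         .iter()
//         .copied()
//         .find(|f| {
//             f.format == vk::Format::B8G8R8A8_SRGB
//                 && f.color_space == vk::ColorSpaceKHR::SRGB_NONLINEAR
//         })
//         .unwrap_or(formats[0]);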
fn color_space(mut self, color_space: ColorSpaceKHR) -> Self { self.color_space = color_space; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: SwapchainCreateFlagsKHR, pub surface: SurfaceKHR, pub min_image_count: u32, pub image_format: Format, pub image_color_space: ColorSpaceKHR, pub image_extent: Extent2D, pub image_array_layers: u32, pub image_usage: ImageUsageFlags, pub image_sharing_mode: SharingMode, pub queue_family_index_count: u32, pub p_queue_family_indices: *const u32, pub pre_transform: SurfaceTransformFlagsKHR, pub composite_alpha: CompositeAlphaFlagsKHR, pub present_mode: PresentModeKHR, pub clipped: Bool32, pub old_swapchain: SwapchainKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainCreateInfoKHR<'_> {} unsafe impl Sync for SwapchainCreateInfoKHR<'_> {} impl ::core::default::Default for SwapchainCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: SwapchainCreateFlagsKHR::default(), surface: SurfaceKHR::default(), min_image_count: u32::default(), image_format: Format::default(), image_color_space: ColorSpaceKHR::default(), image_extent: Extent2D::default(), image_array_layers: u32::default(), image_usage: ImageUsageFlags::default(), image_sharing_mode: SharingMode::default(), queue_family_index_count: u32::default(), p_queue_family_indices: ::core::ptr::null(), pre_transform: SurfaceTransformFlagsKHR::default(), composite_alpha: CompositeAlphaFlagsKHR::default(), present_mode: PresentModeKHR::default(), clipped: Bool32::default(), old_swapchain: SwapchainKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_CREATE_INFO_KHR; } pub unsafe trait ExtendsSwapchainCreateInfoKHR {} impl<'a> SwapchainCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: SwapchainCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn surface(mut self, surface: SurfaceKHR) -> Self { self.surface = surface; self } #[inline] pub fn min_image_count(mut self, min_image_count: u32) -> Self { self.min_image_count = min_image_count; self } #[inline] pub fn image_format(mut self, image_format: Format) -> Self { self.image_format = image_format; self } #[inline] pub fn image_color_space(mut self, image_color_space: ColorSpaceKHR) -> Self { self.image_color_space = image_color_space; self } #[inline] pub fn image_extent(mut self, image_extent: Extent2D) -> Self { self.image_extent = image_extent; self } #[inline] pub fn image_array_layers(mut self, image_array_layers: u32) -> Self { self.image_array_layers = image_array_layers; self } #[inline] pub fn image_usage(mut self, image_usage: ImageUsageFlags) -> Self { self.image_usage = image_usage; self } #[inline] pub fn image_sharing_mode(mut self, image_sharing_mode: SharingMode) -> Self { self.image_sharing_mode = image_sharing_mode; self } #[inline] pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self { self.queue_family_index_count = queue_family_indices.len() as _; self.p_queue_family_indices = queue_family_indices.as_ptr(); self } #[inline] pub fn pre_transform(mut self, pre_transform: SurfaceTransformFlagsKHR) -> Self { self.pre_transform = pre_transform; self } #[inline] pub fn composite_alpha(mut self, composite_alpha: 
CompositeAlphaFlagsKHR) -> Self { self.composite_alpha = composite_alpha; self } #[inline] pub fn present_mode(mut self, present_mode: PresentModeKHR) -> Self { self.present_mode = present_mode; self } #[inline] pub fn clipped(mut self, clipped: bool) -> Self { self.clipped = clipped.into(); self } #[inline] pub fn old_swapchain(mut self, old_swapchain: SwapchainKHR) -> Self { self.old_swapchain = old_swapchain; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsSwapchainCreateInfoKHR + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PresentInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub wait_semaphore_count: u32, pub p_wait_semaphores: *const Semaphore, pub swapchain_count: u32, pub p_swapchains: *const SwapchainKHR, pub p_image_indices: *const u32, pub p_results: *mut Result, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PresentInfoKHR<'_> {} unsafe impl Sync for PresentInfoKHR<'_> {} impl ::core::default::Default for PresentInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), wait_semaphore_count: u32::default(), p_wait_semaphores: ::core::ptr::null(), swapchain_count: u32::default(), p_swapchains: ::core::ptr::null(), p_image_indices: ::core::ptr::null(), p_results: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PresentInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_INFO_KHR; } pub unsafe trait ExtendsPresentInfoKHR {} impl<'a> PresentInfoKHR<'a> { #[inline] pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> Self { self.wait_semaphore_count = wait_semaphores.len() as _; self.p_wait_semaphores = wait_semaphores.as_ptr(); self } #[inline] pub fn swapchains(mut self, swapchains: &'a [SwapchainKHR]) -> Self { self.swapchain_count = swapchains.len() as _; self.p_swapchains = swapchains.as_ptr(); self } #[inline] pub fn image_indices(mut self, image_indices: &'a [u32]) -> Self { self.swapchain_count = image_indices.len() as _; self.p_image_indices = image_indices.as_ptr(); self } #[inline] pub fn results(mut self, results: &'a mut [Result]) -> Self { self.swapchain_count = results.len() as _; self.p_results = results.as_mut_ptr(); self }
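// NOTE (editorial, illustrative only -- not part of the generated bindings): the slice-based
// setters above fill both the `*_count` and `p_*` fields from a borrowed slice, so a present
// call can be assembled without touching the raw pointer members. A minimal sketch, assuming
// `render_finished`, `swapchain` and `image_index` are placeholder handles obtained elsewhere:
//
//     let wait_semaphores = [render_finished];
//     let swapchains = [swapchain];
//     let image_indices = [image_index];
//     let present_info = PresentInfoKHR::default()
//         .wait_semaphores(&wait_semaphores)
//         .swapchains(&swapchains)
//         .image_indices(&image_indices);
//     // The borrowed slices must outlive `present_info`; the `'a` lifetime parameter enforces this.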
#[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsPresentInfoKHR + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugReportCallbackCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DebugReportFlagsEXT, pub pfn_callback: PFN_vkDebugReportCallbackEXT, pub p_user_data: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugReportCallbackCreateInfoEXT<'_> {} unsafe impl Sync for DebugReportCallbackCreateInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for DebugReportCallbackCreateInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DebugReportCallbackCreateInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("flags", &self.flags) .field("pfn_callback", &(self.pfn_callback.map(|x| x as *const ()))) .field("p_user_data", &self.p_user_data) .finish() } } impl ::core::default::Default for DebugReportCallbackCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DebugReportFlagsEXT::default(), pfn_callback: PFN_vkDebugReportCallbackEXT::default(), p_user_data: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugReportCallbackCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT; } unsafe impl ExtendsInstanceCreateInfo for DebugReportCallbackCreateInfoEXT<'_> {} impl<'a> DebugReportCallbackCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: DebugReportFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn pfn_callback(mut self, pfn_callback: PFN_vkDebugReportCallbackEXT) -> Self { self.pfn_callback = pfn_callback; self } #[inline] pub fn user_data(mut self, user_data: *mut c_void) -> Self { self.p_user_data = user_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ValidationFlagsEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub disabled_validation_check_count: u32, pub p_disabled_validation_checks: *const ValidationCheckEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ValidationFlagsEXT<'_> {} unsafe impl Sync for ValidationFlagsEXT<'_> {} impl ::core::default::Default for ValidationFlagsEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), disabled_validation_check_count: u32::default(), p_disabled_validation_checks: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ValidationFlagsEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VALIDATION_FLAGS_EXT; } unsafe impl ExtendsInstanceCreateInfo for ValidationFlagsEXT<'_> {} impl<'a> ValidationFlagsEXT<'a> { #[inline] pub fn disabled_validation_checks( mut self, disabled_validation_checks: &'a [ValidationCheckEXT], ) -> Self { self.disabled_validation_check_count = disabled_validation_checks.len() as _; self.p_disabled_validation_checks = disabled_validation_checks.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug",
derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ValidationFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub enabled_validation_feature_count: u32, pub p_enabled_validation_features: *const ValidationFeatureEnableEXT, pub disabled_validation_feature_count: u32, pub p_disabled_validation_features: *const ValidationFeatureDisableEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ValidationFeaturesEXT<'_> {} unsafe impl Sync for ValidationFeaturesEXT<'_> {} impl ::core::default::Default for ValidationFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), enabled_validation_feature_count: u32::default(), p_enabled_validation_features: ::core::ptr::null(), disabled_validation_feature_count: u32::default(), p_disabled_validation_features: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ValidationFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VALIDATION_FEATURES_EXT; } unsafe impl ExtendsInstanceCreateInfo for ValidationFeaturesEXT<'_> {} impl<'a> ValidationFeaturesEXT<'a> { #[inline] pub fn enabled_validation_features( mut self, enabled_validation_features: &'a [ValidationFeatureEnableEXT], ) -> Self { self.enabled_validation_feature_count = enabled_validation_features.len() as _; self.p_enabled_validation_features = enabled_validation_features.as_ptr(); self } #[inline] pub fn disabled_validation_features( mut self, disabled_validation_features: &'a [ValidationFeatureDisableEXT], ) -> Self { self.disabled_validation_feature_count = disabled_validation_features.len() as _; self.p_disabled_validation_features = disabled_validation_features.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LayerSettingsCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub setting_count: u32, pub p_settings: *const LayerSettingEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LayerSettingsCreateInfoEXT<'_> {} unsafe impl Sync for LayerSettingsCreateInfoEXT<'_> {} impl ::core::default::Default for LayerSettingsCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), setting_count: u32::default(), p_settings: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for LayerSettingsCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::LAYER_SETTINGS_CREATE_INFO_EXT; } unsafe impl ExtendsInstanceCreateInfo for LayerSettingsCreateInfoEXT<'_> {} impl<'a> LayerSettingsCreateInfoEXT<'a> { #[inline] pub fn settings(mut self, settings: &'a [LayerSettingEXT<'a>]) -> Self { self.setting_count = settings.len() as _; self.p_settings = settings.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LayerSettingEXT<'a> { pub p_layer_name: *const c_char, pub p_setting_name: *const c_char, pub ty: LayerSettingTypeEXT, pub value_count: u32, pub p_values: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LayerSettingEXT<'_> {} unsafe impl Sync for LayerSettingEXT<'_> {} impl ::core::default::Default for LayerSettingEXT<'_> { #[inline] fn default() -> Self { Self { p_layer_name: ::core::ptr::null(), p_setting_name: ::core::ptr::null(), ty: LayerSettingTypeEXT::default(), value_count: u32::default(), p_values: ::core::ptr::null(), 
_marker: PhantomData, } } } impl<'a> LayerSettingEXT<'a> { #[inline] pub fn layer_name(mut self, layer_name: &'a CStr) -> Self { self.p_layer_name = layer_name.as_ptr(); self } #[inline] pub unsafe fn layer_name_as_c_str(&self) -> Option<&CStr> { if self.p_layer_name.is_null() { None } else { Some(CStr::from_ptr(self.p_layer_name)) } } #[inline] pub fn setting_name(mut self, setting_name: &'a CStr) -> Self { self.p_setting_name = setting_name.as_ptr(); self } #[inline] pub unsafe fn setting_name_as_c_str(&self) -> Option<&CStr> { if self.p_setting_name.is_null() { None } else { Some(CStr::from_ptr(self.p_setting_name)) } } #[inline] pub fn ty(mut self, ty: LayerSettingTypeEXT) -> Self { self.ty = ty; self } #[inline] pub fn values(mut self, values: &'a [u8]) -> Self { self.value_count = values.len() as _; self.p_values = values.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRasterizationStateRasterizationOrderAMD<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub rasterization_order: RasterizationOrderAMD, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationStateRasterizationOrderAMD<'_> {} unsafe impl Sync for PipelineRasterizationStateRasterizationOrderAMD<'_> {} impl ::core::default::Default for PipelineRasterizationStateRasterizationOrderAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), rasterization_order: RasterizationOrderAMD::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationStateRasterizationOrderAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD; } unsafe impl ExtendsPipelineRasterizationStateCreateInfo for PipelineRasterizationStateRasterizationOrderAMD<'_> { } impl<'a> PipelineRasterizationStateRasterizationOrderAMD<'a> { #[inline] pub fn rasterization_order(mut self, rasterization_order: RasterizationOrderAMD) -> Self { self.rasterization_order = rasterization_order; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugMarkerObjectNameInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub object_type: DebugReportObjectTypeEXT, pub object: u64, pub p_object_name: *const c_char, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugMarkerObjectNameInfoEXT<'_> {} unsafe impl Sync for DebugMarkerObjectNameInfoEXT<'_> {} impl ::core::default::Default for DebugMarkerObjectNameInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), object_type: DebugReportObjectTypeEXT::default(), object: u64::default(), p_object_name: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugMarkerObjectNameInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_MARKER_OBJECT_NAME_INFO_EXT; } impl<'a> DebugMarkerObjectNameInfoEXT<'a> { #[inline] pub fn object_type(mut self, object_type: DebugReportObjectTypeEXT) -> Self { self.object_type = object_type; self } #[inline] pub fn object(mut self, object: u64) -> Self { self.object = object; self } #[inline] pub fn object_name(mut self, object_name: &'a CStr) -> Self { self.p_object_name = object_name.as_ptr(); self } #[inline] pub unsafe fn object_name_as_c_str(&self) -> Option<&CStr> { if self.p_object_name.is_null() { None } else { 
Some(CStr::from_ptr(self.p_object_name)) } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugMarkerObjectTagInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub object_type: DebugReportObjectTypeEXT, pub object: u64, pub tag_name: u64, pub tag_size: usize, pub p_tag: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugMarkerObjectTagInfoEXT<'_> {} unsafe impl Sync for DebugMarkerObjectTagInfoEXT<'_> {} impl ::core::default::Default for DebugMarkerObjectTagInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), object_type: DebugReportObjectTypeEXT::default(), object: u64::default(), tag_name: u64::default(), tag_size: usize::default(), p_tag: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugMarkerObjectTagInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_MARKER_OBJECT_TAG_INFO_EXT; } impl<'a> DebugMarkerObjectTagInfoEXT<'a> { #[inline] pub fn object_type(mut self, object_type: DebugReportObjectTypeEXT) -> Self { self.object_type = object_type; self } #[inline] pub fn object(mut self, object: u64) -> Self { self.object = object; self } #[inline] pub fn tag_name(mut self, tag_name: u64) -> Self { self.tag_name = tag_name; self } #[inline] pub fn tag(mut self, tag: &'a [u8]) -> Self { self.tag_size = tag.len(); self.p_tag = tag.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugMarkerMarkerInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_marker_name: *const c_char, pub color: [f32; 4], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugMarkerMarkerInfoEXT<'_> {} unsafe impl Sync for DebugMarkerMarkerInfoEXT<'_> {} impl ::core::default::Default for DebugMarkerMarkerInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_marker_name: ::core::ptr::null(), color: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugMarkerMarkerInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_MARKER_MARKER_INFO_EXT; } impl<'a> DebugMarkerMarkerInfoEXT<'a> { #[inline] pub fn marker_name(mut self, marker_name: &'a CStr) -> Self { self.p_marker_name = marker_name.as_ptr(); self } #[inline] pub unsafe fn marker_name_as_c_str(&self) -> Option<&CStr> { if self.p_marker_name.is_null() { None } else { Some(CStr::from_ptr(self.p_marker_name)) } } #[inline] pub fn color(mut self, color: [f32; 4]) -> Self { self.color = color; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DedicatedAllocationImageCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub dedicated_allocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DedicatedAllocationImageCreateInfoNV<'_> {} unsafe impl Sync for DedicatedAllocationImageCreateInfoNV<'_> {} impl ::core::default::Default for DedicatedAllocationImageCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), dedicated_allocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DedicatedAllocationImageCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV; } unsafe impl ExtendsImageCreateInfo for DedicatedAllocationImageCreateInfoNV<'_> {} impl<'a> DedicatedAllocationImageCreateInfoNV<'a> { #[inline] pub fn dedicated_allocation(mut self, dedicated_allocation: bool) -> Self { self.dedicated_allocation = dedicated_allocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DedicatedAllocationBufferCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub dedicated_allocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DedicatedAllocationBufferCreateInfoNV<'_> {} unsafe impl Sync for DedicatedAllocationBufferCreateInfoNV<'_> {} impl ::core::default::Default for DedicatedAllocationBufferCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), dedicated_allocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DedicatedAllocationBufferCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV; } unsafe impl ExtendsBufferCreateInfo for DedicatedAllocationBufferCreateInfoNV<'_> {} impl<'a> DedicatedAllocationBufferCreateInfoNV<'a> { #[inline] pub fn dedicated_allocation(mut self, dedicated_allocation: bool) -> Self { self.dedicated_allocation = dedicated_allocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DedicatedAllocationMemoryAllocateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub buffer: Buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DedicatedAllocationMemoryAllocateInfoNV<'_> {} unsafe impl Sync for DedicatedAllocationMemoryAllocateInfoNV<'_> {} impl ::core::default::Default for DedicatedAllocationMemoryAllocateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), buffer: Buffer::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DedicatedAllocationMemoryAllocateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV; } unsafe impl ExtendsMemoryAllocateInfo for DedicatedAllocationMemoryAllocateInfoNV<'_> {} impl<'a> DedicatedAllocationMemoryAllocateInfoNV<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ExternalImageFormatPropertiesNV { pub image_format_properties: ImageFormatProperties, pub external_memory_features: ExternalMemoryFeatureFlagsNV, pub export_from_imported_handle_types: ExternalMemoryHandleTypeFlagsNV, pub compatible_handle_types: ExternalMemoryHandleTypeFlagsNV, } impl ExternalImageFormatPropertiesNV { #[inline] pub fn image_format_properties( mut self, image_format_properties: ImageFormatProperties, ) -> Self { self.image_format_properties = image_format_properties; self } #[inline] pub fn external_memory_features( mut self, external_memory_features: ExternalMemoryFeatureFlagsNV, ) -> Self { self.external_memory_features = external_memory_features; self } #[inline] pub fn export_from_imported_handle_types( mut self, 
export_from_imported_handle_types: ExternalMemoryHandleTypeFlagsNV, ) -> Self { self.export_from_imported_handle_types = export_from_imported_handle_types; self } #[inline] pub fn compatible_handle_types( mut self, compatible_handle_types: ExternalMemoryHandleTypeFlagsNV, ) -> Self { self.compatible_handle_types = compatible_handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalMemoryImageCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: ExternalMemoryHandleTypeFlagsNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalMemoryImageCreateInfoNV<'_> {} unsafe impl Sync for ExternalMemoryImageCreateInfoNV<'_> {} impl ::core::default::Default for ExternalMemoryImageCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalMemoryHandleTypeFlagsNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalMemoryImageCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV; } unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoNV<'_> {} impl<'a> ExternalMemoryImageCreateInfoNV<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlagsNV) -> Self { self.handle_types = handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMemoryAllocateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: ExternalMemoryHandleTypeFlagsNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMemoryAllocateInfoNV<'_> {} unsafe impl Sync for ExportMemoryAllocateInfoNV<'_> {} impl ::core::default::Default for ExportMemoryAllocateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalMemoryHandleTypeFlagsNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMemoryAllocateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_ALLOCATE_INFO_NV; } unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoNV<'_> {} impl<'a> ExportMemoryAllocateInfoNV<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlagsNV) -> Self { self.handle_types = handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMemoryWin32HandleInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalMemoryHandleTypeFlagsNV, pub handle: HANDLE, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMemoryWin32HandleInfoNV<'_> {} unsafe impl Sync for ImportMemoryWin32HandleInfoNV<'_> {} impl ::core::default::Default for ImportMemoryWin32HandleInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalMemoryHandleTypeFlagsNV::default(), handle: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMemoryWin32HandleInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_WIN32_HANDLE_INFO_NV; } unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoNV<'_> {} impl<'a> ImportMemoryWin32HandleInfoNV<'a> { #[inline] pub fn 
handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlagsNV) -> Self { self.handle_type = handle_type; self } #[inline] pub fn handle(mut self, handle: HANDLE) -> Self { self.handle = handle; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMemoryWin32HandleInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_attributes: *const SECURITY_ATTRIBUTES, pub dw_access: DWORD, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMemoryWin32HandleInfoNV<'_> {} unsafe impl Sync for ExportMemoryWin32HandleInfoNV<'_> {} impl ::core::default::Default for ExportMemoryWin32HandleInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_attributes: ::core::ptr::null(), dw_access: DWORD::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMemoryWin32HandleInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_WIN32_HANDLE_INFO_NV; } unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoNV<'_> {} impl<'a> ExportMemoryWin32HandleInfoNV<'a> { #[inline] pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self { self.p_attributes = attributes; self } #[inline] pub fn dw_access(mut self, dw_access: DWORD) -> Self { self.dw_access = dw_access; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct Win32KeyedMutexAcquireReleaseInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acquire_count: u32, pub p_acquire_syncs: *const DeviceMemory, pub p_acquire_keys: *const u64, pub p_acquire_timeout_milliseconds: *const u32, pub release_count: u32, pub p_release_syncs: *const DeviceMemory, pub p_release_keys: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for Win32KeyedMutexAcquireReleaseInfoNV<'_> {} unsafe impl Sync for Win32KeyedMutexAcquireReleaseInfoNV<'_> {} impl ::core::default::Default for Win32KeyedMutexAcquireReleaseInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acquire_count: u32::default(), p_acquire_syncs: ::core::ptr::null(), p_acquire_keys: ::core::ptr::null(), p_acquire_timeout_milliseconds: ::core::ptr::null(), release_count: u32::default(), p_release_syncs: ::core::ptr::null(), p_release_keys: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for Win32KeyedMutexAcquireReleaseInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV; } unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNV<'_> {} unsafe impl ExtendsSubmitInfo2 for Win32KeyedMutexAcquireReleaseInfoNV<'_> {} impl<'a> Win32KeyedMutexAcquireReleaseInfoNV<'a> { #[inline] pub fn acquire_syncs(mut self, acquire_syncs: &'a [DeviceMemory]) -> Self { self.acquire_count = acquire_syncs.len() as _; self.p_acquire_syncs = acquire_syncs.as_ptr(); self } #[inline] pub fn acquire_keys(mut self, acquire_keys: &'a [u64]) -> Self { self.acquire_count = acquire_keys.len() as _; self.p_acquire_keys = acquire_keys.as_ptr(); self } #[inline] pub fn acquire_timeout_milliseconds(mut self, acquire_timeout_milliseconds: &'a [u32]) -> Self { self.acquire_count = acquire_timeout_milliseconds.len() as _; self.p_acquire_timeout_milliseconds = acquire_timeout_milliseconds.as_ptr(); self } #[inline] pub fn release_syncs(mut self, release_syncs: &'a 
[DeviceMemory]) -> Self { self.release_count = release_syncs.len() as _; self.p_release_syncs = release_syncs.as_ptr(); self } #[inline] pub fn release_keys(mut self, release_keys: &'a [u64]) -> Self { self.release_count = release_keys.len() as _; self.p_release_keys = release_keys.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_generated_commands: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_generated_commands: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'_> {} impl<'a> PhysicalDeviceDeviceGeneratedCommandsFeaturesNV<'a> { #[inline] pub fn device_generated_commands(mut self, device_generated_commands: bool) -> Self { self.device_generated_commands = device_generated_commands.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_generated_compute: Bool32, pub device_generated_compute_pipelines: Bool32, pub device_generated_compute_capture_replay: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_generated_compute: Bool32::default(), device_generated_compute_pipelines: Bool32::default(), device_generated_compute_capture_replay: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'_> {} impl<'a> PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV<'a> { #[inline] pub fn device_generated_compute(mut self, device_generated_compute: bool) -> Self { self.device_generated_compute = device_generated_compute.into(); self } #[inline] pub fn device_generated_compute_pipelines( mut self, device_generated_compute_pipelines: bool, ) -> Self { self.device_generated_compute_pipelines = device_generated_compute_pipelines.into(); self } #[inline] pub fn device_generated_compute_capture_replay( mut self, 
device_generated_compute_capture_replay: bool, ) -> Self { self.device_generated_compute_capture_replay = device_generated_compute_capture_replay.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DevicePrivateDataCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub private_data_slot_request_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DevicePrivateDataCreateInfo<'_> {} unsafe impl Sync for DevicePrivateDataCreateInfo<'_> {} impl ::core::default::Default for DevicePrivateDataCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), private_data_slot_request_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DevicePrivateDataCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_PRIVATE_DATA_CREATE_INFO; } unsafe impl ExtendsDeviceCreateInfo for DevicePrivateDataCreateInfo<'_> {} impl<'a> DevicePrivateDataCreateInfo<'a> { #[inline] pub fn private_data_slot_request_count(mut self, private_data_slot_request_count: u32) -> Self { self.private_data_slot_request_count = private_data_slot_request_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PrivateDataSlotCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PrivateDataSlotCreateFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PrivateDataSlotCreateInfo<'_> {} unsafe impl Sync for PrivateDataSlotCreateInfo<'_> {} impl ::core::default::Default for PrivateDataSlotCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PrivateDataSlotCreateFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PrivateDataSlotCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PRIVATE_DATA_SLOT_CREATE_INFO; } impl<'a> PrivateDataSlotCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: PrivateDataSlotCreateFlags) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePrivateDataFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub private_data: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePrivateDataFeatures<'_> {} unsafe impl Sync for PhysicalDevicePrivateDataFeatures<'_> {} impl ::core::default::Default for PhysicalDevicePrivateDataFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), private_data: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePrivateDataFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePrivateDataFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrivateDataFeatures<'_> {} impl<'a> PhysicalDevicePrivateDataFeatures<'a> { #[inline] pub fn private_data(mut self, private_data: bool) -> Self { self.private_data = private_data.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub 
max_graphics_shader_group_count: u32, pub max_indirect_sequence_count: u32, pub max_indirect_commands_token_count: u32, pub max_indirect_commands_stream_count: u32, pub max_indirect_commands_token_offset: u32, pub max_indirect_commands_stream_stride: u32, pub min_sequences_count_buffer_offset_alignment: u32, pub min_sequences_index_buffer_offset_alignment: u32, pub min_indirect_commands_buffer_offset_alignment: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_graphics_shader_group_count: u32::default(), max_indirect_sequence_count: u32::default(), max_indirect_commands_token_count: u32::default(), max_indirect_commands_stream_count: u32::default(), max_indirect_commands_token_offset: u32::default(), max_indirect_commands_stream_stride: u32::default(), min_sequences_count_buffer_offset_alignment: u32::default(), min_sequences_index_buffer_offset_alignment: u32::default(), min_indirect_commands_buffer_offset_alignment: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'_> { } impl<'a> PhysicalDeviceDeviceGeneratedCommandsPropertiesNV<'a> { #[inline] pub fn max_graphics_shader_group_count(mut self, max_graphics_shader_group_count: u32) -> Self { self.max_graphics_shader_group_count = max_graphics_shader_group_count; self } #[inline] pub fn max_indirect_sequence_count(mut self, max_indirect_sequence_count: u32) -> Self { self.max_indirect_sequence_count = max_indirect_sequence_count; self } #[inline] pub fn max_indirect_commands_token_count( mut self, max_indirect_commands_token_count: u32, ) -> Self { self.max_indirect_commands_token_count = max_indirect_commands_token_count; self } #[inline] pub fn max_indirect_commands_stream_count( mut self, max_indirect_commands_stream_count: u32, ) -> Self { self.max_indirect_commands_stream_count = max_indirect_commands_stream_count; self } #[inline] pub fn max_indirect_commands_token_offset( mut self, max_indirect_commands_token_offset: u32, ) -> Self { self.max_indirect_commands_token_offset = max_indirect_commands_token_offset; self } #[inline] pub fn max_indirect_commands_stream_stride( mut self, max_indirect_commands_stream_stride: u32, ) -> Self { self.max_indirect_commands_stream_stride = max_indirect_commands_stream_stride; self } #[inline] pub fn min_sequences_count_buffer_offset_alignment( mut self, min_sequences_count_buffer_offset_alignment: u32, ) -> Self { self.min_sequences_count_buffer_offset_alignment = min_sequences_count_buffer_offset_alignment; self } #[inline] pub fn min_sequences_index_buffer_offset_alignment( mut self, min_sequences_index_buffer_offset_alignment: u32, ) -> Self { self.min_sequences_index_buffer_offset_alignment = min_sequences_index_buffer_offset_alignment; self } #[inline] pub fn min_indirect_commands_buffer_offset_alignment( mut self, min_indirect_commands_buffer_offset_alignment: u32, ) -> Self { self.min_indirect_commands_buffer_offset_alignment = 
min_indirect_commands_buffer_offset_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiDrawPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_multi_draw_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiDrawPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMultiDrawPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMultiDrawPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_multi_draw_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiDrawPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiDrawPropertiesEXT<'_> {} impl<'a> PhysicalDeviceMultiDrawPropertiesEXT<'a> { #[inline] pub fn max_multi_draw_count(mut self, max_multi_draw_count: u32) -> Self { self.max_multi_draw_count = max_multi_draw_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GraphicsShaderGroupCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stage_count: u32, pub p_stages: *const PipelineShaderStageCreateInfo<'a>, pub p_vertex_input_state: *const PipelineVertexInputStateCreateInfo<'a>, pub p_tessellation_state: *const PipelineTessellationStateCreateInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GraphicsShaderGroupCreateInfoNV<'_> {} unsafe impl Sync for GraphicsShaderGroupCreateInfoNV<'_> {} impl ::core::default::Default for GraphicsShaderGroupCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stage_count: u32::default(), p_stages: ::core::ptr::null(), p_vertex_input_state: ::core::ptr::null(), p_tessellation_state: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GraphicsShaderGroupCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_SHADER_GROUP_CREATE_INFO_NV; } impl<'a> GraphicsShaderGroupCreateInfoNV<'a> { #[inline] pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo<'a>]) -> Self { self.stage_count = stages.len() as _; self.p_stages = stages.as_ptr(); self } #[inline] pub fn vertex_input_state( mut self, vertex_input_state: &'a PipelineVertexInputStateCreateInfo<'a>, ) -> Self { self.p_vertex_input_state = vertex_input_state; self } #[inline] pub fn tessellation_state( mut self, tessellation_state: &'a PipelineTessellationStateCreateInfo<'a>, ) -> Self { self.p_tessellation_state = tessellation_state; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GraphicsPipelineShaderGroupsCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub group_count: u32, pub p_groups: *const GraphicsShaderGroupCreateInfoNV<'a>, pub pipeline_count: u32, pub p_pipelines: *const Pipeline, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GraphicsPipelineShaderGroupsCreateInfoNV<'_> {} unsafe impl Sync for GraphicsPipelineShaderGroupsCreateInfoNV<'_> {} impl ::core::default::Default for GraphicsPipelineShaderGroupsCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: 
::core::ptr::null(), group_count: u32::default(), p_groups: ::core::ptr::null(), pipeline_count: u32::default(), p_pipelines: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GraphicsPipelineShaderGroupsCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV; } unsafe impl ExtendsGraphicsPipelineCreateInfo for GraphicsPipelineShaderGroupsCreateInfoNV<'_> {} impl<'a> GraphicsPipelineShaderGroupsCreateInfoNV<'a> { #[inline] pub fn groups(mut self, groups: &'a [GraphicsShaderGroupCreateInfoNV<'a>]) -> Self { self.group_count = groups.len() as _; self.p_groups = groups.as_ptr(); self } #[inline] pub fn pipelines(mut self, pipelines: &'a [Pipeline]) -> Self { self.pipeline_count = pipelines.len() as _; self.p_pipelines = pipelines.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct BindShaderGroupIndirectCommandNV { pub group_index: u32, } impl BindShaderGroupIndirectCommandNV { #[inline] pub fn group_index(mut self, group_index: u32) -> Self { self.group_index = group_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct BindIndexBufferIndirectCommandNV { pub buffer_address: DeviceAddress, pub size: u32, pub index_type: IndexType, } impl BindIndexBufferIndirectCommandNV { #[inline] pub fn buffer_address(mut self, buffer_address: DeviceAddress) -> Self { self.buffer_address = buffer_address; self } #[inline] pub fn size(mut self, size: u32) -> Self { self.size = size; self } #[inline] pub fn index_type(mut self, index_type: IndexType) -> Self { self.index_type = index_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct BindVertexBufferIndirectCommandNV { pub buffer_address: DeviceAddress, pub size: u32, pub stride: u32, } impl BindVertexBufferIndirectCommandNV { #[inline] pub fn buffer_address(mut self, buffer_address: DeviceAddress) -> Self { self.buffer_address = buffer_address; self } #[inline] pub fn size(mut self, size: u32) -> Self { self.size = size; self } #[inline] pub fn stride(mut self, stride: u32) -> Self { self.stride = stride; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SetStateFlagsIndirectCommandNV { pub data: u32, } impl SetStateFlagsIndirectCommandNV { #[inline] pub fn data(mut self, data: u32) -> Self { self.data = data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct IndirectCommandsStreamNV { pub buffer: Buffer, pub offset: DeviceSize, } impl IndirectCommandsStreamNV { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct IndirectCommandsLayoutTokenNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub token_type: IndirectCommandsTokenTypeNV, pub stream: u32, pub offset: u32, pub vertex_binding_unit: u32, pub vertex_dynamic_stride: Bool32, pub pushconstant_pipeline_layout: PipelineLayout, pub pushconstant_shader_stage_flags: ShaderStageFlags, pub pushconstant_offset: u32, pub pushconstant_size: 
u32, pub indirect_state_flags: IndirectStateFlagsNV, pub index_type_count: u32, pub p_index_types: *const IndexType, pub p_index_type_values: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for IndirectCommandsLayoutTokenNV<'_> {} unsafe impl Sync for IndirectCommandsLayoutTokenNV<'_> {} impl ::core::default::Default for IndirectCommandsLayoutTokenNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), token_type: IndirectCommandsTokenTypeNV::default(), stream: u32::default(), offset: u32::default(), vertex_binding_unit: u32::default(), vertex_dynamic_stride: Bool32::default(), pushconstant_pipeline_layout: PipelineLayout::default(), pushconstant_shader_stage_flags: ShaderStageFlags::default(), pushconstant_offset: u32::default(), pushconstant_size: u32::default(), indirect_state_flags: IndirectStateFlagsNV::default(), index_type_count: u32::default(), p_index_types: ::core::ptr::null(), p_index_type_values: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for IndirectCommandsLayoutTokenNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::INDIRECT_COMMANDS_LAYOUT_TOKEN_NV; } impl<'a> IndirectCommandsLayoutTokenNV<'a> { #[inline] pub fn token_type(mut self, token_type: IndirectCommandsTokenTypeNV) -> Self { self.token_type = token_type; self } #[inline] pub fn stream(mut self, stream: u32) -> Self { self.stream = stream; self } #[inline] pub fn offset(mut self, offset: u32) -> Self { self.offset = offset; self } #[inline] pub fn vertex_binding_unit(mut self, vertex_binding_unit: u32) -> Self { self.vertex_binding_unit = vertex_binding_unit; self } #[inline] pub fn vertex_dynamic_stride(mut self, vertex_dynamic_stride: bool) -> Self { self.vertex_dynamic_stride = vertex_dynamic_stride.into(); self } #[inline] pub fn pushconstant_pipeline_layout( mut self, pushconstant_pipeline_layout: PipelineLayout, ) -> Self { self.pushconstant_pipeline_layout = pushconstant_pipeline_layout; self } #[inline] pub fn pushconstant_shader_stage_flags( mut self, pushconstant_shader_stage_flags: ShaderStageFlags, ) -> Self { self.pushconstant_shader_stage_flags = pushconstant_shader_stage_flags; self } #[inline] pub fn pushconstant_offset(mut self, pushconstant_offset: u32) -> Self { self.pushconstant_offset = pushconstant_offset; self } #[inline] pub fn pushconstant_size(mut self, pushconstant_size: u32) -> Self { self.pushconstant_size = pushconstant_size; self } #[inline] pub fn indirect_state_flags(mut self, indirect_state_flags: IndirectStateFlagsNV) -> Self { self.indirect_state_flags = indirect_state_flags; self } #[inline] pub fn index_types(mut self, index_types: &'a [IndexType]) -> Self { self.index_type_count = index_types.len() as _; self.p_index_types = index_types.as_ptr(); self } #[inline] pub fn index_type_values(mut self, index_type_values: &'a [u32]) -> Self { self.index_type_count = index_type_values.len() as _; self.p_index_type_values = index_type_values.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct IndirectCommandsLayoutCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: IndirectCommandsLayoutUsageFlagsNV, pub pipeline_bind_point: PipelineBindPoint, pub token_count: u32, pub p_tokens: *const IndirectCommandsLayoutTokenNV<'a>, pub stream_count: u32, pub p_stream_strides: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
IndirectCommandsLayoutCreateInfoNV<'_> {} unsafe impl Sync for IndirectCommandsLayoutCreateInfoNV<'_> {} impl ::core::default::Default for IndirectCommandsLayoutCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: IndirectCommandsLayoutUsageFlagsNV::default(), pipeline_bind_point: PipelineBindPoint::default(), token_count: u32::default(), p_tokens: ::core::ptr::null(), stream_count: u32::default(), p_stream_strides: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for IndirectCommandsLayoutCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV; } impl<'a> IndirectCommandsLayoutCreateInfoNV<'a> { #[inline] pub fn flags(mut self, flags: IndirectCommandsLayoutUsageFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn tokens(mut self, tokens: &'a [IndirectCommandsLayoutTokenNV<'a>]) -> Self { self.token_count = tokens.len() as _; self.p_tokens = tokens.as_ptr(); self } #[inline] pub fn stream_strides(mut self, stream_strides: &'a [u32]) -> Self { self.stream_count = stream_strides.len() as _; self.p_stream_strides = stream_strides.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GeneratedCommandsInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub pipeline_bind_point: PipelineBindPoint, pub pipeline: Pipeline, pub indirect_commands_layout: IndirectCommandsLayoutNV, pub stream_count: u32, pub p_streams: *const IndirectCommandsStreamNV, pub sequences_count: u32, pub preprocess_buffer: Buffer, pub preprocess_offset: DeviceSize, pub preprocess_size: DeviceSize, pub sequences_count_buffer: Buffer, pub sequences_count_offset: DeviceSize, pub sequences_index_buffer: Buffer, pub sequences_index_offset: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GeneratedCommandsInfoNV<'_> {} unsafe impl Sync for GeneratedCommandsInfoNV<'_> {} impl ::core::default::Default for GeneratedCommandsInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), pipeline_bind_point: PipelineBindPoint::default(), pipeline: Pipeline::default(), indirect_commands_layout: IndirectCommandsLayoutNV::default(), stream_count: u32::default(), p_streams: ::core::ptr::null(), sequences_count: u32::default(), preprocess_buffer: Buffer::default(), preprocess_offset: DeviceSize::default(), preprocess_size: DeviceSize::default(), sequences_count_buffer: Buffer::default(), sequences_count_offset: DeviceSize::default(), sequences_index_buffer: Buffer::default(), sequences_index_offset: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GeneratedCommandsInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GENERATED_COMMANDS_INFO_NV; } impl<'a> GeneratedCommandsInfoNV<'a> { #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn pipeline(mut self, pipeline: Pipeline) -> Self { self.pipeline = pipeline; self } #[inline] pub fn indirect_commands_layout( mut self, indirect_commands_layout: IndirectCommandsLayoutNV, ) -> Self { self.indirect_commands_layout = indirect_commands_layout; self } 
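// NOTE (editorial, illustrative only -- not part of the generated bindings): a rough sketch of
// how these setters combine for VK_NV_device_generated_commands, assuming `pipeline`, `layout`,
// `stream_buffer`, `preprocess_buffer` and `preprocess_size` are placeholder values created elsewhere:
//
//     let streams = [IndirectCommandsStreamNV::default()
//         .buffer(stream_buffer)
//         .offset(0)];
//     let info = GeneratedCommandsInfoNV::default()
//         .pipeline_bind_point(PipelineBindPoint::GRAPHICS)
//         .pipeline(pipeline)
//         .indirect_commands_layout(layout)
//         .streams(&streams)
//         .sequences_count(1)
//         .preprocess_buffer(preprocess_buffer)
//         .preprocess_size(preprocess_size);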
#[inline] pub fn streams(mut self, streams: &'a [IndirectCommandsStreamNV]) -> Self { self.stream_count = streams.len() as _; self.p_streams = streams.as_ptr(); self } #[inline] pub fn sequences_count(mut self, sequences_count: u32) -> Self { self.sequences_count = sequences_count; self } #[inline] pub fn preprocess_buffer(mut self, preprocess_buffer: Buffer) -> Self { self.preprocess_buffer = preprocess_buffer; self } #[inline] pub fn preprocess_offset(mut self, preprocess_offset: DeviceSize) -> Self { self.preprocess_offset = preprocess_offset; self } #[inline] pub fn preprocess_size(mut self, preprocess_size: DeviceSize) -> Self { self.preprocess_size = preprocess_size; self } #[inline] pub fn sequences_count_buffer(mut self, sequences_count_buffer: Buffer) -> Self { self.sequences_count_buffer = sequences_count_buffer; self } #[inline] pub fn sequences_count_offset(mut self, sequences_count_offset: DeviceSize) -> Self { self.sequences_count_offset = sequences_count_offset; self } #[inline] pub fn sequences_index_buffer(mut self, sequences_index_buffer: Buffer) -> Self { self.sequences_index_buffer = sequences_index_buffer; self } #[inline] pub fn sequences_index_offset(mut self, sequences_index_offset: DeviceSize) -> Self { self.sequences_index_offset = sequences_index_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GeneratedCommandsMemoryRequirementsInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub pipeline_bind_point: PipelineBindPoint, pub pipeline: Pipeline, pub indirect_commands_layout: IndirectCommandsLayoutNV, pub max_sequences_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GeneratedCommandsMemoryRequirementsInfoNV<'_> {} unsafe impl Sync for GeneratedCommandsMemoryRequirementsInfoNV<'_> {} impl ::core::default::Default for GeneratedCommandsMemoryRequirementsInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), pipeline_bind_point: PipelineBindPoint::default(), pipeline: Pipeline::default(), indirect_commands_layout: IndirectCommandsLayoutNV::default(), max_sequences_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GeneratedCommandsMemoryRequirementsInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV; } impl<'a> GeneratedCommandsMemoryRequirementsInfoNV<'a> { #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn pipeline(mut self, pipeline: Pipeline) -> Self { self.pipeline = pipeline; self } #[inline] pub fn indirect_commands_layout( mut self, indirect_commands_layout: IndirectCommandsLayoutNV, ) -> Self { self.indirect_commands_layout = indirect_commands_layout; self } #[inline] pub fn max_sequences_count(mut self, max_sequences_count: u32) -> Self { self.max_sequences_count = max_sequences_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineIndirectDeviceAddressInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub pipeline_bind_point: PipelineBindPoint, pub pipeline: Pipeline, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineIndirectDeviceAddressInfoNV<'_> {} unsafe impl Sync for PipelineIndirectDeviceAddressInfoNV<'_> {} impl ::core::default::Default for 
PipelineIndirectDeviceAddressInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), pipeline_bind_point: PipelineBindPoint::default(), pipeline: Pipeline::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineIndirectDeviceAddressInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV; } impl<'a> PipelineIndirectDeviceAddressInfoNV<'a> { #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn pipeline(mut self, pipeline: Pipeline) -> Self { self.pipeline = pipeline; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct BindPipelineIndirectCommandNV { pub pipeline_address: DeviceAddress, } impl BindPipelineIndirectCommandNV { #[inline] pub fn pipeline_address(mut self, pipeline_address: DeviceAddress) -> Self { self.pipeline_address = pipeline_address; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFeatures2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub features: PhysicalDeviceFeatures, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFeatures2<'_> {} unsafe impl Sync for PhysicalDeviceFeatures2<'_> {} impl ::core::default::Default for PhysicalDeviceFeatures2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), features: PhysicalDeviceFeatures::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFeatures2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FEATURES_2; } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFeatures2<'_> {} pub unsafe trait ExtendsPhysicalDeviceFeatures2 {} impl<'a> PhysicalDeviceFeatures2<'a> { #[inline] pub fn features(mut self, features: PhysicalDeviceFeatures) -> Self { self.features = features; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceProperties2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub properties: PhysicalDeviceProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceProperties2<'_> {} unsafe impl Sync for PhysicalDeviceProperties2<'_> {} impl ::core::default::Default for PhysicalDeviceProperties2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), properties: PhysicalDeviceProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceProperties2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROPERTIES_2; } pub unsafe trait ExtendsPhysicalDeviceProperties2 {} impl<'a> PhysicalDeviceProperties2<'a> { #[inline] pub fn properties(mut self, properties: PhysicalDeviceProperties) -> Self { self.properties = properties; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FormatProperties2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format_properties: FormatProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FormatProperties2<'_> {} unsafe impl Sync for FormatProperties2<'_> {} impl ::core::default::Default for FormatProperties2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format_properties: FormatProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FormatProperties2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FORMAT_PROPERTIES_2; } pub unsafe trait ExtendsFormatProperties2 {} impl<'a> FormatProperties2<'a> { #[inline] pub fn format_properties(mut self, format_properties: FormatProperties) -> Self { self.format_properties = format_properties; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageFormatProperties2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_format_properties: ImageFormatProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageFormatProperties2<'_> {} unsafe impl Sync for ImageFormatProperties2<'_> {} impl ::core::default::Default for ImageFormatProperties2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_format_properties: ImageFormatProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageFormatProperties2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_FORMAT_PROPERTIES_2; } pub unsafe trait ExtendsImageFormatProperties2 {} impl<'a> ImageFormatProperties2<'a> { #[inline] pub fn image_format_properties( mut self, image_format_properties: ImageFormatProperties, ) -> Self { self.image_format_properties = image_format_properties; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageFormatInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub format: Format, pub ty: ImageType, pub tiling: ImageTiling, pub usage: ImageUsageFlags, pub flags: ImageCreateFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageFormatInfo2<'_> {} unsafe impl Sync for PhysicalDeviceImageFormatInfo2<'_> {} impl ::core::default::Default for PhysicalDeviceImageFormatInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), format: Format::default(), ty: ImageType::default(), tiling: ImageTiling::default(), usage: ImageUsageFlags::default(), flags: ImageCreateFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageFormatInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2; } pub unsafe trait ExtendsPhysicalDeviceImageFormatInfo2 {} impl<'a> PhysicalDeviceImageFormatInfo2<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn ty(mut self, ty: ImageType) -> Self { self.ty = ty; self } #[inline] pub fn tiling(mut self, tiling: ImageTiling) -> Self { self.tiling = tiling; self } #[inline] pub fn usage(mut self, usage: ImageUsageFlags) 
-> Self { self.usage = usage; self } #[inline] pub fn flags(mut self, flags: ImageCreateFlags) -> Self { self.flags = flags; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueueFamilyProperties2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub queue_family_properties: QueueFamilyProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueueFamilyProperties2<'_> {} unsafe impl Sync for QueueFamilyProperties2<'_> {} impl ::core::default::Default for QueueFamilyProperties2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), queue_family_properties: QueueFamilyProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueueFamilyProperties2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_PROPERTIES_2; } pub unsafe trait ExtendsQueueFamilyProperties2 {} impl<'a> QueueFamilyProperties2<'a> { #[inline] pub fn queue_family_properties( mut self, queue_family_properties: QueueFamilyProperties, ) -> Self { self.queue_family_properties = queue_family_properties; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMemoryProperties2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_properties: PhysicalDeviceMemoryProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMemoryProperties2<'_> {} unsafe impl Sync for PhysicalDeviceMemoryProperties2<'_> {} impl ::core::default::Default for PhysicalDeviceMemoryProperties2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_properties: PhysicalDeviceMemoryProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMemoryProperties2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2; } pub unsafe trait ExtendsPhysicalDeviceMemoryProperties2 {} impl<'a> PhysicalDeviceMemoryProperties2<'a> { #[inline] pub fn memory_properties(mut self, memory_properties: PhysicalDeviceMemoryProperties) -> Self { self.memory_properties = memory_properties; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SparseImageFormatProperties2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub properties: SparseImageFormatProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SparseImageFormatProperties2<'_> {} unsafe impl Sync for SparseImageFormatProperties2<'_> {} impl ::core::default::Default for SparseImageFormatProperties2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), properties: SparseImageFormatProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SparseImageFormatProperties2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SPARSE_IMAGE_FORMAT_PROPERTIES_2; } impl<'a> SparseImageFormatProperties2<'a> { #[inline] pub fn properties(mut self, properties: SparseImageFormatProperties) -> Self { self.properties = properties; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSparseImageFormatInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub format: Format, pub ty: ImageType, pub samples: SampleCountFlags, pub usage: ImageUsageFlags, pub tiling: ImageTiling, pub _marker: PhantomData<&'a ()>, } 
unsafe impl Send for PhysicalDeviceSparseImageFormatInfo2<'_> {} unsafe impl Sync for PhysicalDeviceSparseImageFormatInfo2<'_> {} impl ::core::default::Default for PhysicalDeviceSparseImageFormatInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), format: Format::default(), ty: ImageType::default(), samples: SampleCountFlags::default(), usage: ImageUsageFlags::default(), tiling: ImageTiling::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSparseImageFormatInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2; } impl<'a> PhysicalDeviceSparseImageFormatInfo2<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn ty(mut self, ty: ImageType) -> Self { self.ty = ty; self } #[inline] pub fn samples(mut self, samples: SampleCountFlags) -> Self { self.samples = samples; self } #[inline] pub fn usage(mut self, usage: ImageUsageFlags) -> Self { self.usage = usage; self } #[inline] pub fn tiling(mut self, tiling: ImageTiling) -> Self { self.tiling = tiling; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePushDescriptorPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_push_descriptors: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePushDescriptorPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDevicePushDescriptorPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePushDescriptorPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_push_descriptors: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePushDescriptorPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePushDescriptorPropertiesKHR<'_> {} impl<'a> PhysicalDevicePushDescriptorPropertiesKHR<'a> { #[inline] pub fn max_push_descriptors(mut self, max_push_descriptors: u32) -> Self { self.max_push_descriptors = max_push_descriptors; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ConformanceVersion { pub major: u8, pub minor: u8, pub subminor: u8, pub patch: u8, } impl ConformanceVersion { #[inline] pub fn major(mut self, major: u8) -> Self { self.major = major; self } #[inline] pub fn minor(mut self, minor: u8) -> Self { self.minor = minor; self } #[inline] pub fn subminor(mut self, subminor: u8) -> Self { self.subminor = subminor; self } #[inline] pub fn patch(mut self, patch: u8) -> Self { self.patch = patch; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDriverProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub driver_id: DriverId, pub driver_name: [c_char; MAX_DRIVER_NAME_SIZE], pub driver_info: [c_char; MAX_DRIVER_INFO_SIZE], pub conformance_version: ConformanceVersion, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDriverProperties<'_> {} unsafe impl Sync for PhysicalDeviceDriverProperties<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for PhysicalDeviceDriverProperties<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { 
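// `driver_name` and `driver_info` are fixed-size `c_char` arrays, so this Debug impl
// prints them through the `driver_name_as_c_str()` / `driver_info_as_c_str()` accessors
// defined below rather than dumping the raw arrays.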
fmt.debug_struct("PhysicalDeviceDriverProperties") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("driver_id", &self.driver_id) .field("driver_name", &self.driver_name_as_c_str()) .field("driver_info", &self.driver_info_as_c_str()) .field("conformance_version", &self.conformance_version) .finish() } } impl ::core::default::Default for PhysicalDeviceDriverProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), driver_id: DriverId::default(), driver_name: unsafe { ::core::mem::zeroed() }, driver_info: unsafe { ::core::mem::zeroed() }, conformance_version: ConformanceVersion::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDriverProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DRIVER_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDriverProperties<'_> {} impl<'a> PhysicalDeviceDriverProperties<'a> { #[inline] pub fn driver_id(mut self, driver_id: DriverId) -> Self { self.driver_id = driver_id; self } #[inline] pub fn driver_name( mut self, driver_name: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.driver_name, driver_name).map(|()| self) } #[inline] pub fn driver_name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.driver_name) } #[inline] pub fn driver_info( mut self, driver_info: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.driver_info, driver_info).map(|()| self) } #[inline] pub fn driver_info_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.driver_info) } #[inline] pub fn conformance_version(mut self, conformance_version: ConformanceVersion) -> Self { self.conformance_version = conformance_version; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PresentRegionsKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain_count: u32, pub p_regions: *const PresentRegionKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PresentRegionsKHR<'_> {} unsafe impl Sync for PresentRegionsKHR<'_> {} impl ::core::default::Default for PresentRegionsKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PresentRegionsKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_REGIONS_KHR; } unsafe impl ExtendsPresentInfoKHR for PresentRegionsKHR<'_> {} impl<'a> PresentRegionsKHR<'a> { #[inline] pub fn regions(mut self, regions: &'a [PresentRegionKHR<'a>]) -> Self { self.swapchain_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PresentRegionKHR<'a> { pub rectangle_count: u32, pub p_rectangles: *const RectLayerKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PresentRegionKHR<'_> {} unsafe impl Sync for PresentRegionKHR<'_> {} impl ::core::default::Default for PresentRegionKHR<'_> { #[inline] fn default() -> Self { Self { rectangle_count: u32::default(), p_rectangles: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> PresentRegionKHR<'a> { #[inline] pub fn rectangles(mut self, rectangles: &'a 
[RectLayerKHR]) -> Self { self.rectangle_count = rectangles.len() as _; self.p_rectangles = rectangles.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct RectLayerKHR { pub offset: Offset2D, pub extent: Extent2D, pub layer: u32, } impl RectLayerKHR { #[inline] pub fn offset(mut self, offset: Offset2D) -> Self { self.offset = offset; self } #[inline] pub fn extent(mut self, extent: Extent2D) -> Self { self.extent = extent; self } #[inline] pub fn layer(mut self, layer: u32) -> Self { self.layer = layer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVariablePointersFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub variable_pointers_storage_buffer: Bool32, pub variable_pointers: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVariablePointersFeatures<'_> {} unsafe impl Sync for PhysicalDeviceVariablePointersFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceVariablePointersFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), variable_pointers_storage_buffer: Bool32::default(), variable_pointers: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVariablePointersFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVariablePointersFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVariablePointersFeatures<'_> {} impl<'a> PhysicalDeviceVariablePointersFeatures<'a> { #[inline] pub fn variable_pointers_storage_buffer( mut self, variable_pointers_storage_buffer: bool, ) -> Self { self.variable_pointers_storage_buffer = variable_pointers_storage_buffer.into(); self } #[inline] pub fn variable_pointers(mut self, variable_pointers: bool) -> Self { self.variable_pointers = variable_pointers.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ExternalMemoryProperties { pub external_memory_features: ExternalMemoryFeatureFlags, pub export_from_imported_handle_types: ExternalMemoryHandleTypeFlags, pub compatible_handle_types: ExternalMemoryHandleTypeFlags, } impl ExternalMemoryProperties { #[inline] pub fn external_memory_features( mut self, external_memory_features: ExternalMemoryFeatureFlags, ) -> Self { self.external_memory_features = external_memory_features; self } #[inline] pub fn export_from_imported_handle_types( mut self, export_from_imported_handle_types: ExternalMemoryHandleTypeFlags, ) -> Self { self.export_from_imported_handle_types = export_from_imported_handle_types; self } #[inline] pub fn compatible_handle_types( mut self, compatible_handle_types: ExternalMemoryHandleTypeFlags, ) -> Self { self.compatible_handle_types = compatible_handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalImageFormatInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalImageFormatInfo<'_> {} unsafe impl Sync for PhysicalDeviceExternalImageFormatInfo<'_> {} impl ::core::default::Default for 
PhysicalDeviceExternalImageFormatInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalImageFormatInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO; } unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceExternalImageFormatInfo<'_> {} impl<'a> PhysicalDeviceExternalImageFormatInfo<'a> { #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalImageFormatProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub external_memory_properties: ExternalMemoryProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalImageFormatProperties<'_> {} unsafe impl Sync for ExternalImageFormatProperties<'_> {} impl ::core::default::Default for ExternalImageFormatProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), external_memory_properties: ExternalMemoryProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalImageFormatProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_IMAGE_FORMAT_PROPERTIES; } unsafe impl ExtendsImageFormatProperties2 for ExternalImageFormatProperties<'_> {} impl<'a> ExternalImageFormatProperties<'a> { #[inline] pub fn external_memory_properties( mut self, external_memory_properties: ExternalMemoryProperties, ) -> Self { self.external_memory_properties = external_memory_properties; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalBufferInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: BufferCreateFlags, pub usage: BufferUsageFlags, pub handle_type: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalBufferInfo<'_> {} unsafe impl Sync for PhysicalDeviceExternalBufferInfo<'_> {} impl ::core::default::Default for PhysicalDeviceExternalBufferInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: BufferCreateFlags::default(), usage: BufferUsageFlags::default(), handle_type: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalBufferInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO; } pub unsafe trait ExtendsPhysicalDeviceExternalBufferInfo {} impl<'a> PhysicalDeviceExternalBufferInfo<'a> { #[inline] pub fn flags(mut self, flags: BufferCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn usage(mut self, usage: BufferUsageFlags) -> Self { self.usage = usage; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalBufferProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub external_memory_properties: ExternalMemoryProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalBufferProperties<'_> {} unsafe impl Sync for ExternalBufferProperties<'_> {} impl ::core::default::Default for ExternalBufferProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), external_memory_properties: ExternalMemoryProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalBufferProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_BUFFER_PROPERTIES; } impl<'a> ExternalBufferProperties<'a> { #[inline] pub fn external_memory_properties( mut self, external_memory_properties: ExternalMemoryProperties, ) -> Self { self.external_memory_properties = external_memory_properties; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceIDProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_uuid: [u8; UUID_SIZE], pub driver_uuid: [u8; UUID_SIZE], pub device_luid: [u8; LUID_SIZE], pub device_node_mask: u32, pub device_luid_valid: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceIDProperties<'_> {} unsafe impl Sync for PhysicalDeviceIDProperties<'_> {} impl ::core::default::Default for PhysicalDeviceIDProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_uuid: unsafe { ::core::mem::zeroed() }, driver_uuid: unsafe { ::core::mem::zeroed() }, device_luid: unsafe { ::core::mem::zeroed() }, device_node_mask: u32::default(), device_luid_valid: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceIDProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ID_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceIDProperties<'_> {} impl<'a> PhysicalDeviceIDProperties<'a> { #[inline] pub fn device_uuid(mut self, device_uuid: [u8; UUID_SIZE]) -> Self { self.device_uuid = device_uuid; self } #[inline] pub fn driver_uuid(mut self, driver_uuid: [u8; UUID_SIZE]) -> Self { self.driver_uuid = driver_uuid; self } #[inline] pub fn device_luid(mut self, device_luid: [u8; LUID_SIZE]) -> Self { self.device_luid = device_luid; self } #[inline] pub fn device_node_mask(mut self, device_node_mask: u32) -> Self { self.device_node_mask = device_node_mask; self } #[inline] pub fn device_luid_valid(mut self, device_luid_valid: bool) -> Self { self.device_luid_valid = device_luid_valid.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalMemoryImageCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: 
ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalMemoryImageCreateInfo<'_> {} unsafe impl Sync for ExternalMemoryImageCreateInfo<'_> {} impl ::core::default::Default for ExternalMemoryImageCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalMemoryImageCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_IMAGE_CREATE_INFO; } unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfo<'_> {} impl<'a> ExternalMemoryImageCreateInfo<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlags) -> Self { self.handle_types = handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalMemoryBufferCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalMemoryBufferCreateInfo<'_> {} unsafe impl Sync for ExternalMemoryBufferCreateInfo<'_> {} impl ::core::default::Default for ExternalMemoryBufferCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalMemoryBufferCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_BUFFER_CREATE_INFO; } unsafe impl ExtendsBufferCreateInfo for ExternalMemoryBufferCreateInfo<'_> {} impl<'a> ExternalMemoryBufferCreateInfo<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlags) -> Self { self.handle_types = handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMemoryAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMemoryAllocateInfo<'_> {} unsafe impl Sync for ExportMemoryAllocateInfo<'_> {} impl ::core::default::Default for ExportMemoryAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMemoryAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_ALLOCATE_INFO; } unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfo<'_> {} impl<'a> ExportMemoryAllocateInfo<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlags) -> Self { self.handle_types = handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMemoryWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalMemoryHandleTypeFlags, pub handle: HANDLE, pub name: LPCWSTR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMemoryWin32HandleInfoKHR<'_> {} unsafe impl Sync for ImportMemoryWin32HandleInfoKHR<'_> {} impl ::core::default::Default for ImportMemoryWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalMemoryHandleTypeFlags::default(), handle: unsafe { ::core::mem::zeroed() }, name: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMemoryWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR; } unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoKHR<'_> {} impl<'a> ImportMemoryWin32HandleInfoKHR<'a> { #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn handle(mut self, handle: HANDLE) -> Self { self.handle = handle; self } #[inline] pub fn name(mut self, name: LPCWSTR) -> Self { self.name = name; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMemoryWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_attributes: *const SECURITY_ATTRIBUTES, pub dw_access: DWORD, pub name: LPCWSTR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMemoryWin32HandleInfoKHR<'_> {} unsafe impl Sync for ExportMemoryWin32HandleInfoKHR<'_> {} impl ::core::default::Default for ExportMemoryWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_attributes: ::core::ptr::null(), dw_access: DWORD::default(), name: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMemoryWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR; } unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoKHR<'_> {} impl<'a> ExportMemoryWin32HandleInfoKHR<'a> { #[inline] pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self { self.p_attributes = attributes; self } #[inline] pub fn dw_access(mut self, dw_access: DWORD) -> Self { self.dw_access = dw_access; self } #[inline] pub fn name(mut self, name: LPCWSTR) -> Self { self.name = name; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMemoryZirconHandleInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalMemoryHandleTypeFlags, pub handle: zx_handle_t, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMemoryZirconHandleInfoFUCHSIA<'_> {} unsafe impl Sync for ImportMemoryZirconHandleInfoFUCHSIA<'_> {} impl ::core::default::Default for ImportMemoryZirconHandleInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalMemoryHandleTypeFlags::default(), handle: zx_handle_t::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMemoryZirconHandleInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA; } unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryZirconHandleInfoFUCHSIA<'_> {} impl<'a> ImportMemoryZirconHandleInfoFUCHSIA<'a> { #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn handle(mut self, handle: zx_handle_t) -> Self { self.handle = handle; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct 
MemoryZirconHandlePropertiesFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_type_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryZirconHandlePropertiesFUCHSIA<'_> {} unsafe impl Sync for MemoryZirconHandlePropertiesFUCHSIA<'_> {} impl ::core::default::Default for MemoryZirconHandlePropertiesFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_type_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryZirconHandlePropertiesFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA; } impl<'a> MemoryZirconHandlePropertiesFUCHSIA<'a> { #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryGetZirconHandleInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub handle_type: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryGetZirconHandleInfoFUCHSIA<'_> {} unsafe impl Sync for MemoryGetZirconHandleInfoFUCHSIA<'_> {} impl ::core::default::Default for MemoryGetZirconHandleInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), handle_type: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryGetZirconHandleInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA; } impl<'a> MemoryGetZirconHandleInfoFUCHSIA<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryWin32HandlePropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_type_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryWin32HandlePropertiesKHR<'_> {} unsafe impl Sync for MemoryWin32HandlePropertiesKHR<'_> {} impl ::core::default::Default for MemoryWin32HandlePropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_type_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryWin32HandlePropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_WIN32_HANDLE_PROPERTIES_KHR; } impl<'a> MemoryWin32HandlePropertiesKHR<'a> { #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryGetWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub handle_type: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryGetWin32HandleInfoKHR<'_> {} unsafe impl Sync for MemoryGetWin32HandleInfoKHR<'_> {} impl ::core::default::Default for MemoryGetWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), handle_type: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryGetWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_WIN32_HANDLE_INFO_KHR; } impl<'a> MemoryGetWin32HandleInfoKHR<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMemoryFdInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalMemoryHandleTypeFlags, pub fd: c_int, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMemoryFdInfoKHR<'_> {} unsafe impl Sync for ImportMemoryFdInfoKHR<'_> {} impl ::core::default::Default for ImportMemoryFdInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalMemoryHandleTypeFlags::default(), fd: c_int::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMemoryFdInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_FD_INFO_KHR; } unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryFdInfoKHR<'_> {} impl<'a> ImportMemoryFdInfoKHR<'a> { #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn fd(mut self, fd: c_int) -> Self { self.fd = fd; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryFdPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_type_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryFdPropertiesKHR<'_> {} unsafe impl Sync for MemoryFdPropertiesKHR<'_> {} impl ::core::default::Default for MemoryFdPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_type_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryFdPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_FD_PROPERTIES_KHR; } impl<'a> MemoryFdPropertiesKHR<'a> { #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryGetFdInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub handle_type: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryGetFdInfoKHR<'_> {} unsafe impl Sync for MemoryGetFdInfoKHR<'_> {} impl ::core::default::Default for MemoryGetFdInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), handle_type: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryGetFdInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_FD_INFO_KHR; } impl<'a> MemoryGetFdInfoKHR<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self 
} #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct Win32KeyedMutexAcquireReleaseInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acquire_count: u32, pub p_acquire_syncs: *const DeviceMemory, pub p_acquire_keys: *const u64, pub p_acquire_timeouts: *const u32, pub release_count: u32, pub p_release_syncs: *const DeviceMemory, pub p_release_keys: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for Win32KeyedMutexAcquireReleaseInfoKHR<'_> {} unsafe impl Sync for Win32KeyedMutexAcquireReleaseInfoKHR<'_> {} impl ::core::default::Default for Win32KeyedMutexAcquireReleaseInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acquire_count: u32::default(), p_acquire_syncs: ::core::ptr::null(), p_acquire_keys: ::core::ptr::null(), p_acquire_timeouts: ::core::ptr::null(), release_count: u32::default(), p_release_syncs: ::core::ptr::null(), p_release_keys: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for Win32KeyedMutexAcquireReleaseInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR; } unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoKHR<'_> {} unsafe impl ExtendsSubmitInfo2 for Win32KeyedMutexAcquireReleaseInfoKHR<'_> {} impl<'a> Win32KeyedMutexAcquireReleaseInfoKHR<'a> { #[inline] pub fn acquire_syncs(mut self, acquire_syncs: &'a [DeviceMemory]) -> Self { self.acquire_count = acquire_syncs.len() as _; self.p_acquire_syncs = acquire_syncs.as_ptr(); self } #[inline] pub fn acquire_keys(mut self, acquire_keys: &'a [u64]) -> Self { self.acquire_count = acquire_keys.len() as _; self.p_acquire_keys = acquire_keys.as_ptr(); self } #[inline] pub fn acquire_timeouts(mut self, acquire_timeouts: &'a [u32]) -> Self { self.acquire_count = acquire_timeouts.len() as _; self.p_acquire_timeouts = acquire_timeouts.as_ptr(); self } #[inline] pub fn release_syncs(mut self, release_syncs: &'a [DeviceMemory]) -> Self { self.release_count = release_syncs.len() as _; self.p_release_syncs = release_syncs.as_ptr(); self } #[inline] pub fn release_keys(mut self, release_keys: &'a [u64]) -> Self { self.release_count = release_keys.len() as _; self.p_release_keys = release_keys.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalSemaphoreInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalSemaphoreInfo<'_> {} unsafe impl Sync for PhysicalDeviceExternalSemaphoreInfo<'_> {} impl ::core::default::Default for PhysicalDeviceExternalSemaphoreInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalSemaphoreHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalSemaphoreInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO; } pub unsafe trait ExtendsPhysicalDeviceExternalSemaphoreInfo {} impl<'a> PhysicalDeviceExternalSemaphoreInfo<'a> { #[inline] pub fn handle_type(mut self, handle_type: 
ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalSemaphoreProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags, pub compatible_handle_types: ExternalSemaphoreHandleTypeFlags, pub external_semaphore_features: ExternalSemaphoreFeatureFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalSemaphoreProperties<'_> {} unsafe impl Sync for ExternalSemaphoreProperties<'_> {} impl ::core::default::Default for ExternalSemaphoreProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags::default(), compatible_handle_types: ExternalSemaphoreHandleTypeFlags::default(), external_semaphore_features: ExternalSemaphoreFeatureFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalSemaphoreProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_SEMAPHORE_PROPERTIES; } impl<'a> ExternalSemaphoreProperties<'a> { #[inline] pub fn export_from_imported_handle_types( mut self, export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags, ) -> Self { self.export_from_imported_handle_types = export_from_imported_handle_types; self } #[inline] pub fn compatible_handle_types( mut self, compatible_handle_types: ExternalSemaphoreHandleTypeFlags, ) -> Self { self.compatible_handle_types = compatible_handle_types; self } #[inline] pub fn external_semaphore_features( mut self, external_semaphore_features: ExternalSemaphoreFeatureFlags, ) -> Self { self.external_semaphore_features = external_semaphore_features; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportSemaphoreCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: ExternalSemaphoreHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportSemaphoreCreateInfo<'_> {} unsafe impl Sync for ExportSemaphoreCreateInfo<'_> {} impl ::core::default::Default for ExportSemaphoreCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalSemaphoreHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportSemaphoreCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_SEMAPHORE_CREATE_INFO; } unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreCreateInfo<'_> {} impl<'a> ExportSemaphoreCreateInfo<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_types = handle_types; 
self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportSemaphoreWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub flags: SemaphoreImportFlags, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub handle: HANDLE, pub name: LPCWSTR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportSemaphoreWin32HandleInfoKHR<'_> {} unsafe impl Sync for ImportSemaphoreWin32HandleInfoKHR<'_> {} impl ::core::default::Default for ImportSemaphoreWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), flags: SemaphoreImportFlags::default(), handle_type: ExternalSemaphoreHandleTypeFlags::default(), handle: unsafe { ::core::mem::zeroed() }, name: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportSemaphoreWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR; } impl<'a> ImportSemaphoreWin32HandleInfoKHR<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn flags(mut self, flags: SemaphoreImportFlags) -> Self { self.flags = flags; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn handle(mut self, handle: HANDLE) -> Self { self.handle = handle; self } #[inline] pub fn name(mut self, name: LPCWSTR) -> Self { self.name = name; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportSemaphoreWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_attributes: *const SECURITY_ATTRIBUTES, pub dw_access: DWORD, pub name: LPCWSTR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportSemaphoreWin32HandleInfoKHR<'_> {} unsafe impl Sync for ExportSemaphoreWin32HandleInfoKHR<'_> {} impl ::core::default::Default for ExportSemaphoreWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_attributes: ::core::ptr::null(), dw_access: DWORD::default(), name: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportSemaphoreWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR; } unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreWin32HandleInfoKHR<'_> {} impl<'a> ExportSemaphoreWin32HandleInfoKHR<'a> { #[inline] pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self { self.p_attributes = attributes; self } #[inline] pub fn dw_access(mut self, dw_access: DWORD) -> Self { self.dw_access = dw_access; self } #[inline] pub fn name(mut self, name: LPCWSTR) -> Self { self.name = name; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct D3D12FenceSubmitInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub wait_semaphore_values_count: u32, pub p_wait_semaphore_values: *const u64, pub signal_semaphore_values_count: u32, pub p_signal_semaphore_values: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for D3D12FenceSubmitInfoKHR<'_> {} unsafe impl Sync for D3D12FenceSubmitInfoKHR<'_> {} impl 
::core::default::Default for D3D12FenceSubmitInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), wait_semaphore_values_count: u32::default(), p_wait_semaphore_values: ::core::ptr::null(), signal_semaphore_values_count: u32::default(), p_signal_semaphore_values: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for D3D12FenceSubmitInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::D3D12_FENCE_SUBMIT_INFO_KHR; } unsafe impl ExtendsSubmitInfo for D3D12FenceSubmitInfoKHR<'_> {} impl<'a> D3D12FenceSubmitInfoKHR<'a> { #[inline] pub fn wait_semaphore_values(mut self, wait_semaphore_values: &'a [u64]) -> Self { self.wait_semaphore_values_count = wait_semaphore_values.len() as _; self.p_wait_semaphore_values = wait_semaphore_values.as_ptr(); self } #[inline] pub fn signal_semaphore_values(mut self, signal_semaphore_values: &'a [u64]) -> Self { self.signal_semaphore_values_count = signal_semaphore_values.len() as _; self.p_signal_semaphore_values = signal_semaphore_values.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreGetWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreGetWin32HandleInfoKHR<'_> {} unsafe impl Sync for SemaphoreGetWin32HandleInfoKHR<'_> {} impl ::core::default::Default for SemaphoreGetWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), handle_type: ExternalSemaphoreHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreGetWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR; } impl<'a> SemaphoreGetWin32HandleInfoKHR<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportSemaphoreFdInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub flags: SemaphoreImportFlags, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub fd: c_int, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportSemaphoreFdInfoKHR<'_> {} unsafe impl Sync for ImportSemaphoreFdInfoKHR<'_> {} impl ::core::default::Default for ImportSemaphoreFdInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), flags: SemaphoreImportFlags::default(), handle_type: ExternalSemaphoreHandleTypeFlags::default(), fd: c_int::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportSemaphoreFdInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_SEMAPHORE_FD_INFO_KHR; } impl<'a> ImportSemaphoreFdInfoKHR<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn flags(mut self, flags: SemaphoreImportFlags) -> Self { self.flags = flags; self } #[inline] pub fn handle_type(mut self, handle_type: 
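// A sketch of attaching D3D12 fence values to a queue submission with the
// D3D12FenceSubmitInfoKHR builders above (assumptions: `device`, `queue`, a recorded
// `cmd_buf`, a `signal_semaphore` imported from a D3D12 fence, and the
// VK_KHR_external_semaphore_win32 extension enabled; the value is illustrative). The value
// array lengths must be zero or match the corresponding semaphore counts of the SubmitInfo
// being extended.
//
//     let cmd_bufs = [cmd_buf];
//     let signal_semaphores = [signal_semaphore];
//     let signal_values = [42u64];
//     let mut d3d12_values = vk::D3D12FenceSubmitInfoKHR::default()
//         .signal_semaphore_values(&signal_values);
//     let submit = vk::SubmitInfo::default()
//         .command_buffers(&cmd_bufs)
//         .signal_semaphores(&signal_semaphores)
//         .push_next(&mut d3d12_values);
//     unsafe { device.queue_submit(queue, &[submit], vk::Fence::null()) }.unwrap();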
ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn fd(mut self, fd: c_int) -> Self { self.fd = fd; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreGetFdInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreGetFdInfoKHR<'_> {} unsafe impl Sync for SemaphoreGetFdInfoKHR<'_> {} impl ::core::default::Default for SemaphoreGetFdInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), handle_type: ExternalSemaphoreHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreGetFdInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_GET_FD_INFO_KHR; } impl<'a> SemaphoreGetFdInfoKHR<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportSemaphoreZirconHandleInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub flags: SemaphoreImportFlags, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub zircon_handle: zx_handle_t, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportSemaphoreZirconHandleInfoFUCHSIA<'_> {} unsafe impl Sync for ImportSemaphoreZirconHandleInfoFUCHSIA<'_> {} impl ::core::default::Default for ImportSemaphoreZirconHandleInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), flags: SemaphoreImportFlags::default(), handle_type: ExternalSemaphoreHandleTypeFlags::default(), zircon_handle: zx_handle_t::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportSemaphoreZirconHandleInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA; } impl<'a> ImportSemaphoreZirconHandleInfoFUCHSIA<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn flags(mut self, flags: SemaphoreImportFlags) -> Self { self.flags = flags; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn zircon_handle(mut self, zircon_handle: zx_handle_t) -> Self { self.zircon_handle = zircon_handle; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreGetZirconHandleInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub handle_type: ExternalSemaphoreHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreGetZirconHandleInfoFUCHSIA<'_> {} unsafe impl Sync for SemaphoreGetZirconHandleInfoFUCHSIA<'_> {} impl ::core::default::Default for SemaphoreGetZirconHandleInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), handle_type: 
ExternalSemaphoreHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreGetZirconHandleInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA; } impl<'a> SemaphoreGetZirconHandleInfoFUCHSIA<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalFenceInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalFenceHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalFenceInfo<'_> {} unsafe impl Sync for PhysicalDeviceExternalFenceInfo<'_> {} impl ::core::default::Default for PhysicalDeviceExternalFenceInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalFenceHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalFenceInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO; } impl<'a> PhysicalDeviceExternalFenceInfo<'a> { #[inline] pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalFenceProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub export_from_imported_handle_types: ExternalFenceHandleTypeFlags, pub compatible_handle_types: ExternalFenceHandleTypeFlags, pub external_fence_features: ExternalFenceFeatureFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalFenceProperties<'_> {} unsafe impl Sync for ExternalFenceProperties<'_> {} impl ::core::default::Default for ExternalFenceProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), export_from_imported_handle_types: ExternalFenceHandleTypeFlags::default(), compatible_handle_types: ExternalFenceHandleTypeFlags::default(), external_fence_features: ExternalFenceFeatureFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalFenceProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_FENCE_PROPERTIES; } impl<'a> ExternalFenceProperties<'a> { #[inline] pub fn export_from_imported_handle_types( mut self, export_from_imported_handle_types: ExternalFenceHandleTypeFlags, ) -> Self { self.export_from_imported_handle_types = export_from_imported_handle_types; self } #[inline] pub fn compatible_handle_types( mut self, compatible_handle_types: ExternalFenceHandleTypeFlags, ) -> Self { self.compatible_handle_types = compatible_handle_types; self } #[inline] pub fn external_fence_features( mut self, external_fence_features: ExternalFenceFeatureFlags, ) -> Self { self.external_fence_features = external_fence_features; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportFenceCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_types: ExternalFenceHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
ExportFenceCreateInfo<'_> {} unsafe impl Sync for ExportFenceCreateInfo<'_> {} impl ::core::default::Default for ExportFenceCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_types: ExternalFenceHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportFenceCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_FENCE_CREATE_INFO; } unsafe impl ExtendsFenceCreateInfo for ExportFenceCreateInfo<'_> {} impl<'a> ExportFenceCreateInfo<'a> { #[inline] pub fn handle_types(mut self, handle_types: ExternalFenceHandleTypeFlags) -> Self { self.handle_types = handle_types; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportFenceWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fence: Fence, pub flags: FenceImportFlags, pub handle_type: ExternalFenceHandleTypeFlags, pub handle: HANDLE, pub name: LPCWSTR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportFenceWin32HandleInfoKHR<'_> {} unsafe impl Sync for ImportFenceWin32HandleInfoKHR<'_> {} impl ::core::default::Default for ImportFenceWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fence: Fence::default(), flags: FenceImportFlags::default(), handle_type: ExternalFenceHandleTypeFlags::default(), handle: unsafe { ::core::mem::zeroed() }, name: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportFenceWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_FENCE_WIN32_HANDLE_INFO_KHR; } impl<'a> ImportFenceWin32HandleInfoKHR<'a> { #[inline] pub fn fence(mut self, fence: Fence) -> Self { self.fence = fence; self } #[inline] pub fn flags(mut self, flags: FenceImportFlags) -> Self { self.flags = flags; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn handle(mut self, handle: HANDLE) -> Self { self.handle = handle; self } #[inline] pub fn name(mut self, name: LPCWSTR) -> Self { self.name = name; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportFenceWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_attributes: *const SECURITY_ATTRIBUTES, pub dw_access: DWORD, pub name: LPCWSTR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportFenceWin32HandleInfoKHR<'_> {} unsafe impl Sync for ExportFenceWin32HandleInfoKHR<'_> {} impl ::core::default::Default for ExportFenceWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_attributes: ::core::ptr::null(), dw_access: DWORD::default(), name: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportFenceWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_FENCE_WIN32_HANDLE_INFO_KHR; } unsafe impl ExtendsFenceCreateInfo for ExportFenceWin32HandleInfoKHR<'_> {} impl<'a> ExportFenceWin32HandleInfoKHR<'a> { #[inline] pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self { self.p_attributes = attributes; self } #[inline] pub fn dw_access(mut self, dw_access: DWORD) -> Self { self.dw_access = dw_access; self } #[inline] pub fn 
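// A minimal usage sketch for the external-fence structs above (same assumptions as the
// semaphore sketch earlier: `instance`, `device`, `pdev`): query ExternalFenceProperties,
// then chain ExportFenceCreateInfo onto FenceCreateInfo to create an exportable fence.
//
//     let info = vk::PhysicalDeviceExternalFenceInfo::default()
//         .handle_type(vk::ExternalFenceHandleTypeFlags::OPAQUE_FD);
//     let mut props = vk::ExternalFenceProperties::default();
//     unsafe {
//         instance.get_physical_device_external_fence_properties(pdev, &info, &mut props)
//     };
//     if props
//         .external_fence_features
//         .contains(vk::ExternalFenceFeatureFlags::EXPORTABLE)
//     {
//         let mut export = vk::ExportFenceCreateInfo::default()
//             .handle_types(vk::ExternalFenceHandleTypeFlags::OPAQUE_FD);
//         let create_info = vk::FenceCreateInfo::default().push_next(&mut export);
//         let _fence = unsafe { device.create_fence(&create_info, None) }.unwrap();
//     }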
name(mut self, name: LPCWSTR) -> Self { self.name = name; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FenceGetWin32HandleInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fence: Fence, pub handle_type: ExternalFenceHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FenceGetWin32HandleInfoKHR<'_> {} unsafe impl Sync for FenceGetWin32HandleInfoKHR<'_> {} impl ::core::default::Default for FenceGetWin32HandleInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fence: Fence::default(), handle_type: ExternalFenceHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FenceGetWin32HandleInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FENCE_GET_WIN32_HANDLE_INFO_KHR; } impl<'a> FenceGetWin32HandleInfoKHR<'a> { #[inline] pub fn fence(mut self, fence: Fence) -> Self { self.fence = fence; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportFenceFdInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fence: Fence, pub flags: FenceImportFlags, pub handle_type: ExternalFenceHandleTypeFlags, pub fd: c_int, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportFenceFdInfoKHR<'_> {} unsafe impl Sync for ImportFenceFdInfoKHR<'_> {} impl ::core::default::Default for ImportFenceFdInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fence: Fence::default(), flags: FenceImportFlags::default(), handle_type: ExternalFenceHandleTypeFlags::default(), fd: c_int::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportFenceFdInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_FENCE_FD_INFO_KHR; } impl<'a> ImportFenceFdInfoKHR<'a> { #[inline] pub fn fence(mut self, fence: Fence) -> Self { self.fence = fence; self } #[inline] pub fn flags(mut self, flags: FenceImportFlags) -> Self { self.flags = flags; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn fd(mut self, fd: c_int) -> Self { self.fd = fd; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FenceGetFdInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fence: Fence, pub handle_type: ExternalFenceHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FenceGetFdInfoKHR<'_> {} unsafe impl Sync for FenceGetFdInfoKHR<'_> {} impl ::core::default::Default for FenceGetFdInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fence: Fence::default(), handle_type: ExternalFenceHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FenceGetFdInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FENCE_GET_FD_INFO_KHR; } impl<'a> FenceGetFdInfoKHR<'a> { #[inline] pub fn fence(mut self, fence: Fence) -> Self { self.fence = fence; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] 
#[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiviewFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub multiview: Bool32, pub multiview_geometry_shader: Bool32, pub multiview_tessellation_shader: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiviewFeatures<'_> {} unsafe impl Sync for PhysicalDeviceMultiviewFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceMultiviewFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), multiview: Bool32::default(), multiview_geometry_shader: Bool32::default(), multiview_tessellation_shader: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiviewFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTIVIEW_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiviewFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewFeatures<'_> {} impl<'a> PhysicalDeviceMultiviewFeatures<'a> { #[inline] pub fn multiview(mut self, multiview: bool) -> Self { self.multiview = multiview.into(); self } #[inline] pub fn multiview_geometry_shader(mut self, multiview_geometry_shader: bool) -> Self { self.multiview_geometry_shader = multiview_geometry_shader.into(); self } #[inline] pub fn multiview_tessellation_shader(mut self, multiview_tessellation_shader: bool) -> Self { self.multiview_tessellation_shader = multiview_tessellation_shader.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiviewProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_multiview_view_count: u32, pub max_multiview_instance_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiviewProperties<'_> {} unsafe impl Sync for PhysicalDeviceMultiviewProperties<'_> {} impl ::core::default::Default for PhysicalDeviceMultiviewProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_multiview_view_count: u32::default(), max_multiview_instance_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiviewProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiviewProperties<'_> {} impl<'a> PhysicalDeviceMultiviewProperties<'a> { #[inline] pub fn max_multiview_view_count(mut self, max_multiview_view_count: u32) -> Self { self.max_multiview_view_count = max_multiview_view_count; self } #[inline] pub fn max_multiview_instance_index(mut self, max_multiview_instance_index: u32) -> Self { self.max_multiview_instance_index = max_multiview_instance_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassMultiviewCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub subpass_count: u32, pub p_view_masks: *const u32, pub dependency_count: u32, pub p_view_offsets: *const i32, pub correlation_mask_count: u32, pub p_correlation_masks: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassMultiviewCreateInfo<'_> {} unsafe impl Sync for RenderPassMultiviewCreateInfo<'_> {} impl 
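// A usage sketch for PhysicalDeviceMultiviewFeatures above (assuming `instance` and `pdev`
// as before): query support by chaining it onto PhysicalDeviceFeatures2, and enable the
// feature at device creation by chaining the same struct onto DeviceCreateInfo.
//
//     let mut multiview = vk::PhysicalDeviceMultiviewFeatures::default();
//     let mut features2 = vk::PhysicalDeviceFeatures2::default().push_next(&mut multiview);
//     unsafe { instance.get_physical_device_features2(pdev, &mut features2) };
//     if multiview.multiview == vk::TRUE {
//         let mut enable = vk::PhysicalDeviceMultiviewFeatures::default().multiview(true);
//         let device_create_info = vk::DeviceCreateInfo::default().push_next(&mut enable);
//         // ...add queue create infos, then call instance.create_device(pdev, &device_create_info, None)
//     }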
::core::default::Default for RenderPassMultiviewCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), subpass_count: u32::default(), p_view_masks: ::core::ptr::null(), dependency_count: u32::default(), p_view_offsets: ::core::ptr::null(), correlation_mask_count: u32::default(), p_correlation_masks: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassMultiviewCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_MULTIVIEW_CREATE_INFO; } unsafe impl ExtendsRenderPassCreateInfo for RenderPassMultiviewCreateInfo<'_> {} impl<'a> RenderPassMultiviewCreateInfo<'a> { #[inline] pub fn view_masks(mut self, view_masks: &'a [u32]) -> Self { self.subpass_count = view_masks.len() as _; self.p_view_masks = view_masks.as_ptr(); self } #[inline] pub fn view_offsets(mut self, view_offsets: &'a [i32]) -> Self { self.dependency_count = view_offsets.len() as _; self.p_view_offsets = view_offsets.as_ptr(); self } #[inline] pub fn correlation_masks(mut self, correlation_masks: &'a [u32]) -> Self { self.correlation_mask_count = correlation_masks.len() as _; self.p_correlation_masks = correlation_masks.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceCapabilities2EXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_image_count: u32, pub max_image_count: u32, pub current_extent: Extent2D, pub min_image_extent: Extent2D, pub max_image_extent: Extent2D, pub max_image_array_layers: u32, pub supported_transforms: SurfaceTransformFlagsKHR, pub current_transform: SurfaceTransformFlagsKHR, pub supported_composite_alpha: CompositeAlphaFlagsKHR, pub supported_usage_flags: ImageUsageFlags, pub supported_surface_counters: SurfaceCounterFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceCapabilities2EXT<'_> {} unsafe impl Sync for SurfaceCapabilities2EXT<'_> {} impl ::core::default::Default for SurfaceCapabilities2EXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_image_count: u32::default(), max_image_count: u32::default(), current_extent: Extent2D::default(), min_image_extent: Extent2D::default(), max_image_extent: Extent2D::default(), max_image_array_layers: u32::default(), supported_transforms: SurfaceTransformFlagsKHR::default(), current_transform: SurfaceTransformFlagsKHR::default(), supported_composite_alpha: CompositeAlphaFlagsKHR::default(), supported_usage_flags: ImageUsageFlags::default(), supported_surface_counters: SurfaceCounterFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceCapabilities2EXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_2_EXT; } impl<'a> SurfaceCapabilities2EXT<'a> { #[inline] pub fn min_image_count(mut self, min_image_count: u32) -> Self { self.min_image_count = min_image_count; self } #[inline] pub fn max_image_count(mut self, max_image_count: u32) -> Self { self.max_image_count = max_image_count; self } #[inline] pub fn current_extent(mut self, current_extent: Extent2D) -> Self { self.current_extent = current_extent; self } #[inline] pub fn min_image_extent(mut self, min_image_extent: Extent2D) -> Self { self.min_image_extent = min_image_extent; self } #[inline] pub fn max_image_extent(mut self, max_image_extent: Extent2D) -> Self { self.max_image_extent = max_image_extent; self } #[inline] 
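// A sketch for the RenderPassMultiviewCreateInfo builders above (assuming attachments,
// subpasses and dependencies are prepared elsewhere): one view mask per subpass, chained
// onto RenderPassCreateInfo.
//
//     let view_masks = [0b11u32];            // subpass 0 renders to views 0 and 1
//     let correlation_masks = [0b11u32];
//     let mut multiview_rp = vk::RenderPassMultiviewCreateInfo::default()
//         .view_masks(&view_masks)
//         .correlation_masks(&correlation_masks);
//     let rp_info = vk::RenderPassCreateInfo::default()
//         // .attachments(..) / .subpasses(..) / .dependencies(..) go here
//         .push_next(&mut multiview_rp);
//     let _render_pass = unsafe { device.create_render_pass(&rp_info, None) }.unwrap();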
pub fn max_image_array_layers(mut self, max_image_array_layers: u32) -> Self { self.max_image_array_layers = max_image_array_layers; self } #[inline] pub fn supported_transforms(mut self, supported_transforms: SurfaceTransformFlagsKHR) -> Self { self.supported_transforms = supported_transforms; self } #[inline] pub fn current_transform(mut self, current_transform: SurfaceTransformFlagsKHR) -> Self { self.current_transform = current_transform; self } #[inline] pub fn supported_composite_alpha( mut self, supported_composite_alpha: CompositeAlphaFlagsKHR, ) -> Self { self.supported_composite_alpha = supported_composite_alpha; self } #[inline] pub fn supported_usage_flags(mut self, supported_usage_flags: ImageUsageFlags) -> Self { self.supported_usage_flags = supported_usage_flags; self } #[inline] pub fn supported_surface_counters( mut self, supported_surface_counters: SurfaceCounterFlagsEXT, ) -> Self { self.supported_surface_counters = supported_surface_counters; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayPowerInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub power_state: DisplayPowerStateEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayPowerInfoEXT<'_> {} unsafe impl Sync for DisplayPowerInfoEXT<'_> {} impl ::core::default::Default for DisplayPowerInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), power_state: DisplayPowerStateEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayPowerInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_POWER_INFO_EXT; } impl<'a> DisplayPowerInfoEXT<'a> { #[inline] pub fn power_state(mut self, power_state: DisplayPowerStateEXT) -> Self { self.power_state = power_state; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceEventInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_event: DeviceEventTypeEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceEventInfoEXT<'_> {} unsafe impl Sync for DeviceEventInfoEXT<'_> {} impl ::core::default::Default for DeviceEventInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_event: DeviceEventTypeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceEventInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_EVENT_INFO_EXT; } impl<'a> DeviceEventInfoEXT<'a> { #[inline] pub fn device_event(mut self, device_event: DeviceEventTypeEXT) -> Self { self.device_event = device_event; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayEventInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub display_event: DisplayEventTypeEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayEventInfoEXT<'_> {} unsafe impl Sync for DisplayEventInfoEXT<'_> {} impl ::core::default::Default for DisplayEventInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), display_event: DisplayEventTypeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayEventInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_EVENT_INFO_EXT; } impl<'a> DisplayEventInfoEXT<'a> 
{ #[inline] pub fn display_event(mut self, display_event: DisplayEventTypeEXT) -> Self { self.display_event = display_event; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainCounterCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub surface_counters: SurfaceCounterFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainCounterCreateInfoEXT<'_> {} unsafe impl Sync for SwapchainCounterCreateInfoEXT<'_> {} impl ::core::default::Default for SwapchainCounterCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), surface_counters: SurfaceCounterFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainCounterCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_COUNTER_CREATE_INFO_EXT; } unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainCounterCreateInfoEXT<'_> {} impl<'a> SwapchainCounterCreateInfoEXT<'a> { #[inline] pub fn surface_counters(mut self, surface_counters: SurfaceCounterFlagsEXT) -> Self { self.surface_counters = surface_counters; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceGroupProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub physical_device_count: u32, pub physical_devices: [PhysicalDevice; MAX_DEVICE_GROUP_SIZE], pub subset_allocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceGroupProperties<'_> {} unsafe impl Sync for PhysicalDeviceGroupProperties<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for PhysicalDeviceGroupProperties<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PhysicalDeviceGroupProperties") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("physical_device_count", &self.physical_device_count) .field("physical_devices", &self.physical_devices_as_slice()) .field("subset_allocation", &self.subset_allocation) .finish() } } impl ::core::default::Default for PhysicalDeviceGroupProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), physical_device_count: u32::default(), physical_devices: unsafe { ::core::mem::zeroed() }, subset_allocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceGroupProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_GROUP_PROPERTIES; } impl<'a> PhysicalDeviceGroupProperties<'a> { #[inline] pub fn physical_devices(mut self, physical_devices: &'_ [PhysicalDevice]) -> Self { self.physical_device_count = physical_devices.len() as _; self.physical_devices[..physical_devices.len()].copy_from_slice(physical_devices); self } #[inline] pub fn physical_devices_as_slice(&self) -> &[PhysicalDevice] { &self.physical_devices[..self.physical_device_count as _] } #[inline] pub fn subset_allocation(mut self, subset_allocation: bool) -> Self { self.subset_allocation = subset_allocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryAllocateFlagsInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: MemoryAllocateFlags, pub device_mask: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryAllocateFlagsInfo<'_> {} unsafe impl Sync for MemoryAllocateFlagsInfo<'_> {} impl 
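// A usage sketch for PhysicalDeviceGroupProperties above (assuming `instance` as before;
// the exact shape of the enumeration call is an assumption): the fixed-size
// `physical_devices` array is only meaningful up to `physical_device_count`, which is what
// `physical_devices_as_slice` exposes.
//
//     let groups = unsafe { instance.enumerate_physical_device_groups() }.unwrap();
//     for group in &groups {
//         for &phys_dev in group.physical_devices_as_slice() {
//             // inspect each vk::PhysicalDevice that belongs to this group
//             let _ = phys_dev;
//         }
//     }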
::core::default::Default for MemoryAllocateFlagsInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: MemoryAllocateFlags::default(), device_mask: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryAllocateFlagsInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_ALLOCATE_FLAGS_INFO; } unsafe impl ExtendsMemoryAllocateInfo for MemoryAllocateFlagsInfo<'_> {} impl<'a> MemoryAllocateFlagsInfo<'a> { #[inline] pub fn flags(mut self, flags: MemoryAllocateFlags) -> Self { self.flags = flags; self } #[inline] pub fn device_mask(mut self, device_mask: u32) -> Self { self.device_mask = device_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindBufferMemoryInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: Buffer, pub memory: DeviceMemory, pub memory_offset: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindBufferMemoryInfo<'_> {} unsafe impl Sync for BindBufferMemoryInfo<'_> {} impl ::core::default::Default for BindBufferMemoryInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: Buffer::default(), memory: DeviceMemory::default(), memory_offset: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindBufferMemoryInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_BUFFER_MEMORY_INFO; } pub unsafe trait ExtendsBindBufferMemoryInfo {} impl<'a> BindBufferMemoryInfo<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self { self.memory_offset = memory_offset; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindBufferMemoryDeviceGroupInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_index_count: u32, pub p_device_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindBufferMemoryDeviceGroupInfo<'_> {} unsafe impl Sync for BindBufferMemoryDeviceGroupInfo<'_> {} impl ::core::default::Default for BindBufferMemoryDeviceGroupInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_index_count: u32::default(), p_device_indices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindBufferMemoryDeviceGroupInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO; } unsafe impl ExtendsBindBufferMemoryInfo for BindBufferMemoryDeviceGroupInfo<'_> {} impl<'a> BindBufferMemoryDeviceGroupInfo<'a> { #[inline] pub fn device_indices(mut self, device_indices: &'a [u32]) -> Self { self.device_index_count = device_indices.len() as _; self.p_device_indices = device_indices.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindImageMemoryInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub memory: DeviceMemory, pub memory_offset: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindImageMemoryInfo<'_> {} unsafe impl Sync for BindImageMemoryInfo<'_> {} impl ::core::default::Default for BindImageMemoryInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), memory: DeviceMemory::default(), memory_offset: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindImageMemoryInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_MEMORY_INFO; } pub unsafe trait ExtendsBindImageMemoryInfo {} impl<'a> BindImageMemoryInfo<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self { self.memory_offset = memory_offset; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindImageMemoryDeviceGroupInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_index_count: u32, pub p_device_indices: *const u32, pub split_instance_bind_region_count: u32, pub p_split_instance_bind_regions: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindImageMemoryDeviceGroupInfo<'_> {} unsafe impl Sync for BindImageMemoryDeviceGroupInfo<'_> {} impl ::core::default::Default for BindImageMemoryDeviceGroupInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_index_count: u32::default(), p_device_indices: ::core::ptr::null(), split_instance_bind_region_count: u32::default(), p_split_instance_bind_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindImageMemoryDeviceGroupInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO; } unsafe impl ExtendsBindImageMemoryInfo for BindImageMemoryDeviceGroupInfo<'_> {} impl<'a> BindImageMemoryDeviceGroupInfo<'a> { #[inline] pub fn device_indices(mut self, device_indices: &'a [u32]) -> Self { self.device_index_count = device_indices.len() as _; self.p_device_indices = device_indices.as_ptr(); self } #[inline] pub fn split_instance_bind_regions( mut self, split_instance_bind_regions: &'a [Rect2D], ) -> Self { self.split_instance_bind_region_count = split_instance_bind_regions.len() as _; self.p_split_instance_bind_regions = split_instance_bind_regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupRenderPassBeginInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_mask: u32, pub device_render_area_count: u32, pub p_device_render_areas: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupRenderPassBeginInfo<'_> {} unsafe impl Sync for DeviceGroupRenderPassBeginInfo<'_> {} impl ::core::default::Default for DeviceGroupRenderPassBeginInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_mask: u32::default(), device_render_area_count: u32::default(), p_device_render_areas: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupRenderPassBeginInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO; } unsafe impl ExtendsRenderPassBeginInfo for DeviceGroupRenderPassBeginInfo<'_> {} unsafe impl ExtendsRenderingInfo for DeviceGroupRenderPassBeginInfo<'_> {} impl<'a> DeviceGroupRenderPassBeginInfo<'a> { #[inline] pub fn device_mask(mut self, device_mask: u32) -> Self { self.device_mask = device_mask; self } #[inline] pub fn device_render_areas(mut self, device_render_areas: &'a [Rect2D]) -> Self { self.device_render_area_count = device_render_areas.len() as _; self.p_device_render_areas = 
device_render_areas.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupCommandBufferBeginInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_mask: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupCommandBufferBeginInfo<'_> {} unsafe impl Sync for DeviceGroupCommandBufferBeginInfo<'_> {} impl ::core::default::Default for DeviceGroupCommandBufferBeginInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_mask: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupCommandBufferBeginInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO; } unsafe impl ExtendsCommandBufferBeginInfo for DeviceGroupCommandBufferBeginInfo<'_> {} impl<'a> DeviceGroupCommandBufferBeginInfo<'a> { #[inline] pub fn device_mask(mut self, device_mask: u32) -> Self { self.device_mask = device_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupSubmitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub wait_semaphore_count: u32, pub p_wait_semaphore_device_indices: *const u32, pub command_buffer_count: u32, pub p_command_buffer_device_masks: *const u32, pub signal_semaphore_count: u32, pub p_signal_semaphore_device_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupSubmitInfo<'_> {} unsafe impl Sync for DeviceGroupSubmitInfo<'_> {} impl ::core::default::Default for DeviceGroupSubmitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), wait_semaphore_count: u32::default(), p_wait_semaphore_device_indices: ::core::ptr::null(), command_buffer_count: u32::default(), p_command_buffer_device_masks: ::core::ptr::null(), signal_semaphore_count: u32::default(), p_signal_semaphore_device_indices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupSubmitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_SUBMIT_INFO; } unsafe impl ExtendsSubmitInfo for DeviceGroupSubmitInfo<'_> {} impl<'a> DeviceGroupSubmitInfo<'a> { #[inline] pub fn wait_semaphore_device_indices( mut self, wait_semaphore_device_indices: &'a [u32], ) -> Self { self.wait_semaphore_count = wait_semaphore_device_indices.len() as _; self.p_wait_semaphore_device_indices = wait_semaphore_device_indices.as_ptr(); self } #[inline] pub fn command_buffer_device_masks(mut self, command_buffer_device_masks: &'a [u32]) -> Self { self.command_buffer_count = command_buffer_device_masks.len() as _; self.p_command_buffer_device_masks = command_buffer_device_masks.as_ptr(); self } #[inline] pub fn signal_semaphore_device_indices( mut self, signal_semaphore_device_indices: &'a [u32], ) -> Self { self.signal_semaphore_count = signal_semaphore_device_indices.len() as _; self.p_signal_semaphore_device_indices = signal_semaphore_device_indices.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupBindSparseInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub resource_device_index: u32, pub memory_device_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupBindSparseInfo<'_> {} unsafe impl Sync for 
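// A sketch for DeviceGroupSubmitInfo above (assuming `queue` and `cmd_buf` as before): the
// mask array is parallel to SubmitInfo::command_buffers and selects which group members
// execute each command buffer.
//
//     let cmd_bufs = [cmd_buf];
//     let device_masks = [0b01u32];          // command buffer 0 runs on physical device 0
//     let mut dg_submit = vk::DeviceGroupSubmitInfo::default()
//         .command_buffer_device_masks(&device_masks);
//     let submit = vk::SubmitInfo::default()
//         .command_buffers(&cmd_bufs)
//         .push_next(&mut dg_submit);
//     unsafe { device.queue_submit(queue, &[submit], vk::Fence::null()) }.unwrap();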
DeviceGroupBindSparseInfo<'_> {} impl ::core::default::Default for DeviceGroupBindSparseInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), resource_device_index: u32::default(), memory_device_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupBindSparseInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_BIND_SPARSE_INFO; } unsafe impl ExtendsBindSparseInfo for DeviceGroupBindSparseInfo<'_> {} impl<'a> DeviceGroupBindSparseInfo<'a> { #[inline] pub fn resource_device_index(mut self, resource_device_index: u32) -> Self { self.resource_device_index = resource_device_index; self } #[inline] pub fn memory_device_index(mut self, memory_device_index: u32) -> Self { self.memory_device_index = memory_device_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupPresentCapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_mask: [u32; MAX_DEVICE_GROUP_SIZE], pub modes: DeviceGroupPresentModeFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupPresentCapabilitiesKHR<'_> {} unsafe impl Sync for DeviceGroupPresentCapabilitiesKHR<'_> {} impl ::core::default::Default for DeviceGroupPresentCapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_mask: unsafe { ::core::mem::zeroed() }, modes: DeviceGroupPresentModeFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupPresentCapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_PRESENT_CAPABILITIES_KHR; } impl<'a> DeviceGroupPresentCapabilitiesKHR<'a> { #[inline] pub fn present_mask(mut self, present_mask: [u32; MAX_DEVICE_GROUP_SIZE]) -> Self { self.present_mask = present_mask; self } #[inline] pub fn modes(mut self, modes: DeviceGroupPresentModeFlagsKHR) -> Self { self.modes = modes; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageSwapchainCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain: SwapchainKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageSwapchainCreateInfoKHR<'_> {} unsafe impl Sync for ImageSwapchainCreateInfoKHR<'_> {} impl ::core::default::Default for ImageSwapchainCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain: SwapchainKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageSwapchainCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_SWAPCHAIN_CREATE_INFO_KHR; } unsafe impl ExtendsImageCreateInfo for ImageSwapchainCreateInfoKHR<'_> {} impl<'a> ImageSwapchainCreateInfoKHR<'a> { #[inline] pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self { self.swapchain = swapchain; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindImageMemorySwapchainInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain: SwapchainKHR, pub image_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindImageMemorySwapchainInfoKHR<'_> {} unsafe impl Sync for BindImageMemorySwapchainInfoKHR<'_> {} impl ::core::default::Default for 
BindImageMemorySwapchainInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain: SwapchainKHR::default(), image_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindImageMemorySwapchainInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR; } unsafe impl ExtendsBindImageMemoryInfo for BindImageMemorySwapchainInfoKHR<'_> {} impl<'a> BindImageMemorySwapchainInfoKHR<'a> { #[inline] pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self { self.swapchain = swapchain; self } #[inline] pub fn image_index(mut self, image_index: u32) -> Self { self.image_index = image_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AcquireNextImageInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain: SwapchainKHR, pub timeout: u64, pub semaphore: Semaphore, pub fence: Fence, pub device_mask: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AcquireNextImageInfoKHR<'_> {} unsafe impl Sync for AcquireNextImageInfoKHR<'_> {} impl ::core::default::Default for AcquireNextImageInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain: SwapchainKHR::default(), timeout: u64::default(), semaphore: Semaphore::default(), fence: Fence::default(), device_mask: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AcquireNextImageInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACQUIRE_NEXT_IMAGE_INFO_KHR; } impl<'a> AcquireNextImageInfoKHR<'a> { #[inline] pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self { self.swapchain = swapchain; self } #[inline] pub fn timeout(mut self, timeout: u64) -> Self { self.timeout = timeout; self } #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn fence(mut self, fence: Fence) -> Self { self.fence = fence; self } #[inline] pub fn device_mask(mut self, device_mask: u32) -> Self { self.device_mask = device_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupPresentInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain_count: u32, pub p_device_masks: *const u32, pub mode: DeviceGroupPresentModeFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupPresentInfoKHR<'_> {} unsafe impl Sync for DeviceGroupPresentInfoKHR<'_> {} impl ::core::default::Default for DeviceGroupPresentInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain_count: u32::default(), p_device_masks: ::core::ptr::null(), mode: DeviceGroupPresentModeFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupPresentInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_PRESENT_INFO_KHR; } unsafe impl ExtendsPresentInfoKHR for DeviceGroupPresentInfoKHR<'_> {} impl<'a> DeviceGroupPresentInfoKHR<'a> { #[inline] pub fn device_masks(mut self, device_masks: &'a [u32]) -> Self { self.swapchain_count = device_masks.len() as _; self.p_device_masks = device_masks.as_ptr(); self } #[inline] pub fn mode(mut self, mode: DeviceGroupPresentModeFlagsKHR) -> Self { self.mode = mode; self } } #[repr(C)] #[cfg_attr(feature = 
"debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupDeviceCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub physical_device_count: u32, pub p_physical_devices: *const PhysicalDevice, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupDeviceCreateInfo<'_> {} unsafe impl Sync for DeviceGroupDeviceCreateInfo<'_> {} impl ::core::default::Default for DeviceGroupDeviceCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), physical_device_count: u32::default(), p_physical_devices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupDeviceCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_DEVICE_CREATE_INFO; } unsafe impl ExtendsDeviceCreateInfo for DeviceGroupDeviceCreateInfo<'_> {} impl<'a> DeviceGroupDeviceCreateInfo<'a> { #[inline] pub fn physical_devices(mut self, physical_devices: &'a [PhysicalDevice]) -> Self { self.physical_device_count = physical_devices.len() as _; self.p_physical_devices = physical_devices.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceGroupSwapchainCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub modes: DeviceGroupPresentModeFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceGroupSwapchainCreateInfoKHR<'_> {} unsafe impl Sync for DeviceGroupSwapchainCreateInfoKHR<'_> {} impl ::core::default::Default for DeviceGroupSwapchainCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), modes: DeviceGroupPresentModeFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceGroupSwapchainCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR; } unsafe impl ExtendsSwapchainCreateInfoKHR for DeviceGroupSwapchainCreateInfoKHR<'_> {} impl<'a> DeviceGroupSwapchainCreateInfoKHR<'a> { #[inline] pub fn modes(mut self, modes: DeviceGroupPresentModeFlagsKHR) -> Self { self.modes = modes; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DescriptorUpdateTemplateEntry { pub dst_binding: u32, pub dst_array_element: u32, pub descriptor_count: u32, pub descriptor_type: DescriptorType, pub offset: usize, pub stride: usize, } impl DescriptorUpdateTemplateEntry { #[inline] pub fn dst_binding(mut self, dst_binding: u32) -> Self { self.dst_binding = dst_binding; self } #[inline] pub fn dst_array_element(mut self, dst_array_element: u32) -> Self { self.dst_array_element = dst_array_element; self } #[inline] pub fn descriptor_count(mut self, descriptor_count: u32) -> Self { self.descriptor_count = descriptor_count; self } #[inline] pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self { self.descriptor_type = descriptor_type; self } #[inline] pub fn offset(mut self, offset: usize) -> Self { self.offset = offset; self } #[inline] pub fn stride(mut self, stride: usize) -> Self { self.stride = stride; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorUpdateTemplateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DescriptorUpdateTemplateCreateFlags, pub 
descriptor_update_entry_count: u32, pub p_descriptor_update_entries: *const DescriptorUpdateTemplateEntry, pub template_type: DescriptorUpdateTemplateType, pub descriptor_set_layout: DescriptorSetLayout, pub pipeline_bind_point: PipelineBindPoint, pub pipeline_layout: PipelineLayout, pub set: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorUpdateTemplateCreateInfo<'_> {} unsafe impl Sync for DescriptorUpdateTemplateCreateInfo<'_> {} impl ::core::default::Default for DescriptorUpdateTemplateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DescriptorUpdateTemplateCreateFlags::default(), descriptor_update_entry_count: u32::default(), p_descriptor_update_entries: ::core::ptr::null(), template_type: DescriptorUpdateTemplateType::default(), descriptor_set_layout: DescriptorSetLayout::default(), pipeline_bind_point: PipelineBindPoint::default(), pipeline_layout: PipelineLayout::default(), set: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorUpdateTemplateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO; } impl<'a> DescriptorUpdateTemplateCreateInfo<'a> { #[inline] pub fn flags(mut self, flags: DescriptorUpdateTemplateCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn descriptor_update_entries( mut self, descriptor_update_entries: &'a [DescriptorUpdateTemplateEntry], ) -> Self { self.descriptor_update_entry_count = descriptor_update_entries.len() as _; self.p_descriptor_update_entries = descriptor_update_entries.as_ptr(); self } #[inline] pub fn template_type(mut self, template_type: DescriptorUpdateTemplateType) -> Self { self.template_type = template_type; self } #[inline] pub fn descriptor_set_layout(mut self, descriptor_set_layout: DescriptorSetLayout) -> Self { self.descriptor_set_layout = descriptor_set_layout; self } #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn pipeline_layout(mut self, pipeline_layout: PipelineLayout) -> Self { self.pipeline_layout = pipeline_layout; self } #[inline] pub fn set(mut self, set: u32) -> Self { self.set = set; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct XYColorEXT { pub x: f32, pub y: f32, } impl XYColorEXT { #[inline] pub fn x(mut self, x: f32) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: f32) -> Self { self.y = y; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePresentIdFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_id: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePresentIdFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDevicePresentIdFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePresentIdFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_id: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePresentIdFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentIdFeaturesKHR<'_> {} unsafe impl 
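// A sketch for DescriptorUpdateTemplateEntry / DescriptorUpdateTemplateCreateInfo above
// (assuming a `set_layout: vk::DescriptorSetLayout` exists): each entry describes where the
// descriptor data lives inside the host structure later passed to
// update_descriptor_set_with_template.
//
//     let entries = [vk::DescriptorUpdateTemplateEntry::default()
//         .dst_binding(0)
//         .descriptor_count(1)
//         .descriptor_type(vk::DescriptorType::UNIFORM_BUFFER)
//         .offset(0)
//         .stride(core::mem::size_of::<vk::DescriptorBufferInfo>())];
//     let ci = vk::DescriptorUpdateTemplateCreateInfo::default()
//         .descriptor_update_entries(&entries)
//         .template_type(vk::DescriptorUpdateTemplateType::DESCRIPTOR_SET)
//         .descriptor_set_layout(set_layout);
//     let _template =
//         unsafe { device.create_descriptor_update_template(&ci, None) }.unwrap();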
ExtendsDeviceCreateInfo for PhysicalDevicePresentIdFeaturesKHR<'_> {} impl<'a> PhysicalDevicePresentIdFeaturesKHR<'a> { #[inline] pub fn present_id(mut self, present_id: bool) -> Self { self.present_id = present_id.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PresentIdKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain_count: u32, pub p_present_ids: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PresentIdKHR<'_> {} unsafe impl Sync for PresentIdKHR<'_> {} impl ::core::default::Default for PresentIdKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain_count: u32::default(), p_present_ids: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PresentIdKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_ID_KHR; } unsafe impl ExtendsPresentInfoKHR for PresentIdKHR<'_> {} impl<'a> PresentIdKHR<'a> { #[inline] pub fn present_ids(mut self, present_ids: &'a [u64]) -> Self { self.swapchain_count = present_ids.len() as _; self.p_present_ids = present_ids.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePresentWaitFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_wait: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePresentWaitFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDevicePresentWaitFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePresentWaitFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_wait: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePresentWaitFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentWaitFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentWaitFeaturesKHR<'_> {} impl<'a> PhysicalDevicePresentWaitFeaturesKHR<'a> { #[inline] pub fn present_wait(mut self, present_wait: bool) -> Self { self.present_wait = present_wait.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct HdrMetadataEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub display_primary_red: XYColorEXT, pub display_primary_green: XYColorEXT, pub display_primary_blue: XYColorEXT, pub white_point: XYColorEXT, pub max_luminance: f32, pub min_luminance: f32, pub max_content_light_level: f32, pub max_frame_average_light_level: f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for HdrMetadataEXT<'_> {} unsafe impl Sync for HdrMetadataEXT<'_> {} impl ::core::default::Default for HdrMetadataEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), display_primary_red: XYColorEXT::default(), display_primary_green: XYColorEXT::default(), display_primary_blue: XYColorEXT::default(), white_point: XYColorEXT::default(), max_luminance: f32::default(), min_luminance: f32::default(), max_content_light_level: f32::default(), max_frame_average_light_level: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for HdrMetadataEXT<'a> { const STRUCTURE_TYPE: 
StructureType = StructureType::HDR_METADATA_EXT; } impl<'a> HdrMetadataEXT<'a> { #[inline] pub fn display_primary_red(mut self, display_primary_red: XYColorEXT) -> Self { self.display_primary_red = display_primary_red; self } #[inline] pub fn display_primary_green(mut self, display_primary_green: XYColorEXT) -> Self { self.display_primary_green = display_primary_green; self } #[inline] pub fn display_primary_blue(mut self, display_primary_blue: XYColorEXT) -> Self { self.display_primary_blue = display_primary_blue; self } #[inline] pub fn white_point(mut self, white_point: XYColorEXT) -> Self { self.white_point = white_point; self } #[inline] pub fn max_luminance(mut self, max_luminance: f32) -> Self { self.max_luminance = max_luminance; self } #[inline] pub fn min_luminance(mut self, min_luminance: f32) -> Self { self.min_luminance = min_luminance; self } #[inline] pub fn max_content_light_level(mut self, max_content_light_level: f32) -> Self { self.max_content_light_level = max_content_light_level; self } #[inline] pub fn max_frame_average_light_level(mut self, max_frame_average_light_level: f32) -> Self { self.max_frame_average_light_level = max_frame_average_light_level; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayNativeHdrSurfaceCapabilitiesAMD<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub local_dimming_support: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayNativeHdrSurfaceCapabilitiesAMD<'_> {} unsafe impl Sync for DisplayNativeHdrSurfaceCapabilitiesAMD<'_> {} impl ::core::default::Default for DisplayNativeHdrSurfaceCapabilitiesAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), local_dimming_support: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayNativeHdrSurfaceCapabilitiesAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD; } unsafe impl ExtendsSurfaceCapabilities2KHR for DisplayNativeHdrSurfaceCapabilitiesAMD<'_> {} impl<'a> DisplayNativeHdrSurfaceCapabilitiesAMD<'a> { #[inline] pub fn local_dimming_support(mut self, local_dimming_support: bool) -> Self { self.local_dimming_support = local_dimming_support.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainDisplayNativeHdrCreateInfoAMD<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub local_dimming_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainDisplayNativeHdrCreateInfoAMD<'_> {} unsafe impl Sync for SwapchainDisplayNativeHdrCreateInfoAMD<'_> {} impl ::core::default::Default for SwapchainDisplayNativeHdrCreateInfoAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), local_dimming_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainDisplayNativeHdrCreateInfoAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD; } unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainDisplayNativeHdrCreateInfoAMD<'_> {} impl<'a> SwapchainDisplayNativeHdrCreateInfoAMD<'a> { #[inline] pub fn local_dimming_enable(mut self, local_dimming_enable: bool) -> Self { self.local_dimming_enable = local_dimming_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", 
derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct RefreshCycleDurationGOOGLE { pub refresh_duration: u64, } impl RefreshCycleDurationGOOGLE { #[inline] pub fn refresh_duration(mut self, refresh_duration: u64) -> Self { self.refresh_duration = refresh_duration; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PastPresentationTimingGOOGLE { pub present_id: u32, pub desired_present_time: u64, pub actual_present_time: u64, pub earliest_present_time: u64, pub present_margin: u64, } impl PastPresentationTimingGOOGLE { #[inline] pub fn present_id(mut self, present_id: u32) -> Self { self.present_id = present_id; self } #[inline] pub fn desired_present_time(mut self, desired_present_time: u64) -> Self { self.desired_present_time = desired_present_time; self } #[inline] pub fn actual_present_time(mut self, actual_present_time: u64) -> Self { self.actual_present_time = actual_present_time; self } #[inline] pub fn earliest_present_time(mut self, earliest_present_time: u64) -> Self { self.earliest_present_time = earliest_present_time; self } #[inline] pub fn present_margin(mut self, present_margin: u64) -> Self { self.present_margin = present_margin; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PresentTimesInfoGOOGLE<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain_count: u32, pub p_times: *const PresentTimeGOOGLE, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PresentTimesInfoGOOGLE<'_> {} unsafe impl Sync for PresentTimesInfoGOOGLE<'_> {} impl ::core::default::Default for PresentTimesInfoGOOGLE<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain_count: u32::default(), p_times: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PresentTimesInfoGOOGLE<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_TIMES_INFO_GOOGLE; } unsafe impl ExtendsPresentInfoKHR for PresentTimesInfoGOOGLE<'_> {} impl<'a> PresentTimesInfoGOOGLE<'a> { #[inline] pub fn times(mut self, times: &'a [PresentTimeGOOGLE]) -> Self { self.swapchain_count = times.len() as _; self.p_times = times.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PresentTimeGOOGLE { pub present_id: u32, pub desired_present_time: u64, } impl PresentTimeGOOGLE { #[inline] pub fn present_id(mut self, present_id: u32) -> Self { self.present_id = present_id; self } #[inline] pub fn desired_present_time(mut self, desired_present_time: u64) -> Self { self.desired_present_time = desired_present_time; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct IOSSurfaceCreateInfoMVK<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: IOSSurfaceCreateFlagsMVK, pub p_view: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for IOSSurfaceCreateInfoMVK<'_> {} unsafe impl Sync for IOSSurfaceCreateInfoMVK<'_> {} impl ::core::default::Default for IOSSurfaceCreateInfoMVK<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: IOSSurfaceCreateFlagsMVK::default(), p_view: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for IOSSurfaceCreateInfoMVK<'a> { const 
STRUCTURE_TYPE: StructureType = StructureType::IOS_SURFACE_CREATE_INFO_MVK; } impl<'a> IOSSurfaceCreateInfoMVK<'a> { #[inline] pub fn flags(mut self, flags: IOSSurfaceCreateFlagsMVK) -> Self { self.flags = flags; self } #[inline] pub fn view(mut self, view: *const c_void) -> Self { self.p_view = view; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MacOSSurfaceCreateInfoMVK<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: MacOSSurfaceCreateFlagsMVK, pub p_view: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MacOSSurfaceCreateInfoMVK<'_> {} unsafe impl Sync for MacOSSurfaceCreateInfoMVK<'_> {} impl ::core::default::Default for MacOSSurfaceCreateInfoMVK<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: MacOSSurfaceCreateFlagsMVK::default(), p_view: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MacOSSurfaceCreateInfoMVK<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MACOS_SURFACE_CREATE_INFO_MVK; } impl<'a> MacOSSurfaceCreateInfoMVK<'a> { #[inline] pub fn flags(mut self, flags: MacOSSurfaceCreateFlagsMVK) -> Self { self.flags = flags; self } #[inline] pub fn view(mut self, view: *const c_void) -> Self { self.p_view = view; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MetalSurfaceCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: MetalSurfaceCreateFlagsEXT, pub p_layer: *const CAMetalLayer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MetalSurfaceCreateInfoEXT<'_> {} unsafe impl Sync for MetalSurfaceCreateInfoEXT<'_> {} impl ::core::default::Default for MetalSurfaceCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: MetalSurfaceCreateFlagsEXT::default(), p_layer: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MetalSurfaceCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::METAL_SURFACE_CREATE_INFO_EXT; } impl<'a> MetalSurfaceCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: MetalSurfaceCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn layer(mut self, layer: *const CAMetalLayer) -> Self { self.p_layer = layer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ViewportWScalingNV { pub xcoeff: f32, pub ycoeff: f32, } impl ViewportWScalingNV { #[inline] pub fn xcoeff(mut self, xcoeff: f32) -> Self { self.xcoeff = xcoeff; self } #[inline] pub fn ycoeff(mut self, ycoeff: f32) -> Self { self.ycoeff = ycoeff; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportWScalingStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub viewport_w_scaling_enable: Bool32, pub viewport_count: u32, pub p_viewport_w_scalings: *const ViewportWScalingNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportWScalingStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineViewportWScalingStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineViewportWScalingStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), 
viewport_w_scaling_enable: Bool32::default(), viewport_count: u32::default(), p_viewport_w_scalings: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportWScalingStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportWScalingStateCreateInfoNV<'_> { } impl<'a> PipelineViewportWScalingStateCreateInfoNV<'a> { #[inline] pub fn viewport_w_scaling_enable(mut self, viewport_w_scaling_enable: bool) -> Self { self.viewport_w_scaling_enable = viewport_w_scaling_enable.into(); self } #[inline] pub fn viewport_w_scalings(mut self, viewport_w_scalings: &'a [ViewportWScalingNV]) -> Self { self.viewport_count = viewport_w_scalings.len() as _; self.p_viewport_w_scalings = viewport_w_scalings.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ViewportSwizzleNV { pub x: ViewportCoordinateSwizzleNV, pub y: ViewportCoordinateSwizzleNV, pub z: ViewportCoordinateSwizzleNV, pub w: ViewportCoordinateSwizzleNV, } impl ViewportSwizzleNV { #[inline] pub fn x(mut self, x: ViewportCoordinateSwizzleNV) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: ViewportCoordinateSwizzleNV) -> Self { self.y = y; self } #[inline] pub fn z(mut self, z: ViewportCoordinateSwizzleNV) -> Self { self.z = z; self } #[inline] pub fn w(mut self, w: ViewportCoordinateSwizzleNV) -> Self { self.w = w; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportSwizzleStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineViewportSwizzleStateCreateFlagsNV, pub viewport_count: u32, pub p_viewport_swizzles: *const ViewportSwizzleNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportSwizzleStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineViewportSwizzleStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineViewportSwizzleStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineViewportSwizzleStateCreateFlagsNV::default(), viewport_count: u32::default(), p_viewport_swizzles: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportSwizzleStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportSwizzleStateCreateInfoNV<'_> { } impl<'a> PipelineViewportSwizzleStateCreateInfoNV<'a> { #[inline] pub fn flags(mut self, flags: PipelineViewportSwizzleStateCreateFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn viewport_swizzles(mut self, viewport_swizzles: &'a [ViewportSwizzleNV]) -> Self { self.viewport_count = viewport_swizzles.len() as _; self.p_viewport_swizzles = viewport_swizzles.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDiscardRectanglePropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_discard_rectangles: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDiscardRectanglePropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDiscardRectanglePropertiesEXT<'_> {} impl 
::core::default::Default for PhysicalDeviceDiscardRectanglePropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_discard_rectangles: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDiscardRectanglePropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDiscardRectanglePropertiesEXT<'_> {} impl<'a> PhysicalDeviceDiscardRectanglePropertiesEXT<'a> { #[inline] pub fn max_discard_rectangles(mut self, max_discard_rectangles: u32) -> Self { self.max_discard_rectangles = max_discard_rectangles; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineDiscardRectangleStateCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineDiscardRectangleStateCreateFlagsEXT, pub discard_rectangle_mode: DiscardRectangleModeEXT, pub discard_rectangle_count: u32, pub p_discard_rectangles: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineDiscardRectangleStateCreateInfoEXT<'_> {} unsafe impl Sync for PipelineDiscardRectangleStateCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineDiscardRectangleStateCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineDiscardRectangleStateCreateFlagsEXT::default(), discard_rectangle_mode: DiscardRectangleModeEXT::default(), discard_rectangle_count: u32::default(), p_discard_rectangles: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineDiscardRectangleStateCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineDiscardRectangleStateCreateInfoEXT<'_> {} impl<'a> PipelineDiscardRectangleStateCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: PipelineDiscardRectangleStateCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn discard_rectangle_mode( mut self, discard_rectangle_mode: DiscardRectangleModeEXT, ) -> Self { self.discard_rectangle_mode = discard_rectangle_mode; self } #[inline] pub fn discard_rectangles(mut self, discard_rectangles: &'a [Rect2D]) -> Self { self.discard_rectangle_count = discard_rectangles.len() as _; self.p_discard_rectangles = discard_rectangles.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub per_view_position_all_components: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'_> {} unsafe impl Sync for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'_> {} impl ::core::default::Default for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), per_view_position_all_components: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'_> { } impl<'a> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX<'a> { #[inline] pub fn per_view_position_all_components( mut self, per_view_position_all_components: bool, ) -> Self { self.per_view_position_all_components = per_view_position_all_components.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct InputAttachmentAspectReference { pub subpass: u32, pub input_attachment_index: u32, pub aspect_mask: ImageAspectFlags, } impl InputAttachmentAspectReference { #[inline] pub fn subpass(mut self, subpass: u32) -> Self { self.subpass = subpass; self } #[inline] pub fn input_attachment_index(mut self, input_attachment_index: u32) -> Self { self.input_attachment_index = input_attachment_index; self } #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassInputAttachmentAspectCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub aspect_reference_count: u32, pub p_aspect_references: *const InputAttachmentAspectReference, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassInputAttachmentAspectCreateInfo<'_> {} unsafe impl Sync for RenderPassInputAttachmentAspectCreateInfo<'_> {} impl ::core::default::Default for RenderPassInputAttachmentAspectCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), aspect_reference_count: u32::default(), p_aspect_references: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassInputAttachmentAspectCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO; } unsafe impl ExtendsRenderPassCreateInfo for RenderPassInputAttachmentAspectCreateInfo<'_> {} impl<'a> RenderPassInputAttachmentAspectCreateInfo<'a> { #[inline] pub fn aspect_references( mut self, aspect_references: &'a [InputAttachmentAspectReference], ) -> Self { self.aspect_reference_count = aspect_references.len() as _; self.p_aspect_references = aspect_references.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSurfaceInfo2KHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub surface: SurfaceKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSurfaceInfo2KHR<'_> {} unsafe impl Sync for PhysicalDeviceSurfaceInfo2KHR<'_> {} impl ::core::default::Default for PhysicalDeviceSurfaceInfo2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), surface: SurfaceKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSurfaceInfo2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SURFACE_INFO_2_KHR; } pub unsafe trait ExtendsPhysicalDeviceSurfaceInfo2KHR {} impl<'a> PhysicalDeviceSurfaceInfo2KHR<'a> { #[inline] pub fn surface(mut self, surface: SurfaceKHR) -> Self { self.surface = surface; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. 
This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsPhysicalDeviceSurfaceInfo2KHR + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceCapabilities2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub surface_capabilities: SurfaceCapabilitiesKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceCapabilities2KHR<'_> {} unsafe impl Sync for SurfaceCapabilities2KHR<'_> {} impl ::core::default::Default for SurfaceCapabilities2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), surface_capabilities: SurfaceCapabilitiesKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceCapabilities2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_2_KHR; } pub unsafe trait ExtendsSurfaceCapabilities2KHR {} impl<'a> SurfaceCapabilities2KHR<'a> { #[inline] pub fn surface_capabilities(mut self, surface_capabilities: SurfaceCapabilitiesKHR) -> Self { self.surface_capabilities = surface_capabilities; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsSurfaceCapabilities2KHR + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceFormat2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub surface_format: SurfaceFormatKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceFormat2KHR<'_> {} unsafe impl Sync for SurfaceFormat2KHR<'_> {} impl ::core::default::Default for SurfaceFormat2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), surface_format: SurfaceFormatKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceFormat2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_FORMAT_2_KHR; } pub unsafe trait ExtendsSurfaceFormat2KHR {} impl<'a> SurfaceFormat2KHR<'a> { #[inline] pub fn surface_format(mut self, surface_format: SurfaceFormatKHR) -> Self { self.surface_format = surface_format; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsSurfaceFormat2KHR + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayProperties2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub display_properties: DisplayPropertiesKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayProperties2KHR<'_> {} unsafe impl Sync for DisplayProperties2KHR<'_> {} impl ::core::default::Default for DisplayProperties2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), display_properties: DisplayPropertiesKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayProperties2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PROPERTIES_2_KHR; } impl<'a> DisplayProperties2KHR<'a> { #[inline] pub fn display_properties(mut self, display_properties: DisplayPropertiesKHR<'a>) -> Self { self.display_properties = display_properties; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayPlaneProperties2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub display_plane_properties: DisplayPlanePropertiesKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayPlaneProperties2KHR<'_> {} unsafe impl Sync for DisplayPlaneProperties2KHR<'_> {} impl ::core::default::Default for DisplayPlaneProperties2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), display_plane_properties: DisplayPlanePropertiesKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayPlaneProperties2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PLANE_PROPERTIES_2_KHR; } impl<'a> DisplayPlaneProperties2KHR<'a> { #[inline] pub fn display_plane_properties( mut self, display_plane_properties: DisplayPlanePropertiesKHR, ) -> Self { self.display_plane_properties = display_plane_properties; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayModeProperties2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub display_mode_properties: DisplayModePropertiesKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayModeProperties2KHR<'_> {} unsafe impl Sync for DisplayModeProperties2KHR<'_> {} impl ::core::default::Default for DisplayModeProperties2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), display_mode_properties: DisplayModePropertiesKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayModeProperties2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_MODE_PROPERTIES_2_KHR; } impl<'a> DisplayModeProperties2KHR<'a> { #[inline] pub fn display_mode_properties( mut self, display_mode_properties: DisplayModePropertiesKHR, ) -> Self { self.display_mode_properties = display_mode_properties; self } } #[repr(C)] #[cfg_attr(feature = "debug", 
derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayPlaneInfo2KHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub mode: DisplayModeKHR, pub plane_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayPlaneInfo2KHR<'_> {} unsafe impl Sync for DisplayPlaneInfo2KHR<'_> {} impl ::core::default::Default for DisplayPlaneInfo2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), mode: DisplayModeKHR::default(), plane_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayPlaneInfo2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PLANE_INFO_2_KHR; } impl<'a> DisplayPlaneInfo2KHR<'a> { #[inline] pub fn mode(mut self, mode: DisplayModeKHR) -> Self { self.mode = mode; self } #[inline] pub fn plane_index(mut self, plane_index: u32) -> Self { self.plane_index = plane_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DisplayPlaneCapabilities2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub capabilities: DisplayPlaneCapabilitiesKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DisplayPlaneCapabilities2KHR<'_> {} unsafe impl Sync for DisplayPlaneCapabilities2KHR<'_> {} impl ::core::default::Default for DisplayPlaneCapabilities2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), capabilities: DisplayPlaneCapabilitiesKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DisplayPlaneCapabilities2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PLANE_CAPABILITIES_2_KHR; } impl<'a> DisplayPlaneCapabilities2KHR<'a> { #[inline] pub fn capabilities(mut self, capabilities: DisplayPlaneCapabilitiesKHR) -> Self { self.capabilities = capabilities; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SharedPresentSurfaceCapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shared_present_supported_usage_flags: ImageUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SharedPresentSurfaceCapabilitiesKHR<'_> {} unsafe impl Sync for SharedPresentSurfaceCapabilitiesKHR<'_> {} impl ::core::default::Default for SharedPresentSurfaceCapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shared_present_supported_usage_flags: ImageUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SharedPresentSurfaceCapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SHARED_PRESENT_SURFACE_CAPABILITIES_KHR; } unsafe impl ExtendsSurfaceCapabilities2KHR for SharedPresentSurfaceCapabilitiesKHR<'_> {} impl<'a> SharedPresentSurfaceCapabilitiesKHR<'a> { #[inline] pub fn shared_present_supported_usage_flags( mut self, shared_present_supported_usage_flags: ImageUsageFlags, ) -> Self { self.shared_present_supported_usage_flags = shared_present_supported_usage_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevice16BitStorageFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub storage_buffer16_bit_access: Bool32, pub uniform_and_storage_buffer16_bit_access: Bool32, pub storage_push_constant16: Bool32, pub 
storage_input_output16: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevice16BitStorageFeatures<'_> {} unsafe impl Sync for PhysicalDevice16BitStorageFeatures<'_> {} impl ::core::default::Default for PhysicalDevice16BitStorageFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), storage_buffer16_bit_access: Bool32::default(), uniform_and_storage_buffer16_bit_access: Bool32::default(), storage_push_constant16: Bool32::default(), storage_input_output16: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevice16BitStorageFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice16BitStorageFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice16BitStorageFeatures<'_> {} impl<'a> PhysicalDevice16BitStorageFeatures<'a> { #[inline] pub fn storage_buffer16_bit_access(mut self, storage_buffer16_bit_access: bool) -> Self { self.storage_buffer16_bit_access = storage_buffer16_bit_access.into(); self } #[inline] pub fn uniform_and_storage_buffer16_bit_access( mut self, uniform_and_storage_buffer16_bit_access: bool, ) -> Self { self.uniform_and_storage_buffer16_bit_access = uniform_and_storage_buffer16_bit_access.into(); self } #[inline] pub fn storage_push_constant16(mut self, storage_push_constant16: bool) -> Self { self.storage_push_constant16 = storage_push_constant16.into(); self } #[inline] pub fn storage_input_output16(mut self, storage_input_output16: bool) -> Self { self.storage_input_output16 = storage_input_output16.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSubgroupProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub subgroup_size: u32, pub supported_stages: ShaderStageFlags, pub supported_operations: SubgroupFeatureFlags, pub quad_operations_in_all_stages: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSubgroupProperties<'_> {} unsafe impl Sync for PhysicalDeviceSubgroupProperties<'_> {} impl ::core::default::Default for PhysicalDeviceSubgroupProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), subgroup_size: u32::default(), supported_stages: ShaderStageFlags::default(), supported_operations: SubgroupFeatureFlags::default(), quad_operations_in_all_stages: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSubgroupProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBGROUP_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupProperties<'_> {} impl<'a> PhysicalDeviceSubgroupProperties<'a> { #[inline] pub fn subgroup_size(mut self, subgroup_size: u32) -> Self { self.subgroup_size = subgroup_size; self } #[inline] pub fn supported_stages(mut self, supported_stages: ShaderStageFlags) -> Self { self.supported_stages = supported_stages; self } #[inline] pub fn supported_operations(mut self, supported_operations: SubgroupFeatureFlags) -> Self { self.supported_operations = supported_operations; self } #[inline] pub fn quad_operations_in_all_stages(mut self, quad_operations_in_all_stages: bool) -> Self { self.quad_operations_in_all_stages = quad_operations_in_all_stages.into(); self } } #[repr(C)] 
#[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_subgroup_extended_types: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'_> {} unsafe impl Sync for PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_subgroup_extended_types: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'_> {} impl<'a> PhysicalDeviceShaderSubgroupExtendedTypesFeatures<'a> { #[inline] pub fn shader_subgroup_extended_types(mut self, shader_subgroup_extended_types: bool) -> Self { self.shader_subgroup_extended_types = shader_subgroup_extended_types.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferMemoryRequirementsInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: Buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferMemoryRequirementsInfo2<'_> {} unsafe impl Sync for BufferMemoryRequirementsInfo2<'_> {} impl ::core::default::Default for BufferMemoryRequirementsInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: Buffer::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferMemoryRequirementsInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_MEMORY_REQUIREMENTS_INFO_2; } impl<'a> BufferMemoryRequirementsInfo2<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceBufferMemoryRequirements<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_create_info: *const BufferCreateInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceBufferMemoryRequirements<'_> {} unsafe impl Sync for DeviceBufferMemoryRequirements<'_> {} impl ::core::default::Default for DeviceBufferMemoryRequirements<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_create_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceBufferMemoryRequirements<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_BUFFER_MEMORY_REQUIREMENTS; } impl<'a> DeviceBufferMemoryRequirements<'a> { #[inline] pub fn create_info(mut self, create_info: &'a BufferCreateInfo<'a>) -> Self { self.p_create_info = create_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageMemoryRequirementsInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
ImageMemoryRequirementsInfo2<'_> {} unsafe impl Sync for ImageMemoryRequirementsInfo2<'_> {} impl ::core::default::Default for ImageMemoryRequirementsInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageMemoryRequirementsInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_MEMORY_REQUIREMENTS_INFO_2; } pub unsafe trait ExtendsImageMemoryRequirementsInfo2 {} impl<'a> ImageMemoryRequirementsInfo2<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsImageMemoryRequirementsInfo2 + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageSparseMemoryRequirementsInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageSparseMemoryRequirementsInfo2<'_> {} unsafe impl Sync for ImageSparseMemoryRequirementsInfo2<'_> {} impl ::core::default::Default for ImageSparseMemoryRequirementsInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageSparseMemoryRequirementsInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2; } impl<'a> ImageSparseMemoryRequirementsInfo2<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceImageMemoryRequirements<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_create_info: *const ImageCreateInfo<'a>, pub plane_aspect: ImageAspectFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceImageMemoryRequirements<'_> {} unsafe impl Sync for DeviceImageMemoryRequirements<'_> {} impl ::core::default::Default for DeviceImageMemoryRequirements<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_create_info: ::core::ptr::null(), plane_aspect: ImageAspectFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceImageMemoryRequirements<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_IMAGE_MEMORY_REQUIREMENTS; } impl<'a> DeviceImageMemoryRequirements<'a> { #[inline] pub fn create_info(mut self, create_info: &'a ImageCreateInfo<'a>) -> Self { self.p_create_info = create_info; self } #[inline] pub fn plane_aspect(mut self, plane_aspect: ImageAspectFlags) -> Self { self.plane_aspect = plane_aspect; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryRequirements2<'a> { pub s_type: 
StructureType, pub p_next: *mut c_void, pub memory_requirements: MemoryRequirements, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryRequirements2<'_> {} unsafe impl Sync for MemoryRequirements2<'_> {} impl ::core::default::Default for MemoryRequirements2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_requirements: MemoryRequirements::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryRequirements2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_REQUIREMENTS_2; } pub unsafe trait ExtendsMemoryRequirements2 {} impl<'a> MemoryRequirements2<'a> { #[inline] pub fn memory_requirements(mut self, memory_requirements: MemoryRequirements) -> Self { self.memory_requirements = memory_requirements; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsMemoryRequirements2 + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SparseImageMemoryRequirements2<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_requirements: SparseImageMemoryRequirements, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SparseImageMemoryRequirements2<'_> {} unsafe impl Sync for SparseImageMemoryRequirements2<'_> {} impl ::core::default::Default for SparseImageMemoryRequirements2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_requirements: SparseImageMemoryRequirements::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SparseImageMemoryRequirements2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2; } impl<'a> SparseImageMemoryRequirements2<'a> { #[inline] pub fn memory_requirements( mut self, memory_requirements: SparseImageMemoryRequirements, ) -> Self { self.memory_requirements = memory_requirements; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePointClippingProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub point_clipping_behavior: PointClippingBehavior, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePointClippingProperties<'_> {} unsafe impl Sync for PhysicalDevicePointClippingProperties<'_> {} impl ::core::default::Default for PhysicalDevicePointClippingProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), point_clipping_behavior: PointClippingBehavior::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePointClippingProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePointClippingProperties<'_> {} impl<'a> PhysicalDevicePointClippingProperties<'a> { 
#[inline] pub fn point_clipping_behavior( mut self, point_clipping_behavior: PointClippingBehavior, ) -> Self { self.point_clipping_behavior = point_clipping_behavior; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryDedicatedRequirements<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub prefers_dedicated_allocation: Bool32, pub requires_dedicated_allocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryDedicatedRequirements<'_> {} unsafe impl Sync for MemoryDedicatedRequirements<'_> {} impl ::core::default::Default for MemoryDedicatedRequirements<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), prefers_dedicated_allocation: Bool32::default(), requires_dedicated_allocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryDedicatedRequirements<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_DEDICATED_REQUIREMENTS; } unsafe impl ExtendsMemoryRequirements2 for MemoryDedicatedRequirements<'_> {} impl<'a> MemoryDedicatedRequirements<'a> { #[inline] pub fn prefers_dedicated_allocation(mut self, prefers_dedicated_allocation: bool) -> Self { self.prefers_dedicated_allocation = prefers_dedicated_allocation.into(); self } #[inline] pub fn requires_dedicated_allocation(mut self, requires_dedicated_allocation: bool) -> Self { self.requires_dedicated_allocation = requires_dedicated_allocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryDedicatedAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub buffer: Buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryDedicatedAllocateInfo<'_> {} unsafe impl Sync for MemoryDedicatedAllocateInfo<'_> {} impl ::core::default::Default for MemoryDedicatedAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), buffer: Buffer::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryDedicatedAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_DEDICATED_ALLOCATE_INFO; } unsafe impl ExtendsMemoryAllocateInfo for MemoryDedicatedAllocateInfo<'_> {} impl<'a> MemoryDedicatedAllocateInfo<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewUsageCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub usage: ImageUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewUsageCreateInfo<'_> {} unsafe impl Sync for ImageViewUsageCreateInfo<'_> {} impl ::core::default::Default for ImageViewUsageCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), usage: ImageUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewUsageCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_USAGE_CREATE_INFO; } unsafe impl ExtendsImageViewCreateInfo for ImageViewUsageCreateInfo<'_> {} impl<'a> ImageViewUsageCreateInfo<'a> { #[inline] pub fn usage(mut self, usage: 
ImageUsageFlags) -> Self { self.usage = usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewSlicedCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub slice_offset: u32, pub slice_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewSlicedCreateInfoEXT<'_> {} unsafe impl Sync for ImageViewSlicedCreateInfoEXT<'_> {} impl ::core::default::Default for ImageViewSlicedCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), slice_offset: u32::default(), slice_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewSlicedCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_SLICED_CREATE_INFO_EXT; } unsafe impl ExtendsImageViewCreateInfo for ImageViewSlicedCreateInfoEXT<'_> {} impl<'a> ImageViewSlicedCreateInfoEXT<'a> { #[inline] pub fn slice_offset(mut self, slice_offset: u32) -> Self { self.slice_offset = slice_offset; self } #[inline] pub fn slice_count(mut self, slice_count: u32) -> Self { self.slice_count = slice_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineTessellationDomainOriginStateCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub domain_origin: TessellationDomainOrigin, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineTessellationDomainOriginStateCreateInfo<'_> {} unsafe impl Sync for PipelineTessellationDomainOriginStateCreateInfo<'_> {} impl ::core::default::Default for PipelineTessellationDomainOriginStateCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), domain_origin: TessellationDomainOrigin::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineTessellationDomainOriginStateCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO; } unsafe impl ExtendsPipelineTessellationStateCreateInfo for PipelineTessellationDomainOriginStateCreateInfo<'_> { } impl<'a> PipelineTessellationDomainOriginStateCreateInfo<'a> { #[inline] pub fn domain_origin(mut self, domain_origin: TessellationDomainOrigin) -> Self { self.domain_origin = domain_origin; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerYcbcrConversionInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub conversion: SamplerYcbcrConversion, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerYcbcrConversionInfo<'_> {} unsafe impl Sync for SamplerYcbcrConversionInfo<'_> {} impl ::core::default::Default for SamplerYcbcrConversionInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), conversion: SamplerYcbcrConversion::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerYcbcrConversionInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_YCBCR_CONVERSION_INFO; } unsafe impl ExtendsSamplerCreateInfo for SamplerYcbcrConversionInfo<'_> {} unsafe impl ExtendsImageViewCreateInfo for SamplerYcbcrConversionInfo<'_> {} impl<'a> SamplerYcbcrConversionInfo<'a> { #[inline] pub fn conversion(mut self, conversion: SamplerYcbcrConversion) -> Self { self.conversion = conversion; self } } 
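// --- Illustrative usage sketch (not part of the generated bindings) ---
// The repeated `push_next` docs above are easier to follow with a concrete chain.
// The helper below is a hypothetical example, not an ash API: it builds the input
// and output structs typically used with the `VK_KHR_get_surface_capabilities2`
// query, splicing `SharedPresentSurfaceCapabilitiesKHR` (which implements
// `ExtendsSurfaceCapabilities2KHR` above) behind `SurfaceCapabilities2KHR`, so the
// resulting chain is `SurfaceCapabilities2KHR -> SharedPresentSurfaceCapabilitiesKHR`.
// The `surface` handle is assumed to come from elsewhere in the application.
#[allow(dead_code)]
fn _surface_capabilities_chain_sketch<'a>(
    surface: SurfaceKHR,
    shared_present: &'a mut SharedPresentSurfaceCapabilitiesKHR<'a>,
) -> (PhysicalDeviceSurfaceInfo2KHR<'a>, SurfaceCapabilities2KHR<'a>) {
    // Input struct passed to the query: a plain builder setter; `s_type` and
    // `p_next` are filled in by `Default`.
    let info = PhysicalDeviceSurfaceInfo2KHR::default().surface(surface);
    // Output struct written by the driver: `push_next` prepends the extension
    // struct right behind the root, so both `surface_capabilities` and
    // `shared_present_supported_usage_flags` can be returned in one call.
    let caps = SurfaceCapabilities2KHR::default().push_next(shared_present);
    (info, caps)
}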
#[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerYcbcrConversionCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub format: Format, pub ycbcr_model: SamplerYcbcrModelConversion, pub ycbcr_range: SamplerYcbcrRange, pub components: ComponentMapping, pub x_chroma_offset: ChromaLocation, pub y_chroma_offset: ChromaLocation, pub chroma_filter: Filter, pub force_explicit_reconstruction: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerYcbcrConversionCreateInfo<'_> {} unsafe impl Sync for SamplerYcbcrConversionCreateInfo<'_> {} impl ::core::default::Default for SamplerYcbcrConversionCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), format: Format::default(), ycbcr_model: SamplerYcbcrModelConversion::default(), ycbcr_range: SamplerYcbcrRange::default(), components: ComponentMapping::default(), x_chroma_offset: ChromaLocation::default(), y_chroma_offset: ChromaLocation::default(), chroma_filter: Filter::default(), force_explicit_reconstruction: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerYcbcrConversionCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_YCBCR_CONVERSION_CREATE_INFO; } pub unsafe trait ExtendsSamplerYcbcrConversionCreateInfo {} impl<'a> SamplerYcbcrConversionCreateInfo<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn ycbcr_model(mut self, ycbcr_model: SamplerYcbcrModelConversion) -> Self { self.ycbcr_model = ycbcr_model; self } #[inline] pub fn ycbcr_range(mut self, ycbcr_range: SamplerYcbcrRange) -> Self { self.ycbcr_range = ycbcr_range; self } #[inline] pub fn components(mut self, components: ComponentMapping) -> Self { self.components = components; self } #[inline] pub fn x_chroma_offset(mut self, x_chroma_offset: ChromaLocation) -> Self { self.x_chroma_offset = x_chroma_offset; self } #[inline] pub fn y_chroma_offset(mut self, y_chroma_offset: ChromaLocation) -> Self { self.y_chroma_offset = y_chroma_offset; self } #[inline] pub fn chroma_filter(mut self, chroma_filter: Filter) -> Self { self.chroma_filter = chroma_filter; self } #[inline] pub fn force_explicit_reconstruction(mut self, force_explicit_reconstruction: bool) -> Self { self.force_explicit_reconstruction = force_explicit_reconstruction.into(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindImagePlaneMemoryInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub plane_aspect: ImageAspectFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindImagePlaneMemoryInfo<'_> {} unsafe impl Sync for BindImagePlaneMemoryInfo<'_> {} impl ::core::default::Default for BindImagePlaneMemoryInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), plane_aspect: ImageAspectFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindImagePlaneMemoryInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_PLANE_MEMORY_INFO; } unsafe impl ExtendsBindImageMemoryInfo for BindImagePlaneMemoryInfo<'_> {} impl<'a> BindImagePlaneMemoryInfo<'a> { #[inline] pub fn plane_aspect(mut self, plane_aspect: ImageAspectFlags) -> Self { self.plane_aspect = plane_aspect; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImagePlaneMemoryRequirementsInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub plane_aspect: ImageAspectFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImagePlaneMemoryRequirementsInfo<'_> {} unsafe impl Sync for ImagePlaneMemoryRequirementsInfo<'_> {} impl ::core::default::Default for ImagePlaneMemoryRequirementsInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), plane_aspect: ImageAspectFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImagePlaneMemoryRequirementsInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO; } unsafe impl ExtendsImageMemoryRequirementsInfo2 for ImagePlaneMemoryRequirementsInfo<'_> {} impl<'a> ImagePlaneMemoryRequirementsInfo<'a> { #[inline] pub fn plane_aspect(mut self, plane_aspect: ImageAspectFlags) -> Self { self.plane_aspect = plane_aspect; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSamplerYcbcrConversionFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub sampler_ycbcr_conversion: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSamplerYcbcrConversionFeatures<'_> {} unsafe impl Sync for PhysicalDeviceSamplerYcbcrConversionFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceSamplerYcbcrConversionFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), sampler_ycbcr_conversion: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSamplerYcbcrConversionFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSamplerYcbcrConversionFeatures<'_> {} unsafe impl 
ExtendsDeviceCreateInfo for PhysicalDeviceSamplerYcbcrConversionFeatures<'_> {} impl<'a> PhysicalDeviceSamplerYcbcrConversionFeatures<'a> { #[inline] pub fn sampler_ycbcr_conversion(mut self, sampler_ycbcr_conversion: bool) -> Self { self.sampler_ycbcr_conversion = sampler_ycbcr_conversion.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerYcbcrConversionImageFormatProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub combined_image_sampler_descriptor_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerYcbcrConversionImageFormatProperties<'_> {} unsafe impl Sync for SamplerYcbcrConversionImageFormatProperties<'_> {} impl ::core::default::Default for SamplerYcbcrConversionImageFormatProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), combined_image_sampler_descriptor_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerYcbcrConversionImageFormatProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES; } unsafe impl ExtendsImageFormatProperties2 for SamplerYcbcrConversionImageFormatProperties<'_> {} impl<'a> SamplerYcbcrConversionImageFormatProperties<'a> { #[inline] pub fn combined_image_sampler_descriptor_count( mut self, combined_image_sampler_descriptor_count: u32, ) -> Self { self.combined_image_sampler_descriptor_count = combined_image_sampler_descriptor_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct TextureLODGatherFormatPropertiesAMD<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub supports_texture_gather_lod_bias_amd: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for TextureLODGatherFormatPropertiesAMD<'_> {} unsafe impl Sync for TextureLODGatherFormatPropertiesAMD<'_> {} impl ::core::default::Default for TextureLODGatherFormatPropertiesAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), supports_texture_gather_lod_bias_amd: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for TextureLODGatherFormatPropertiesAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD; } unsafe impl ExtendsImageFormatProperties2 for TextureLODGatherFormatPropertiesAMD<'_> {} impl<'a> TextureLODGatherFormatPropertiesAMD<'a> { #[inline] pub fn supports_texture_gather_lod_bias_amd( mut self, supports_texture_gather_lod_bias_amd: bool, ) -> Self { self.supports_texture_gather_lod_bias_amd = supports_texture_gather_lod_bias_amd.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ConditionalRenderingBeginInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: Buffer, pub offset: DeviceSize, pub flags: ConditionalRenderingFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ConditionalRenderingBeginInfoEXT<'_> {} unsafe impl Sync for ConditionalRenderingBeginInfoEXT<'_> {} impl ::core::default::Default for ConditionalRenderingBeginInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: Buffer::default(), offset: DeviceSize::default(), flags: ConditionalRenderingFlagsEXT::default(), 
_marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ConditionalRenderingBeginInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CONDITIONAL_RENDERING_BEGIN_INFO_EXT; } impl<'a> ConditionalRenderingBeginInfoEXT<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn flags(mut self, flags: ConditionalRenderingFlagsEXT) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ProtectedSubmitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub protected_submit: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ProtectedSubmitInfo<'_> {} unsafe impl Sync for ProtectedSubmitInfo<'_> {} impl ::core::default::Default for ProtectedSubmitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), protected_submit: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ProtectedSubmitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PROTECTED_SUBMIT_INFO; } unsafe impl ExtendsSubmitInfo for ProtectedSubmitInfo<'_> {} impl<'a> ProtectedSubmitInfo<'a> { #[inline] pub fn protected_submit(mut self, protected_submit: bool) -> Self { self.protected_submit = protected_submit.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceProtectedMemoryFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub protected_memory: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceProtectedMemoryFeatures<'_> {} unsafe impl Sync for PhysicalDeviceProtectedMemoryFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceProtectedMemoryFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), protected_memory: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceProtectedMemoryFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceProtectedMemoryFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProtectedMemoryFeatures<'_> {} impl<'a> PhysicalDeviceProtectedMemoryFeatures<'a> { #[inline] pub fn protected_memory(mut self, protected_memory: bool) -> Self { self.protected_memory = protected_memory.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceProtectedMemoryProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub protected_no_fault: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceProtectedMemoryProperties<'_> {} unsafe impl Sync for PhysicalDeviceProtectedMemoryProperties<'_> {} impl ::core::default::Default for PhysicalDeviceProtectedMemoryProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), protected_no_fault: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceProtectedMemoryProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES; } unsafe impl 
ExtendsPhysicalDeviceProperties2 for PhysicalDeviceProtectedMemoryProperties<'_> {} impl<'a> PhysicalDeviceProtectedMemoryProperties<'a> { #[inline] pub fn protected_no_fault(mut self, protected_no_fault: bool) -> Self { self.protected_no_fault = protected_no_fault.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceQueueInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DeviceQueueCreateFlags, pub queue_family_index: u32, pub queue_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceQueueInfo2<'_> {} unsafe impl Sync for DeviceQueueInfo2<'_> {} impl ::core::default::Default for DeviceQueueInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DeviceQueueCreateFlags::default(), queue_family_index: u32::default(), queue_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceQueueInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_QUEUE_INFO_2; } impl<'a> DeviceQueueInfo2<'a> { #[inline] pub fn flags(mut self, flags: DeviceQueueCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn queue_family_index(mut self, queue_family_index: u32) -> Self { self.queue_family_index = queue_family_index; self } #[inline] pub fn queue_index(mut self, queue_index: u32) -> Self { self.queue_index = queue_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCoverageToColorStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCoverageToColorStateCreateFlagsNV, pub coverage_to_color_enable: Bool32, pub coverage_to_color_location: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCoverageToColorStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineCoverageToColorStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineCoverageToColorStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCoverageToColorStateCreateFlagsNV::default(), coverage_to_color_enable: Bool32::default(), coverage_to_color_location: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCoverageToColorStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineMultisampleStateCreateInfo for PipelineCoverageToColorStateCreateInfoNV<'_> { } impl<'a> PipelineCoverageToColorStateCreateInfoNV<'a> { #[inline] pub fn flags(mut self, flags: PipelineCoverageToColorStateCreateFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn coverage_to_color_enable(mut self, coverage_to_color_enable: bool) -> Self { self.coverage_to_color_enable = coverage_to_color_enable.into(); self } #[inline] pub fn coverage_to_color_location(mut self, coverage_to_color_location: u32) -> Self { self.coverage_to_color_location = coverage_to_color_location; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSamplerFilterMinmaxProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub filter_minmax_single_component_formats: Bool32, pub filter_minmax_image_component_mapping: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
PhysicalDeviceSamplerFilterMinmaxProperties<'_> {} unsafe impl Sync for PhysicalDeviceSamplerFilterMinmaxProperties<'_> {} impl ::core::default::Default for PhysicalDeviceSamplerFilterMinmaxProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), filter_minmax_single_component_formats: Bool32::default(), filter_minmax_image_component_mapping: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSamplerFilterMinmaxProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSamplerFilterMinmaxProperties<'_> {} impl<'a> PhysicalDeviceSamplerFilterMinmaxProperties<'a> { #[inline] pub fn filter_minmax_single_component_formats( mut self, filter_minmax_single_component_formats: bool, ) -> Self { self.filter_minmax_single_component_formats = filter_minmax_single_component_formats.into(); self } #[inline] pub fn filter_minmax_image_component_mapping( mut self, filter_minmax_image_component_mapping: bool, ) -> Self { self.filter_minmax_image_component_mapping = filter_minmax_image_component_mapping.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SampleLocationEXT { pub x: f32, pub y: f32, } impl SampleLocationEXT { #[inline] pub fn x(mut self, x: f32) -> Self { self.x = x; self } #[inline] pub fn y(mut self, y: f32) -> Self { self.y = y; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SampleLocationsInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub sample_locations_per_pixel: SampleCountFlags, pub sample_location_grid_size: Extent2D, pub sample_locations_count: u32, pub p_sample_locations: *const SampleLocationEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SampleLocationsInfoEXT<'_> {} unsafe impl Sync for SampleLocationsInfoEXT<'_> {} impl ::core::default::Default for SampleLocationsInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), sample_locations_per_pixel: SampleCountFlags::default(), sample_location_grid_size: Extent2D::default(), sample_locations_count: u32::default(), p_sample_locations: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SampleLocationsInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLE_LOCATIONS_INFO_EXT; } unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXT<'_> {} unsafe impl ExtendsImageMemoryBarrier2 for SampleLocationsInfoEXT<'_> {} impl<'a> SampleLocationsInfoEXT<'a> { #[inline] pub fn sample_locations_per_pixel( mut self, sample_locations_per_pixel: SampleCountFlags, ) -> Self { self.sample_locations_per_pixel = sample_locations_per_pixel; self } #[inline] pub fn sample_location_grid_size(mut self, sample_location_grid_size: Extent2D) -> Self { self.sample_location_grid_size = sample_location_grid_size; self } #[inline] pub fn sample_locations(mut self, sample_locations: &'a [SampleLocationEXT]) -> Self { self.sample_locations_count = sample_locations.len() as _; self.p_sample_locations = sample_locations.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct AttachmentSampleLocationsEXT<'a> { pub attachment_index: u32, pub 
sample_locations_info: SampleLocationsInfoEXT<'a>, pub _marker: PhantomData<&'a ()>, } impl<'a> AttachmentSampleLocationsEXT<'a> { #[inline] pub fn attachment_index(mut self, attachment_index: u32) -> Self { self.attachment_index = attachment_index; self } #[inline] pub fn sample_locations_info( mut self, sample_locations_info: SampleLocationsInfoEXT<'a>, ) -> Self { self.sample_locations_info = sample_locations_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SubpassSampleLocationsEXT<'a> { pub subpass_index: u32, pub sample_locations_info: SampleLocationsInfoEXT<'a>, pub _marker: PhantomData<&'a ()>, } impl<'a> SubpassSampleLocationsEXT<'a> { #[inline] pub fn subpass_index(mut self, subpass_index: u32) -> Self { self.subpass_index = subpass_index; self } #[inline] pub fn sample_locations_info( mut self, sample_locations_info: SampleLocationsInfoEXT<'a>, ) -> Self { self.sample_locations_info = sample_locations_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassSampleLocationsBeginInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub attachment_initial_sample_locations_count: u32, pub p_attachment_initial_sample_locations: *const AttachmentSampleLocationsEXT<'a>, pub post_subpass_sample_locations_count: u32, pub p_post_subpass_sample_locations: *const SubpassSampleLocationsEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassSampleLocationsBeginInfoEXT<'_> {} unsafe impl Sync for RenderPassSampleLocationsBeginInfoEXT<'_> {} impl ::core::default::Default for RenderPassSampleLocationsBeginInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), attachment_initial_sample_locations_count: u32::default(), p_attachment_initial_sample_locations: ::core::ptr::null(), post_subpass_sample_locations_count: u32::default(), p_post_subpass_sample_locations: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassSampleLocationsBeginInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT; } unsafe impl ExtendsRenderPassBeginInfo for RenderPassSampleLocationsBeginInfoEXT<'_> {} impl<'a> RenderPassSampleLocationsBeginInfoEXT<'a> { #[inline] pub fn attachment_initial_sample_locations( mut self, attachment_initial_sample_locations: &'a [AttachmentSampleLocationsEXT<'a>], ) -> Self { self.attachment_initial_sample_locations_count = attachment_initial_sample_locations.len() as _; self.p_attachment_initial_sample_locations = attachment_initial_sample_locations.as_ptr(); self } #[inline] pub fn post_subpass_sample_locations( mut self, post_subpass_sample_locations: &'a [SubpassSampleLocationsEXT<'a>], ) -> Self { self.post_subpass_sample_locations_count = post_subpass_sample_locations.len() as _; self.p_post_subpass_sample_locations = post_subpass_sample_locations.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineSampleLocationsStateCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub sample_locations_enable: Bool32, pub sample_locations_info: SampleLocationsInfoEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineSampleLocationsStateCreateInfoEXT<'_> {} unsafe impl Sync for 
PipelineSampleLocationsStateCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineSampleLocationsStateCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), sample_locations_enable: Bool32::default(), sample_locations_info: SampleLocationsInfoEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineSampleLocationsStateCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineMultisampleStateCreateInfo for PipelineSampleLocationsStateCreateInfoEXT<'_> { } impl<'a> PipelineSampleLocationsStateCreateInfoEXT<'a> { #[inline] pub fn sample_locations_enable(mut self, sample_locations_enable: bool) -> Self { self.sample_locations_enable = sample_locations_enable.into(); self } #[inline] pub fn sample_locations_info( mut self, sample_locations_info: SampleLocationsInfoEXT<'a>, ) -> Self { self.sample_locations_info = sample_locations_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSampleLocationsPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub sample_location_sample_counts: SampleCountFlags, pub max_sample_location_grid_size: Extent2D, pub sample_location_coordinate_range: [f32; 2], pub sample_location_sub_pixel_bits: u32, pub variable_sample_locations: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSampleLocationsPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceSampleLocationsPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceSampleLocationsPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), sample_location_sample_counts: SampleCountFlags::default(), max_sample_location_grid_size: Extent2D::default(), sample_location_coordinate_range: unsafe { ::core::mem::zeroed() }, sample_location_sub_pixel_bits: u32::default(), variable_sample_locations: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSampleLocationsPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSampleLocationsPropertiesEXT<'_> {} impl<'a> PhysicalDeviceSampleLocationsPropertiesEXT<'a> { #[inline] pub fn sample_location_sample_counts( mut self, sample_location_sample_counts: SampleCountFlags, ) -> Self { self.sample_location_sample_counts = sample_location_sample_counts; self } #[inline] pub fn max_sample_location_grid_size( mut self, max_sample_location_grid_size: Extent2D, ) -> Self { self.max_sample_location_grid_size = max_sample_location_grid_size; self } #[inline] pub fn sample_location_coordinate_range( mut self, sample_location_coordinate_range: [f32; 2], ) -> Self { self.sample_location_coordinate_range = sample_location_coordinate_range; self } #[inline] pub fn sample_location_sub_pixel_bits(mut self, sample_location_sub_pixel_bits: u32) -> Self { self.sample_location_sub_pixel_bits = sample_location_sub_pixel_bits; self } #[inline] pub fn variable_sample_locations(mut self, variable_sample_locations: bool) -> Self { self.variable_sample_locations = variable_sample_locations.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct 
MultisamplePropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_sample_location_grid_size: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MultisamplePropertiesEXT<'_> {} unsafe impl Sync for MultisamplePropertiesEXT<'_> {} impl ::core::default::Default for MultisamplePropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_sample_location_grid_size: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MultisamplePropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MULTISAMPLE_PROPERTIES_EXT; } impl<'a> MultisamplePropertiesEXT<'a> { #[inline] pub fn max_sample_location_grid_size( mut self, max_sample_location_grid_size: Extent2D, ) -> Self { self.max_sample_location_grid_size = max_sample_location_grid_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerReductionModeCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub reduction_mode: SamplerReductionMode, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerReductionModeCreateInfo<'_> {} unsafe impl Sync for SamplerReductionModeCreateInfo<'_> {} impl ::core::default::Default for SamplerReductionModeCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), reduction_mode: SamplerReductionMode::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerReductionModeCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_REDUCTION_MODE_CREATE_INFO; } unsafe impl ExtendsSamplerCreateInfo for SamplerReductionModeCreateInfo<'_> {} impl<'a> SamplerReductionModeCreateInfo<'a> { #[inline] pub fn reduction_mode(mut self, reduction_mode: SamplerReductionMode) -> Self { self.reduction_mode = reduction_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub advanced_blend_coherent_operations: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), advanced_blend_coherent_operations: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'_> {} impl<'a> PhysicalDeviceBlendOperationAdvancedFeaturesEXT<'a> { #[inline] pub fn advanced_blend_coherent_operations( mut self, advanced_blend_coherent_operations: bool, ) -> Self { self.advanced_blend_coherent_operations = advanced_blend_coherent_operations.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiDrawFeaturesEXT<'a> { pub s_type: 
StructureType, pub p_next: *mut c_void, pub multi_draw: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiDrawFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMultiDrawFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMultiDrawFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), multi_draw: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiDrawFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiDrawFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiDrawFeaturesEXT<'_> {} impl<'a> PhysicalDeviceMultiDrawFeaturesEXT<'a> { #[inline] pub fn multi_draw(mut self, multi_draw: bool) -> Self { self.multi_draw = multi_draw.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub advanced_blend_max_color_attachments: u32, pub advanced_blend_independent_blend: Bool32, pub advanced_blend_non_premultiplied_src_color: Bool32, pub advanced_blend_non_premultiplied_dst_color: Bool32, pub advanced_blend_correlated_overlap: Bool32, pub advanced_blend_all_operations: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), advanced_blend_max_color_attachments: u32::default(), advanced_blend_independent_blend: Bool32::default(), advanced_blend_non_premultiplied_src_color: Bool32::default(), advanced_blend_non_premultiplied_dst_color: Bool32::default(), advanced_blend_correlated_overlap: Bool32::default(), advanced_blend_all_operations: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'_> { } impl<'a> PhysicalDeviceBlendOperationAdvancedPropertiesEXT<'a> { #[inline] pub fn advanced_blend_max_color_attachments( mut self, advanced_blend_max_color_attachments: u32, ) -> Self { self.advanced_blend_max_color_attachments = advanced_blend_max_color_attachments; self } #[inline] pub fn advanced_blend_independent_blend( mut self, advanced_blend_independent_blend: bool, ) -> Self { self.advanced_blend_independent_blend = advanced_blend_independent_blend.into(); self } #[inline] pub fn advanced_blend_non_premultiplied_src_color( mut self, advanced_blend_non_premultiplied_src_color: bool, ) -> Self { self.advanced_blend_non_premultiplied_src_color = advanced_blend_non_premultiplied_src_color.into(); self } #[inline] pub fn advanced_blend_non_premultiplied_dst_color( mut self, advanced_blend_non_premultiplied_dst_color: bool, ) -> Self { self.advanced_blend_non_premultiplied_dst_color = advanced_blend_non_premultiplied_dst_color.into(); self } #[inline] pub fn 
advanced_blend_correlated_overlap( mut self, advanced_blend_correlated_overlap: bool, ) -> Self { self.advanced_blend_correlated_overlap = advanced_blend_correlated_overlap.into(); self } #[inline] pub fn advanced_blend_all_operations(mut self, advanced_blend_all_operations: bool) -> Self { self.advanced_blend_all_operations = advanced_blend_all_operations.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineColorBlendAdvancedStateCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_premultiplied: Bool32, pub dst_premultiplied: Bool32, pub blend_overlap: BlendOverlapEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineColorBlendAdvancedStateCreateInfoEXT<'_> {} unsafe impl Sync for PipelineColorBlendAdvancedStateCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineColorBlendAdvancedStateCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_premultiplied: Bool32::default(), dst_premultiplied: Bool32::default(), blend_overlap: BlendOverlapEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineColorBlendAdvancedStateCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineColorBlendStateCreateInfo for PipelineColorBlendAdvancedStateCreateInfoEXT<'_> { } impl<'a> PipelineColorBlendAdvancedStateCreateInfoEXT<'a> { #[inline] pub fn src_premultiplied(mut self, src_premultiplied: bool) -> Self { self.src_premultiplied = src_premultiplied.into(); self } #[inline] pub fn dst_premultiplied(mut self, dst_premultiplied: bool) -> Self { self.dst_premultiplied = dst_premultiplied.into(); self } #[inline] pub fn blend_overlap(mut self, blend_overlap: BlendOverlapEXT) -> Self { self.blend_overlap = blend_overlap; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceInlineUniformBlockFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub inline_uniform_block: Bool32, pub descriptor_binding_inline_uniform_block_update_after_bind: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceInlineUniformBlockFeatures<'_> {} unsafe impl Sync for PhysicalDeviceInlineUniformBlockFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceInlineUniformBlockFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), inline_uniform_block: Bool32::default(), descriptor_binding_inline_uniform_block_update_after_bind: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceInlineUniformBlockFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInlineUniformBlockFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInlineUniformBlockFeatures<'_> {} impl<'a> PhysicalDeviceInlineUniformBlockFeatures<'a> { #[inline] pub fn inline_uniform_block(mut self, inline_uniform_block: bool) -> Self { self.inline_uniform_block = inline_uniform_block.into(); self } #[inline] pub fn descriptor_binding_inline_uniform_block_update_after_bind( mut self, descriptor_binding_inline_uniform_block_update_after_bind: bool, ) -> Self { 
self.descriptor_binding_inline_uniform_block_update_after_bind = descriptor_binding_inline_uniform_block_update_after_bind.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceInlineUniformBlockProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_inline_uniform_block_size: u32, pub max_per_stage_descriptor_inline_uniform_blocks: u32, pub max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32, pub max_descriptor_set_inline_uniform_blocks: u32, pub max_descriptor_set_update_after_bind_inline_uniform_blocks: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceInlineUniformBlockProperties<'_> {} unsafe impl Sync for PhysicalDeviceInlineUniformBlockProperties<'_> {} impl ::core::default::Default for PhysicalDeviceInlineUniformBlockProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_inline_uniform_block_size: u32::default(), max_per_stage_descriptor_inline_uniform_blocks: u32::default(), max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32::default(), max_descriptor_set_inline_uniform_blocks: u32::default(), max_descriptor_set_update_after_bind_inline_uniform_blocks: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceInlineUniformBlockProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceInlineUniformBlockProperties<'_> {} impl<'a> PhysicalDeviceInlineUniformBlockProperties<'a> { #[inline] pub fn max_inline_uniform_block_size(mut self, max_inline_uniform_block_size: u32) -> Self { self.max_inline_uniform_block_size = max_inline_uniform_block_size; self } #[inline] pub fn max_per_stage_descriptor_inline_uniform_blocks( mut self, max_per_stage_descriptor_inline_uniform_blocks: u32, ) -> Self { self.max_per_stage_descriptor_inline_uniform_blocks = max_per_stage_descriptor_inline_uniform_blocks; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_inline_uniform_blocks( mut self, max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_inline_uniform_blocks = max_per_stage_descriptor_update_after_bind_inline_uniform_blocks; self } #[inline] pub fn max_descriptor_set_inline_uniform_blocks( mut self, max_descriptor_set_inline_uniform_blocks: u32, ) -> Self { self.max_descriptor_set_inline_uniform_blocks = max_descriptor_set_inline_uniform_blocks; self } #[inline] pub fn max_descriptor_set_update_after_bind_inline_uniform_blocks( mut self, max_descriptor_set_update_after_bind_inline_uniform_blocks: u32, ) -> Self { self.max_descriptor_set_update_after_bind_inline_uniform_blocks = max_descriptor_set_update_after_bind_inline_uniform_blocks; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct WriteDescriptorSetInlineUniformBlock<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub data_size: u32, pub p_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for WriteDescriptorSetInlineUniformBlock<'_> {} unsafe impl Sync for WriteDescriptorSetInlineUniformBlock<'_> {} impl ::core::default::Default for WriteDescriptorSetInlineUniformBlock<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), data_size: u32::default(), p_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for WriteDescriptorSetInlineUniformBlock<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK; } unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetInlineUniformBlock<'_> {} impl<'a> WriteDescriptorSetInlineUniformBlock<'a> { #[inline] pub fn data(mut self, data: &'a [u8]) -> Self { self.data_size = data.len() as _; self.p_data = data.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorPoolInlineUniformBlockCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub max_inline_uniform_block_bindings: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorPoolInlineUniformBlockCreateInfo<'_> {} unsafe impl Sync for DescriptorPoolInlineUniformBlockCreateInfo<'_> {} impl ::core::default::Default for DescriptorPoolInlineUniformBlockCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_inline_uniform_block_bindings: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorPoolInlineUniformBlockCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO; } unsafe impl ExtendsDescriptorPoolCreateInfo for DescriptorPoolInlineUniformBlockCreateInfo<'_> {} impl<'a> DescriptorPoolInlineUniformBlockCreateInfo<'a> { #[inline] pub fn max_inline_uniform_block_bindings( mut self, max_inline_uniform_block_bindings: u32, ) -> Self { self.max_inline_uniform_block_bindings = max_inline_uniform_block_bindings; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCoverageModulationStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCoverageModulationStateCreateFlagsNV, pub coverage_modulation_mode: CoverageModulationModeNV, pub coverage_modulation_table_enable: Bool32, pub coverage_modulation_table_count: u32, pub p_coverage_modulation_table: *const f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCoverageModulationStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineCoverageModulationStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineCoverageModulationStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCoverageModulationStateCreateFlagsNV::default(), coverage_modulation_mode: CoverageModulationModeNV::default(), coverage_modulation_table_enable: Bool32::default(), coverage_modulation_table_count: u32::default(), p_coverage_modulation_table: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCoverageModulationStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineMultisampleStateCreateInfo for PipelineCoverageModulationStateCreateInfoNV<'_> { } impl<'a> PipelineCoverageModulationStateCreateInfoNV<'a> { #[inline] pub fn flags(mut self, flags: PipelineCoverageModulationStateCreateFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn coverage_modulation_mode( mut self, coverage_modulation_mode: 
CoverageModulationModeNV, ) -> Self { self.coverage_modulation_mode = coverage_modulation_mode; self } #[inline] pub fn coverage_modulation_table_enable( mut self, coverage_modulation_table_enable: bool, ) -> Self { self.coverage_modulation_table_enable = coverage_modulation_table_enable.into(); self } #[inline] pub fn coverage_modulation_table(mut self, coverage_modulation_table: &'a [f32]) -> Self { self.coverage_modulation_table_count = coverage_modulation_table.len() as _; self.p_coverage_modulation_table = coverage_modulation_table.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageFormatListCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub view_format_count: u32, pub p_view_formats: *const Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageFormatListCreateInfo<'_> {} unsafe impl Sync for ImageFormatListCreateInfo<'_> {} impl ::core::default::Default for ImageFormatListCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), view_format_count: u32::default(), p_view_formats: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageFormatListCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_FORMAT_LIST_CREATE_INFO; } unsafe impl ExtendsImageCreateInfo for ImageFormatListCreateInfo<'_> {} unsafe impl ExtendsSwapchainCreateInfoKHR for ImageFormatListCreateInfo<'_> {} unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageFormatListCreateInfo<'_> {} impl<'a> ImageFormatListCreateInfo<'a> { #[inline] pub fn view_formats(mut self, view_formats: &'a [Format]) -> Self { self.view_format_count = view_formats.len() as _; self.p_view_formats = view_formats.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ValidationCacheCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ValidationCacheCreateFlagsEXT, pub initial_data_size: usize, pub p_initial_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ValidationCacheCreateInfoEXT<'_> {} unsafe impl Sync for ValidationCacheCreateInfoEXT<'_> {} impl ::core::default::Default for ValidationCacheCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ValidationCacheCreateFlagsEXT::default(), initial_data_size: usize::default(), p_initial_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ValidationCacheCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VALIDATION_CACHE_CREATE_INFO_EXT; } impl<'a> ValidationCacheCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: ValidationCacheCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn initial_data(mut self, initial_data: &'a [u8]) -> Self { self.initial_data_size = initial_data.len(); self.p_initial_data = initial_data.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ShaderModuleValidationCacheCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub validation_cache: ValidationCacheEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ShaderModuleValidationCacheCreateInfoEXT<'_> {} unsafe impl Sync for ShaderModuleValidationCacheCreateInfoEXT<'_> {} 
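// --- Illustrative usage sketch (not part of the generated bindings) ---
// Array-valued fields above come as `*_count` + pointer pairs; the generated slice
// setters (`view_formats`, `initial_data`, `coverage_modulation_table`, ...) fill both
// members in one call and borrow the slice for the struct's lifetime parameter. A
// minimal sketch using `ImageFormatListCreateInfo`; the helper name and the chosen
// image format/flags are hypothetical.
#[allow(dead_code)]
fn example_image_with_format_list(view_formats: &[Format]) {
    // The slice setter writes `view_format_count` and `p_view_formats` from the borrow.
    let mut format_list = ImageFormatListCreateInfo::default().view_formats(view_formats);
    // `ImageFormatListCreateInfo` implements `ExtendsImageCreateInfo`, so it can be
    // chained into an `ImageCreateInfo` before calling `create_image` (not shown).
    let _image_info = ImageCreateInfo::default()
        .image_type(ImageType::TYPE_2D)
        .format(Format::R8G8B8A8_UNORM)
        .flags(ImageCreateFlags::MUTABLE_FORMAT)
        .push_next(&mut format_list);
}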
impl ::core::default::Default for ShaderModuleValidationCacheCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), validation_cache: ValidationCacheEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ShaderModuleValidationCacheCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT; } unsafe impl ExtendsShaderModuleCreateInfo for ShaderModuleValidationCacheCreateInfoEXT<'_> {} unsafe impl ExtendsPipelineShaderStageCreateInfo for ShaderModuleValidationCacheCreateInfoEXT<'_> {} impl<'a> ShaderModuleValidationCacheCreateInfoEXT<'a> { #[inline] pub fn validation_cache(mut self, validation_cache: ValidationCacheEXT) -> Self { self.validation_cache = validation_cache; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance3Properties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_per_set_descriptors: u32, pub max_memory_allocation_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance3Properties<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance3Properties<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance3Properties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_per_set_descriptors: u32::default(), max_memory_allocation_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance3Properties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance3Properties<'_> {} impl<'a> PhysicalDeviceMaintenance3Properties<'a> { #[inline] pub fn max_per_set_descriptors(mut self, max_per_set_descriptors: u32) -> Self { self.max_per_set_descriptors = max_per_set_descriptors; self } #[inline] pub fn max_memory_allocation_size(mut self, max_memory_allocation_size: DeviceSize) -> Self { self.max_memory_allocation_size = max_memory_allocation_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance4Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub maintenance4: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance4Features<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance4Features<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance4Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), maintenance4: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance4Features<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMaintenance4Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMaintenance4Features<'_> {} impl<'a> PhysicalDeviceMaintenance4Features<'a> { #[inline] pub fn maintenance4(mut self, maintenance4: bool) -> Self { self.maintenance4 = maintenance4.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance4Properties<'a> { pub 
s_type: StructureType, pub p_next: *mut c_void, pub max_buffer_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance4Properties<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance4Properties<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance4Properties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_buffer_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance4Properties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance4Properties<'_> {} impl<'a> PhysicalDeviceMaintenance4Properties<'a> { #[inline] pub fn max_buffer_size(mut self, max_buffer_size: DeviceSize) -> Self { self.max_buffer_size = max_buffer_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance5FeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub maintenance5: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance5FeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance5FeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance5FeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), maintenance5: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance5FeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMaintenance5FeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMaintenance5FeaturesKHR<'_> {} impl<'a> PhysicalDeviceMaintenance5FeaturesKHR<'a> { #[inline] pub fn maintenance5(mut self, maintenance5: bool) -> Self { self.maintenance5 = maintenance5.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance5PropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub early_fragment_multisample_coverage_after_sample_counting: Bool32, pub early_fragment_sample_mask_test_before_sample_counting: Bool32, pub depth_stencil_swizzle_one_support: Bool32, pub polygon_mode_point_size: Bool32, pub non_strict_single_pixel_wide_lines_use_parallelogram: Bool32, pub non_strict_wide_lines_use_parallelogram: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance5PropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance5PropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance5PropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), early_fragment_multisample_coverage_after_sample_counting: Bool32::default(), early_fragment_sample_mask_test_before_sample_counting: Bool32::default(), depth_stencil_swizzle_one_support: Bool32::default(), polygon_mode_point_size: Bool32::default(), non_strict_single_pixel_wide_lines_use_parallelogram: Bool32::default(), non_strict_wide_lines_use_parallelogram: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance5PropertiesKHR<'a> { const STRUCTURE_TYPE: 
StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance5PropertiesKHR<'_> {} impl<'a> PhysicalDeviceMaintenance5PropertiesKHR<'a> { #[inline] pub fn early_fragment_multisample_coverage_after_sample_counting( mut self, early_fragment_multisample_coverage_after_sample_counting: bool, ) -> Self { self.early_fragment_multisample_coverage_after_sample_counting = early_fragment_multisample_coverage_after_sample_counting.into(); self } #[inline] pub fn early_fragment_sample_mask_test_before_sample_counting( mut self, early_fragment_sample_mask_test_before_sample_counting: bool, ) -> Self { self.early_fragment_sample_mask_test_before_sample_counting = early_fragment_sample_mask_test_before_sample_counting.into(); self } #[inline] pub fn depth_stencil_swizzle_one_support( mut self, depth_stencil_swizzle_one_support: bool, ) -> Self { self.depth_stencil_swizzle_one_support = depth_stencil_swizzle_one_support.into(); self } #[inline] pub fn polygon_mode_point_size(mut self, polygon_mode_point_size: bool) -> Self { self.polygon_mode_point_size = polygon_mode_point_size.into(); self } #[inline] pub fn non_strict_single_pixel_wide_lines_use_parallelogram( mut self, non_strict_single_pixel_wide_lines_use_parallelogram: bool, ) -> Self { self.non_strict_single_pixel_wide_lines_use_parallelogram = non_strict_single_pixel_wide_lines_use_parallelogram.into(); self } #[inline] pub fn non_strict_wide_lines_use_parallelogram( mut self, non_strict_wide_lines_use_parallelogram: bool, ) -> Self { self.non_strict_wide_lines_use_parallelogram = non_strict_wide_lines_use_parallelogram.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance6FeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub maintenance6: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance6FeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance6FeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance6FeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), maintenance6: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance6FeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMaintenance6FeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMaintenance6FeaturesKHR<'_> {} impl<'a> PhysicalDeviceMaintenance6FeaturesKHR<'a> { #[inline] pub fn maintenance6(mut self, maintenance6: bool) -> Self { self.maintenance6 = maintenance6.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMaintenance6PropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub block_texel_view_compatible_multiple_layers: Bool32, pub max_combined_image_sampler_descriptor_count: u32, pub fragment_shading_rate_clamp_combiner_inputs: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMaintenance6PropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceMaintenance6PropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceMaintenance6PropertiesKHR<'_> { #[inline] fn default() -> Self { Self { 
s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), block_texel_view_compatible_multiple_layers: Bool32::default(), max_combined_image_sampler_descriptor_count: u32::default(), fragment_shading_rate_clamp_combiner_inputs: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMaintenance6PropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance6PropertiesKHR<'_> {} impl<'a> PhysicalDeviceMaintenance6PropertiesKHR<'a> { #[inline] pub fn block_texel_view_compatible_multiple_layers( mut self, block_texel_view_compatible_multiple_layers: bool, ) -> Self { self.block_texel_view_compatible_multiple_layers = block_texel_view_compatible_multiple_layers.into(); self } #[inline] pub fn max_combined_image_sampler_descriptor_count( mut self, max_combined_image_sampler_descriptor_count: u32, ) -> Self { self.max_combined_image_sampler_descriptor_count = max_combined_image_sampler_descriptor_count; self } #[inline] pub fn fragment_shading_rate_clamp_combiner_inputs( mut self, fragment_shading_rate_clamp_combiner_inputs: bool, ) -> Self { self.fragment_shading_rate_clamp_combiner_inputs = fragment_shading_rate_clamp_combiner_inputs.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingAreaInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub view_mask: u32, pub color_attachment_count: u32, pub p_color_attachment_formats: *const Format, pub depth_attachment_format: Format, pub stencil_attachment_format: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingAreaInfoKHR<'_> {} unsafe impl Sync for RenderingAreaInfoKHR<'_> {} impl ::core::default::Default for RenderingAreaInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), view_mask: u32::default(), color_attachment_count: u32::default(), p_color_attachment_formats: ::core::ptr::null(), depth_attachment_format: Format::default(), stencil_attachment_format: Format::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingAreaInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_AREA_INFO_KHR; } impl<'a> RenderingAreaInfoKHR<'a> { #[inline] pub fn view_mask(mut self, view_mask: u32) -> Self { self.view_mask = view_mask; self } #[inline] pub fn color_attachment_formats(mut self, color_attachment_formats: &'a [Format]) -> Self { self.color_attachment_count = color_attachment_formats.len() as _; self.p_color_attachment_formats = color_attachment_formats.as_ptr(); self } #[inline] pub fn depth_attachment_format(mut self, depth_attachment_format: Format) -> Self { self.depth_attachment_format = depth_attachment_format; self } #[inline] pub fn stencil_attachment_format(mut self, stencil_attachment_format: Format) -> Self { self.stencil_attachment_format = stencil_attachment_format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetLayoutSupport<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub supported: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetLayoutSupport<'_> {} unsafe impl Sync for DescriptorSetLayoutSupport<'_> {} impl ::core::default::Default for DescriptorSetLayoutSupport<'_> { #[inline] fn default() -> 
Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), supported: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetLayoutSupport<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_LAYOUT_SUPPORT; } pub unsafe trait ExtendsDescriptorSetLayoutSupport {} impl<'a> DescriptorSetLayoutSupport<'a> { #[inline] pub fn supported(mut self, supported: bool) -> Self { self.supported = supported.into(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsDescriptorSetLayoutSupport + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderDrawParametersFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_draw_parameters: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderDrawParametersFeatures<'_> {} unsafe impl Sync for PhysicalDeviceShaderDrawParametersFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceShaderDrawParametersFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_draw_parameters: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderDrawParametersFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderDrawParametersFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDrawParametersFeatures<'_> {} impl<'a> PhysicalDeviceShaderDrawParametersFeatures<'a> { #[inline] pub fn shader_draw_parameters(mut self, shader_draw_parameters: bool) -> Self { self.shader_draw_parameters = shader_draw_parameters.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderFloat16Int8Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_float16: Bool32, pub shader_int8: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderFloat16Int8Features<'_> {} unsafe impl Sync for PhysicalDeviceShaderFloat16Int8Features<'_> {} impl ::core::default::Default for PhysicalDeviceShaderFloat16Int8Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_float16: Bool32::default(), shader_int8: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderFloat16Int8Features<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderFloat16Int8Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderFloat16Int8Features<'_> {} impl<'a> PhysicalDeviceShaderFloat16Int8Features<'a> { #[inline]
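// Illustrative usage sketch (not part of the generated bindings): the `push_next` method above is
// how extension structs are chained through `p_next`. A minimal sketch, assuming `use ash::vk;`,
// an `ash::Device` named `device`, and a `vk::DescriptorSetLayoutCreateInfo` named `layout_info`
// are in scope:
//
//     let mut variable_counts = vk::DescriptorSetVariableDescriptorCountLayoutSupport::default();
//     let mut support = vk::DescriptorSetLayoutSupport::default().push_next(&mut variable_counts);
//     unsafe { device.get_descriptor_set_layout_support(&layout_info, &mut support) };
//     // The driver fills both structs; the chain is `support -> variable_counts`.
//     let layout_supported = support.supported == vk::TRUE;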
pub fn shader_float16(mut self, shader_float16: bool) -> Self { self.shader_float16 = shader_float16.into(); self } #[inline] pub fn shader_int8(mut self, shader_int8: bool) -> Self { self.shader_int8 = shader_int8.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFloatControlsProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub denorm_behavior_independence: ShaderFloatControlsIndependence, pub rounding_mode_independence: ShaderFloatControlsIndependence, pub shader_signed_zero_inf_nan_preserve_float16: Bool32, pub shader_signed_zero_inf_nan_preserve_float32: Bool32, pub shader_signed_zero_inf_nan_preserve_float64: Bool32, pub shader_denorm_preserve_float16: Bool32, pub shader_denorm_preserve_float32: Bool32, pub shader_denorm_preserve_float64: Bool32, pub shader_denorm_flush_to_zero_float16: Bool32, pub shader_denorm_flush_to_zero_float32: Bool32, pub shader_denorm_flush_to_zero_float64: Bool32, pub shader_rounding_mode_rte_float16: Bool32, pub shader_rounding_mode_rte_float32: Bool32, pub shader_rounding_mode_rte_float64: Bool32, pub shader_rounding_mode_rtz_float16: Bool32, pub shader_rounding_mode_rtz_float32: Bool32, pub shader_rounding_mode_rtz_float64: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFloatControlsProperties<'_> {} unsafe impl Sync for PhysicalDeviceFloatControlsProperties<'_> {} impl ::core::default::Default for PhysicalDeviceFloatControlsProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), denorm_behavior_independence: ShaderFloatControlsIndependence::default(), rounding_mode_independence: ShaderFloatControlsIndependence::default(), shader_signed_zero_inf_nan_preserve_float16: Bool32::default(), shader_signed_zero_inf_nan_preserve_float32: Bool32::default(), shader_signed_zero_inf_nan_preserve_float64: Bool32::default(), shader_denorm_preserve_float16: Bool32::default(), shader_denorm_preserve_float32: Bool32::default(), shader_denorm_preserve_float64: Bool32::default(), shader_denorm_flush_to_zero_float16: Bool32::default(), shader_denorm_flush_to_zero_float32: Bool32::default(), shader_denorm_flush_to_zero_float64: Bool32::default(), shader_rounding_mode_rte_float16: Bool32::default(), shader_rounding_mode_rte_float32: Bool32::default(), shader_rounding_mode_rte_float64: Bool32::default(), shader_rounding_mode_rtz_float16: Bool32::default(), shader_rounding_mode_rtz_float32: Bool32::default(), shader_rounding_mode_rtz_float64: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFloatControlsProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFloatControlsProperties<'_> {} impl<'a> PhysicalDeviceFloatControlsProperties<'a> { #[inline] pub fn denorm_behavior_independence( mut self, denorm_behavior_independence: ShaderFloatControlsIndependence, ) -> Self { self.denorm_behavior_independence = denorm_behavior_independence; self } #[inline] pub fn rounding_mode_independence( mut self, rounding_mode_independence: ShaderFloatControlsIndependence, ) -> Self { self.rounding_mode_independence = rounding_mode_independence; self } #[inline] pub fn shader_signed_zero_inf_nan_preserve_float16( mut self, shader_signed_zero_inf_nan_preserve_float16: bool, ) -> Self { 
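// Illustrative usage sketch (not part of the generated bindings): `PhysicalDeviceFloatControlsProperties`
// extends `PhysicalDeviceProperties2`, so applications normally read it back through a `push_next`
// chain rather than set it with the builder methods in this impl. A minimal query sketch, assuming
// `use ash::vk;`, an `ash::Instance` named `instance`, and a `vk::PhysicalDevice` named `pdevice`:
//
//     let mut float_controls = vk::PhysicalDeviceFloatControlsProperties::default();
//     let mut props2 = vk::PhysicalDeviceProperties2::default().push_next(&mut float_controls);
//     unsafe { instance.get_physical_device_properties2(pdevice, &mut props2) };
//     let rte_f16 = float_controls.shader_rounding_mode_rte_float16 == vk::TRUE;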
self.shader_signed_zero_inf_nan_preserve_float16 = shader_signed_zero_inf_nan_preserve_float16.into(); self } #[inline] pub fn shader_signed_zero_inf_nan_preserve_float32( mut self, shader_signed_zero_inf_nan_preserve_float32: bool, ) -> Self { self.shader_signed_zero_inf_nan_preserve_float32 = shader_signed_zero_inf_nan_preserve_float32.into(); self } #[inline] pub fn shader_signed_zero_inf_nan_preserve_float64( mut self, shader_signed_zero_inf_nan_preserve_float64: bool, ) -> Self { self.shader_signed_zero_inf_nan_preserve_float64 = shader_signed_zero_inf_nan_preserve_float64.into(); self } #[inline] pub fn shader_denorm_preserve_float16(mut self, shader_denorm_preserve_float16: bool) -> Self { self.shader_denorm_preserve_float16 = shader_denorm_preserve_float16.into(); self } #[inline] pub fn shader_denorm_preserve_float32(mut self, shader_denorm_preserve_float32: bool) -> Self { self.shader_denorm_preserve_float32 = shader_denorm_preserve_float32.into(); self } #[inline] pub fn shader_denorm_preserve_float64(mut self, shader_denorm_preserve_float64: bool) -> Self { self.shader_denorm_preserve_float64 = shader_denorm_preserve_float64.into(); self } #[inline] pub fn shader_denorm_flush_to_zero_float16( mut self, shader_denorm_flush_to_zero_float16: bool, ) -> Self { self.shader_denorm_flush_to_zero_float16 = shader_denorm_flush_to_zero_float16.into(); self } #[inline] pub fn shader_denorm_flush_to_zero_float32( mut self, shader_denorm_flush_to_zero_float32: bool, ) -> Self { self.shader_denorm_flush_to_zero_float32 = shader_denorm_flush_to_zero_float32.into(); self } #[inline] pub fn shader_denorm_flush_to_zero_float64( mut self, shader_denorm_flush_to_zero_float64: bool, ) -> Self { self.shader_denorm_flush_to_zero_float64 = shader_denorm_flush_to_zero_float64.into(); self } #[inline] pub fn shader_rounding_mode_rte_float16( mut self, shader_rounding_mode_rte_float16: bool, ) -> Self { self.shader_rounding_mode_rte_float16 = shader_rounding_mode_rte_float16.into(); self } #[inline] pub fn shader_rounding_mode_rte_float32( mut self, shader_rounding_mode_rte_float32: bool, ) -> Self { self.shader_rounding_mode_rte_float32 = shader_rounding_mode_rte_float32.into(); self } #[inline] pub fn shader_rounding_mode_rte_float64( mut self, shader_rounding_mode_rte_float64: bool, ) -> Self { self.shader_rounding_mode_rte_float64 = shader_rounding_mode_rte_float64.into(); self } #[inline] pub fn shader_rounding_mode_rtz_float16( mut self, shader_rounding_mode_rtz_float16: bool, ) -> Self { self.shader_rounding_mode_rtz_float16 = shader_rounding_mode_rtz_float16.into(); self } #[inline] pub fn shader_rounding_mode_rtz_float32( mut self, shader_rounding_mode_rtz_float32: bool, ) -> Self { self.shader_rounding_mode_rtz_float32 = shader_rounding_mode_rtz_float32.into(); self } #[inline] pub fn shader_rounding_mode_rtz_float64( mut self, shader_rounding_mode_rtz_float64: bool, ) -> Self { self.shader_rounding_mode_rtz_float64 = shader_rounding_mode_rtz_float64.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceHostQueryResetFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub host_query_reset: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceHostQueryResetFeatures<'_> {} unsafe impl Sync for PhysicalDeviceHostQueryResetFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceHostQueryResetFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), host_query_reset: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceHostQueryResetFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceHostQueryResetFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceHostQueryResetFeatures<'_> {} impl<'a> PhysicalDeviceHostQueryResetFeatures<'a> { #[inline] pub fn host_query_reset(mut self, host_query_reset: bool) -> Self { self.host_query_reset = host_query_reset.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct NativeBufferUsage2ANDROID { pub consumer: u64, pub producer: u64, } impl NativeBufferUsage2ANDROID { #[inline] pub fn consumer(mut self, consumer: u64) -> Self { self.consumer = consumer; self } #[inline] pub fn producer(mut self, producer: u64) -> Self { self.producer = producer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct NativeBufferANDROID<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle: *const c_void, pub stride: c_int, pub format: c_int, pub usage: c_int, pub usage2: NativeBufferUsage2ANDROID, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for NativeBufferANDROID<'_> {} unsafe impl Sync for NativeBufferANDROID<'_> {} impl ::core::default::Default for NativeBufferANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle: ::core::ptr::null(), stride: c_int::default(), format: c_int::default(), usage: c_int::default(), usage2: NativeBufferUsage2ANDROID::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for NativeBufferANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::NATIVE_BUFFER_ANDROID; } impl<'a> NativeBufferANDROID<'a> { #[inline] pub fn handle(mut self, handle: *const c_void) -> Self { self.handle = handle; self } #[inline] pub fn stride(mut self, stride: c_int) -> Self { self.stride = stride; self } #[inline] pub fn format(mut self, format: c_int) -> Self { self.format = format; self } #[inline] pub fn usage(mut self, usage: c_int) -> Self { self.usage = usage; self } #[inline] pub fn usage2(mut self, usage2: NativeBufferUsage2ANDROID) -> Self { self.usage2 = usage2; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainImageCreateInfoANDROID<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub usage: SwapchainImageUsageFlagsANDROID, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainImageCreateInfoANDROID<'_> {} unsafe impl Sync for SwapchainImageCreateInfoANDROID<'_> {} impl ::core::default::Default for SwapchainImageCreateInfoANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), usage: SwapchainImageUsageFlagsANDROID::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainImageCreateInfoANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID; } impl<'a> SwapchainImageCreateInfoANDROID<'a> { #[inline] pub fn usage(mut self, usage: SwapchainImageUsageFlagsANDROID) -> Self { self.usage = usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] 
#[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePresentationPropertiesANDROID<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub shared_image: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePresentationPropertiesANDROID<'_> {} unsafe impl Sync for PhysicalDevicePresentationPropertiesANDROID<'_> {} impl ::core::default::Default for PhysicalDevicePresentationPropertiesANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), shared_image: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePresentationPropertiesANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID; } impl<'a> PhysicalDevicePresentationPropertiesANDROID<'a> { #[inline] pub fn shared_image(mut self, shared_image: bool) -> Self { self.shared_image = shared_image.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ShaderResourceUsageAMD { pub num_used_vgprs: u32, pub num_used_sgprs: u32, pub lds_size_per_local_work_group: u32, pub lds_usage_size_in_bytes: usize, pub scratch_mem_usage_in_bytes: usize, } impl ShaderResourceUsageAMD { #[inline] pub fn num_used_vgprs(mut self, num_used_vgprs: u32) -> Self { self.num_used_vgprs = num_used_vgprs; self } #[inline] pub fn num_used_sgprs(mut self, num_used_sgprs: u32) -> Self { self.num_used_sgprs = num_used_sgprs; self } #[inline] pub fn lds_size_per_local_work_group(mut self, lds_size_per_local_work_group: u32) -> Self { self.lds_size_per_local_work_group = lds_size_per_local_work_group; self } #[inline] pub fn lds_usage_size_in_bytes(mut self, lds_usage_size_in_bytes: usize) -> Self { self.lds_usage_size_in_bytes = lds_usage_size_in_bytes; self } #[inline] pub fn scratch_mem_usage_in_bytes(mut self, scratch_mem_usage_in_bytes: usize) -> Self { self.scratch_mem_usage_in_bytes = scratch_mem_usage_in_bytes; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ShaderStatisticsInfoAMD { pub shader_stage_mask: ShaderStageFlags, pub resource_usage: ShaderResourceUsageAMD, pub num_physical_vgprs: u32, pub num_physical_sgprs: u32, pub num_available_vgprs: u32, pub num_available_sgprs: u32, pub compute_work_group_size: [u32; 3], } impl ::core::default::Default for ShaderStatisticsInfoAMD { #[inline] fn default() -> Self { Self { shader_stage_mask: ShaderStageFlags::default(), resource_usage: ShaderResourceUsageAMD::default(), num_physical_vgprs: u32::default(), num_physical_sgprs: u32::default(), num_available_vgprs: u32::default(), num_available_sgprs: u32::default(), compute_work_group_size: unsafe { ::core::mem::zeroed() }, } } } impl ShaderStatisticsInfoAMD { #[inline] pub fn shader_stage_mask(mut self, shader_stage_mask: ShaderStageFlags) -> Self { self.shader_stage_mask = shader_stage_mask; self } #[inline] pub fn resource_usage(mut self, resource_usage: ShaderResourceUsageAMD) -> Self { self.resource_usage = resource_usage; self } #[inline] pub fn num_physical_vgprs(mut self, num_physical_vgprs: u32) -> Self { self.num_physical_vgprs = num_physical_vgprs; self } #[inline] pub fn num_physical_sgprs(mut self, num_physical_sgprs: u32) -> Self { self.num_physical_sgprs = num_physical_sgprs; self } #[inline] pub fn num_available_vgprs(mut self, num_available_vgprs: u32) -> Self { self.num_available_vgprs = 
num_available_vgprs; self } #[inline] pub fn num_available_sgprs(mut self, num_available_sgprs: u32) -> Self { self.num_available_sgprs = num_available_sgprs; self } #[inline] pub fn compute_work_group_size(mut self, compute_work_group_size: [u32; 3]) -> Self { self.compute_work_group_size = compute_work_group_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceQueueGlobalPriorityCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub global_priority: QueueGlobalPriorityKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceQueueGlobalPriorityCreateInfoKHR<'_> {} unsafe impl Sync for DeviceQueueGlobalPriorityCreateInfoKHR<'_> {} impl ::core::default::Default for DeviceQueueGlobalPriorityCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), global_priority: QueueGlobalPriorityKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceQueueGlobalPriorityCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR; } unsafe impl ExtendsDeviceQueueCreateInfo for DeviceQueueGlobalPriorityCreateInfoKHR<'_> {} impl<'a> DeviceQueueGlobalPriorityCreateInfoKHR<'a> { #[inline] pub fn global_priority(mut self, global_priority: QueueGlobalPriorityKHR) -> Self { self.global_priority = global_priority; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub global_priority_query: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), global_priority_query: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'_> {} impl<'a> PhysicalDeviceGlobalPriorityQueryFeaturesKHR<'a> { #[inline] pub fn global_priority_query(mut self, global_priority_query: bool) -> Self { self.global_priority_query = global_priority_query.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueueFamilyGlobalPriorityPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub priority_count: u32, pub priorities: [QueueGlobalPriorityKHR; MAX_GLOBAL_PRIORITY_SIZE_KHR], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueueFamilyGlobalPriorityPropertiesKHR<'_> {} unsafe impl Sync for QueueFamilyGlobalPriorityPropertiesKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for QueueFamilyGlobalPriorityPropertiesKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("QueueFamilyGlobalPriorityPropertiesKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("priority_count", &self.priority_count) .field("priorities", 
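// Illustrative usage sketch (not part of the generated bindings): `DeviceQueueGlobalPriorityCreateInfoKHR`
// above extends `DeviceQueueCreateInfo`, so a global priority is requested at queue creation time.
// A minimal sketch, assuming `use ash::vk;` and a queue family index named `family_index`:
//
//     let mut priority = vk::DeviceQueueGlobalPriorityCreateInfoKHR::default()
//         .global_priority(vk::QueueGlobalPriorityKHR::HIGH);
//     let queue_priorities = [1.0f32];
//     let queue_info = vk::DeviceQueueCreateInfo::default()
//         .queue_family_index(family_index)
//         .queue_priorities(&queue_priorities)
//         .push_next(&mut priority);
//
// `QueueFamilyGlobalPriorityPropertiesKHR` is the matching read-back struct: the driver writes
// `priority_count` entries into the fixed `priorities` array, and `priorities_as_slice()` (below)
// returns only that valid prefix.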
&self.priorities_as_slice()) .finish() } } impl ::core::default::Default for QueueFamilyGlobalPriorityPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), priority_count: u32::default(), priorities: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueueFamilyGlobalPriorityPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR; } unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyGlobalPriorityPropertiesKHR<'_> {} impl<'a> QueueFamilyGlobalPriorityPropertiesKHR<'a> { #[inline] pub fn priorities(mut self, priorities: &'_ [QueueGlobalPriorityKHR]) -> Self { self.priority_count = priorities.len() as _; self.priorities[..priorities.len()].copy_from_slice(priorities); self } #[inline] pub fn priorities_as_slice(&self) -> &[QueueGlobalPriorityKHR] { &self.priorities[..self.priority_count as _] } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugUtilsObjectNameInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub object_type: ObjectType, pub object_handle: u64, pub p_object_name: *const c_char, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugUtilsObjectNameInfoEXT<'_> {} unsafe impl Sync for DebugUtilsObjectNameInfoEXT<'_> {} impl ::core::default::Default for DebugUtilsObjectNameInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), object_type: ObjectType::default(), object_handle: u64::default(), p_object_name: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugUtilsObjectNameInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_OBJECT_NAME_INFO_EXT; } unsafe impl ExtendsPipelineShaderStageCreateInfo for DebugUtilsObjectNameInfoEXT<'_> {} impl<'a> DebugUtilsObjectNameInfoEXT<'a> { #[inline] pub fn object_handle<T: Handle>(mut self, object_handle: T) -> Self { self.object_handle = object_handle.as_raw(); self.object_type = T::TYPE; self } #[inline] pub fn object_name(mut self, object_name: &'a CStr) -> Self { self.p_object_name = object_name.as_ptr(); self } #[inline] pub unsafe fn object_name_as_c_str(&self) -> Option<&CStr> { if self.p_object_name.is_null() { None } else { Some(CStr::from_ptr(self.p_object_name)) } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugUtilsObjectTagInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub object_type: ObjectType, pub object_handle: u64, pub tag_name: u64, pub tag_size: usize, pub p_tag: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugUtilsObjectTagInfoEXT<'_> {} unsafe impl Sync for DebugUtilsObjectTagInfoEXT<'_> {} impl ::core::default::Default for DebugUtilsObjectTagInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), object_type: ObjectType::default(), object_handle: u64::default(), tag_name: u64::default(), tag_size: usize::default(), p_tag: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugUtilsObjectTagInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_OBJECT_TAG_INFO_EXT; } impl<'a> DebugUtilsObjectTagInfoEXT<'a> { #[inline] pub fn object_handle<T: Handle>(mut self, object_handle: T) -> Self { self.object_handle =
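// Illustrative usage sketch (not part of the generated bindings): `object_handle` accepts any
// strongly typed Vulkan handle and records both its raw `u64` value and its `ObjectType`. A
// minimal sketch of naming an object for validation-layer and debugger output, assuming
// `use ash::vk;`, `use std::ffi::CString;`, and a `vk::Buffer` named `buffer`:
//
//     let name = CString::new("main vertex buffer").unwrap();
//     let name_info = vk::DebugUtilsObjectNameInfoEXT::default()
//         .object_handle(buffer)   // also sets object_type = ObjectType::BUFFER
//         .object_name(&name);
//     // `name_info` is then passed to vkSetDebugUtilsObjectNameEXT via the
//     // VK_EXT_debug_utils device wrapper.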
object_handle.as_raw(); self.object_type = T::TYPE; self } #[inline] pub fn tag_name(mut self, tag_name: u64) -> Self { self.tag_name = tag_name; self } #[inline] pub fn tag(mut self, tag: &'a [u8]) -> Self { self.tag_size = tag.len(); self.p_tag = tag.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugUtilsLabelEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_label_name: *const c_char, pub color: [f32; 4], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugUtilsLabelEXT<'_> {} unsafe impl Sync for DebugUtilsLabelEXT<'_> {} impl ::core::default::Default for DebugUtilsLabelEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_label_name: ::core::ptr::null(), color: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugUtilsLabelEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_LABEL_EXT; } impl<'a> DebugUtilsLabelEXT<'a> { #[inline] pub fn label_name(mut self, label_name: &'a CStr) -> Self { self.p_label_name = label_name.as_ptr(); self } #[inline] pub unsafe fn label_name_as_c_str(&self) -> Option<&CStr> { if self.p_label_name.is_null() { None } else { Some(CStr::from_ptr(self.p_label_name)) } } #[inline] pub fn color(mut self, color: [f32; 4]) -> Self { self.color = color; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugUtilsMessengerCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DebugUtilsMessengerCreateFlagsEXT, pub message_severity: DebugUtilsMessageSeverityFlagsEXT, pub message_type: DebugUtilsMessageTypeFlagsEXT, pub pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT, pub p_user_data: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugUtilsMessengerCreateInfoEXT<'_> {} unsafe impl Sync for DebugUtilsMessengerCreateInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for DebugUtilsMessengerCreateInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DebugUtilsMessengerCreateInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("flags", &self.flags) .field("message_severity", &self.message_severity) .field("message_type", &self.message_type) .field( "pfn_user_callback", &(self.pfn_user_callback.map(|x| x as *const ())), ) .field("p_user_data", &self.p_user_data) .finish() } } impl ::core::default::Default for DebugUtilsMessengerCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DebugUtilsMessengerCreateFlagsEXT::default(), message_severity: DebugUtilsMessageSeverityFlagsEXT::default(), message_type: DebugUtilsMessageTypeFlagsEXT::default(), pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT::default(), p_user_data: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugUtilsMessengerCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; } unsafe impl ExtendsInstanceCreateInfo for DebugUtilsMessengerCreateInfoEXT<'_> {} impl<'a> DebugUtilsMessengerCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: DebugUtilsMessengerCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn message_severity(mut self, message_severity: DebugUtilsMessageSeverityFlagsEXT) -> Self { 
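// Illustrative usage sketch (not part of the generated bindings): a typical way to fill this
// struct is with the builder methods in this impl plus a callback matching
// `PFN_vkDebugUtilsMessengerCallbackEXT`. Assuming `use ash::vk;` and `use core::ffi::c_void;`:
//
//     unsafe extern "system" fn debug_callback(
//         _severity: vk::DebugUtilsMessageSeverityFlagsEXT,
//         _ty: vk::DebugUtilsMessageTypeFlagsEXT,
//         _data: *const vk::DebugUtilsMessengerCallbackDataEXT<'_>,
//         _user_data: *mut c_void,
//     ) -> vk::Bool32 {
//         vk::FALSE // applications should always return FALSE
//     }
//
//     let info = vk::DebugUtilsMessengerCreateInfoEXT::default()
//         .message_severity(vk::DebugUtilsMessageSeverityFlagsEXT::WARNING
//             | vk::DebugUtilsMessageSeverityFlagsEXT::ERROR)
//         .message_type(vk::DebugUtilsMessageTypeFlagsEXT::GENERAL
//             | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION)
//         .pfn_user_callback(Some(debug_callback));
//
// Because this struct implements `ExtendsInstanceCreateInfo`, it can also be pushed onto
// `vk::InstanceCreateInfo` with `push_next` to capture messages emitted during instance creation.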
self.message_severity = message_severity; self } #[inline] pub fn message_type(mut self, message_type: DebugUtilsMessageTypeFlagsEXT) -> Self { self.message_type = message_type; self } #[inline] pub fn pfn_user_callback( mut self, pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT, ) -> Self { self.pfn_user_callback = pfn_user_callback; self } #[inline] pub fn user_data(mut self, user_data: *mut c_void) -> Self { self.p_user_data = user_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DebugUtilsMessengerCallbackDataEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DebugUtilsMessengerCallbackDataFlagsEXT, pub p_message_id_name: *const c_char, pub message_id_number: i32, pub p_message: *const c_char, pub queue_label_count: u32, pub p_queue_labels: *const DebugUtilsLabelEXT<'a>, pub cmd_buf_label_count: u32, pub p_cmd_buf_labels: *const DebugUtilsLabelEXT<'a>, pub object_count: u32, pub p_objects: *const DebugUtilsObjectNameInfoEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DebugUtilsMessengerCallbackDataEXT<'_> {} unsafe impl Sync for DebugUtilsMessengerCallbackDataEXT<'_> {} impl ::core::default::Default for DebugUtilsMessengerCallbackDataEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DebugUtilsMessengerCallbackDataFlagsEXT::default(), p_message_id_name: ::core::ptr::null(), message_id_number: i32::default(), p_message: ::core::ptr::null(), queue_label_count: u32::default(), p_queue_labels: ::core::ptr::null(), cmd_buf_label_count: u32::default(), p_cmd_buf_labels: ::core::ptr::null(), object_count: u32::default(), p_objects: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DebugUtilsMessengerCallbackDataEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT; } pub unsafe trait ExtendsDebugUtilsMessengerCallbackDataEXT {} impl<'a> DebugUtilsMessengerCallbackDataEXT<'a> { #[inline] pub fn flags(mut self, flags: DebugUtilsMessengerCallbackDataFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn message_id_name(mut self, message_id_name: &'a CStr) -> Self { self.p_message_id_name = message_id_name.as_ptr(); self } #[inline] pub unsafe fn message_id_name_as_c_str(&self) -> Option<&CStr> { if self.p_message_id_name.is_null() { None } else { Some(CStr::from_ptr(self.p_message_id_name)) } } #[inline] pub fn message_id_number(mut self, message_id_number: i32) -> Self { self.message_id_number = message_id_number; self } #[inline] pub fn message(mut self, message: &'a CStr) -> Self { self.p_message = message.as_ptr(); self } #[inline] pub unsafe fn message_as_c_str(&self) -> Option<&CStr> { if self.p_message.is_null() { None } else { Some(CStr::from_ptr(self.p_message)) } } #[inline] pub fn queue_labels(mut self, queue_labels: &'a [DebugUtilsLabelEXT<'a>]) -> Self { self.queue_label_count = queue_labels.len() as _; self.p_queue_labels = queue_labels.as_ptr(); self } #[inline] pub fn cmd_buf_labels(mut self, cmd_buf_labels: &'a [DebugUtilsLabelEXT<'a>]) -> Self { self.cmd_buf_label_count = cmd_buf_labels.len() as _; self.p_cmd_buf_labels = cmd_buf_labels.as_ptr(); self } #[inline] pub fn objects(mut self, objects: &'a [DebugUtilsObjectNameInfoEXT<'a>]) -> Self { self.object_count = objects.len() as _; self.p_objects = objects.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the 
first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsDebugUtilsMessengerCallbackDataEXT + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDeviceMemoryReportFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_memory_report: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDeviceMemoryReportFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDeviceMemoryReportFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDeviceMemoryReportFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_memory_report: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDeviceMemoryReportFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDeviceMemoryReportFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceMemoryReportFeaturesEXT<'_> {} impl<'a> PhysicalDeviceDeviceMemoryReportFeaturesEXT<'a> { #[inline] pub fn device_memory_report(mut self, device_memory_report: bool) -> Self { self.device_memory_report = device_memory_report.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceDeviceMemoryReportCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DeviceMemoryReportFlagsEXT, pub pfn_user_callback: PFN_vkDeviceMemoryReportCallbackEXT, pub p_user_data: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceDeviceMemoryReportCreateInfoEXT<'_> {} unsafe impl Sync for DeviceDeviceMemoryReportCreateInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for DeviceDeviceMemoryReportCreateInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DeviceDeviceMemoryReportCreateInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("flags", &self.flags) .field( "pfn_user_callback", &(self.pfn_user_callback.map(|x| x as *const ())), ) .field("p_user_data", &self.p_user_data) .finish() } } impl ::core::default::Default for DeviceDeviceMemoryReportCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DeviceMemoryReportFlagsEXT::default(), pfn_user_callback: PFN_vkDeviceMemoryReportCallbackEXT::default(), p_user_data: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceDeviceMemoryReportCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT; } unsafe impl ExtendsDeviceCreateInfo for DeviceDeviceMemoryReportCreateInfoEXT<'_> {} impl<'a> DeviceDeviceMemoryReportCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: DeviceMemoryReportFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn
pfn_user_callback( mut self, pfn_user_callback: PFN_vkDeviceMemoryReportCallbackEXT, ) -> Self { self.pfn_user_callback = pfn_user_callback; self } #[inline] pub fn user_data(mut self, user_data: *mut c_void) -> Self { self.p_user_data = user_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceMemoryReportCallbackDataEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: DeviceMemoryReportFlagsEXT, pub ty: DeviceMemoryReportEventTypeEXT, pub memory_object_id: u64, pub size: DeviceSize, pub object_type: ObjectType, pub object_handle: u64, pub heap_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceMemoryReportCallbackDataEXT<'_> {} unsafe impl Sync for DeviceMemoryReportCallbackDataEXT<'_> {} impl ::core::default::Default for DeviceMemoryReportCallbackDataEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: DeviceMemoryReportFlagsEXT::default(), ty: DeviceMemoryReportEventTypeEXT::default(), memory_object_id: u64::default(), size: DeviceSize::default(), object_type: ObjectType::default(), object_handle: u64::default(), heap_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceMemoryReportCallbackDataEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT; } impl<'a> DeviceMemoryReportCallbackDataEXT<'a> { #[inline] pub fn flags(mut self, flags: DeviceMemoryReportFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn ty(mut self, ty: DeviceMemoryReportEventTypeEXT) -> Self { self.ty = ty; self } #[inline] pub fn memory_object_id(mut self, memory_object_id: u64) -> Self { self.memory_object_id = memory_object_id; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn object_handle<T: Handle>(mut self, object_handle: T) -> Self { self.object_handle = object_handle.as_raw(); self.object_type = T::TYPE; self } #[inline] pub fn heap_index(mut self, heap_index: u32) -> Self { self.heap_index = heap_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMemoryHostPointerInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub handle_type: ExternalMemoryHandleTypeFlags, pub p_host_pointer: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMemoryHostPointerInfoEXT<'_> {} unsafe impl Sync for ImportMemoryHostPointerInfoEXT<'_> {} impl ::core::default::Default for ImportMemoryHostPointerInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), handle_type: ExternalMemoryHandleTypeFlags::default(), p_host_pointer: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMemoryHostPointerInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_HOST_POINTER_INFO_EXT; } unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryHostPointerInfoEXT<'_> {} impl<'a> ImportMemoryHostPointerInfoEXT<'a> { #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } #[inline] pub fn host_pointer(mut self, host_pointer: *mut c_void) -> Self { self.p_host_pointer = host_pointer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use]
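// Illustrative usage sketch (not part of the generated bindings): `ImportMemoryHostPointerInfoEXT`
// above implements `ExtendsMemoryAllocateInfo`, so importing host memory is done by chaining it
// onto a `MemoryAllocateInfo`. A minimal sketch, assuming `use ash::vk;`, a suitably aligned
// `*mut c_void` named `host_ptr`, and `size` / `memory_type_index` values obtained from the usual
// vkGetMemoryHostPointerPropertiesEXT and memory-type selection steps:
//
//     let mut import_info = vk::ImportMemoryHostPointerInfoEXT::default()
//         .handle_type(vk::ExternalMemoryHandleTypeFlags::HOST_ALLOCATION_EXT)
//         .host_pointer(host_ptr);
//     let alloc_info = vk::MemoryAllocateInfo::default()
//         .allocation_size(size)
//         .memory_type_index(memory_type_index)
//         .push_next(&mut import_info);
//
// The pointer and size must respect `min_imported_host_pointer_alignment` from
// `PhysicalDeviceExternalMemoryHostPropertiesEXT` below.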
pub struct MemoryHostPointerPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_type_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryHostPointerPropertiesEXT<'_> {} unsafe impl Sync for MemoryHostPointerPropertiesEXT<'_> {} impl ::core::default::Default for MemoryHostPointerPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_type_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryHostPointerPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_HOST_POINTER_PROPERTIES_EXT; } impl<'a> MemoryHostPointerPropertiesEXT<'a> { #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalMemoryHostPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_imported_host_pointer_alignment: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalMemoryHostPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceExternalMemoryHostPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceExternalMemoryHostPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_imported_host_pointer_alignment: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalMemoryHostPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExternalMemoryHostPropertiesEXT<'_> {} impl<'a> PhysicalDeviceExternalMemoryHostPropertiesEXT<'a> { #[inline] pub fn min_imported_host_pointer_alignment( mut self, min_imported_host_pointer_alignment: DeviceSize, ) -> Self { self.min_imported_host_pointer_alignment = min_imported_host_pointer_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceConservativeRasterizationPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub primitive_overestimation_size: f32, pub max_extra_primitive_overestimation_size: f32, pub extra_primitive_overestimation_size_granularity: f32, pub primitive_underestimation: Bool32, pub conservative_point_and_line_rasterization: Bool32, pub degenerate_triangles_rasterized: Bool32, pub degenerate_lines_rasterized: Bool32, pub fully_covered_fragment_shader_input_variable: Bool32, pub conservative_rasterization_post_depth_coverage: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceConservativeRasterizationPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceConservativeRasterizationPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceConservativeRasterizationPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), primitive_overestimation_size: f32::default(), max_extra_primitive_overestimation_size: f32::default(), extra_primitive_overestimation_size_granularity: f32::default(), primitive_underestimation: Bool32::default(), conservative_point_and_line_rasterization: Bool32::default(), degenerate_triangles_rasterized: 
Bool32::default(), degenerate_lines_rasterized: Bool32::default(), fully_covered_fragment_shader_input_variable: Bool32::default(), conservative_rasterization_post_depth_coverage: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceConservativeRasterizationPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceConservativeRasterizationPropertiesEXT<'_> { } impl<'a> PhysicalDeviceConservativeRasterizationPropertiesEXT<'a> { #[inline] pub fn primitive_overestimation_size(mut self, primitive_overestimation_size: f32) -> Self { self.primitive_overestimation_size = primitive_overestimation_size; self } #[inline] pub fn max_extra_primitive_overestimation_size( mut self, max_extra_primitive_overestimation_size: f32, ) -> Self { self.max_extra_primitive_overestimation_size = max_extra_primitive_overestimation_size; self } #[inline] pub fn extra_primitive_overestimation_size_granularity( mut self, extra_primitive_overestimation_size_granularity: f32, ) -> Self { self.extra_primitive_overestimation_size_granularity = extra_primitive_overestimation_size_granularity; self } #[inline] pub fn primitive_underestimation(mut self, primitive_underestimation: bool) -> Self { self.primitive_underestimation = primitive_underestimation.into(); self } #[inline] pub fn conservative_point_and_line_rasterization( mut self, conservative_point_and_line_rasterization: bool, ) -> Self { self.conservative_point_and_line_rasterization = conservative_point_and_line_rasterization.into(); self } #[inline] pub fn degenerate_triangles_rasterized( mut self, degenerate_triangles_rasterized: bool, ) -> Self { self.degenerate_triangles_rasterized = degenerate_triangles_rasterized.into(); self } #[inline] pub fn degenerate_lines_rasterized(mut self, degenerate_lines_rasterized: bool) -> Self { self.degenerate_lines_rasterized = degenerate_lines_rasterized.into(); self } #[inline] pub fn fully_covered_fragment_shader_input_variable( mut self, fully_covered_fragment_shader_input_variable: bool, ) -> Self { self.fully_covered_fragment_shader_input_variable = fully_covered_fragment_shader_input_variable.into(); self } #[inline] pub fn conservative_rasterization_post_depth_coverage( mut self, conservative_rasterization_post_depth_coverage: bool, ) -> Self { self.conservative_rasterization_post_depth_coverage = conservative_rasterization_post_depth_coverage.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CalibratedTimestampInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub time_domain: TimeDomainKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CalibratedTimestampInfoKHR<'_> {} unsafe impl Sync for CalibratedTimestampInfoKHR<'_> {} impl ::core::default::Default for CalibratedTimestampInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), time_domain: TimeDomainKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CalibratedTimestampInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CALIBRATED_TIMESTAMP_INFO_KHR; } impl<'a> CalibratedTimestampInfoKHR<'a> { #[inline] pub fn time_domain(mut self, time_domain: TimeDomainKHR) -> Self { self.time_domain = time_domain; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] 
#[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderCorePropertiesAMD<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_engine_count: u32, pub shader_arrays_per_engine_count: u32, pub compute_units_per_shader_array: u32, pub simd_per_compute_unit: u32, pub wavefronts_per_simd: u32, pub wavefront_size: u32, pub sgprs_per_simd: u32, pub min_sgpr_allocation: u32, pub max_sgpr_allocation: u32, pub sgpr_allocation_granularity: u32, pub vgprs_per_simd: u32, pub min_vgpr_allocation: u32, pub max_vgpr_allocation: u32, pub vgpr_allocation_granularity: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderCorePropertiesAMD<'_> {} unsafe impl Sync for PhysicalDeviceShaderCorePropertiesAMD<'_> {} impl ::core::default::Default for PhysicalDeviceShaderCorePropertiesAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_engine_count: u32::default(), shader_arrays_per_engine_count: u32::default(), compute_units_per_shader_array: u32::default(), simd_per_compute_unit: u32::default(), wavefronts_per_simd: u32::default(), wavefront_size: u32::default(), sgprs_per_simd: u32::default(), min_sgpr_allocation: u32::default(), max_sgpr_allocation: u32::default(), sgpr_allocation_granularity: u32::default(), vgprs_per_simd: u32::default(), min_vgpr_allocation: u32::default(), max_vgpr_allocation: u32::default(), vgpr_allocation_granularity: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderCorePropertiesAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCorePropertiesAMD<'_> {} impl<'a> PhysicalDeviceShaderCorePropertiesAMD<'a> { #[inline] pub fn shader_engine_count(mut self, shader_engine_count: u32) -> Self { self.shader_engine_count = shader_engine_count; self } #[inline] pub fn shader_arrays_per_engine_count(mut self, shader_arrays_per_engine_count: u32) -> Self { self.shader_arrays_per_engine_count = shader_arrays_per_engine_count; self } #[inline] pub fn compute_units_per_shader_array(mut self, compute_units_per_shader_array: u32) -> Self { self.compute_units_per_shader_array = compute_units_per_shader_array; self } #[inline] pub fn simd_per_compute_unit(mut self, simd_per_compute_unit: u32) -> Self { self.simd_per_compute_unit = simd_per_compute_unit; self } #[inline] pub fn wavefronts_per_simd(mut self, wavefronts_per_simd: u32) -> Self { self.wavefronts_per_simd = wavefronts_per_simd; self } #[inline] pub fn wavefront_size(mut self, wavefront_size: u32) -> Self { self.wavefront_size = wavefront_size; self } #[inline] pub fn sgprs_per_simd(mut self, sgprs_per_simd: u32) -> Self { self.sgprs_per_simd = sgprs_per_simd; self } #[inline] pub fn min_sgpr_allocation(mut self, min_sgpr_allocation: u32) -> Self { self.min_sgpr_allocation = min_sgpr_allocation; self } #[inline] pub fn max_sgpr_allocation(mut self, max_sgpr_allocation: u32) -> Self { self.max_sgpr_allocation = max_sgpr_allocation; self } #[inline] pub fn sgpr_allocation_granularity(mut self, sgpr_allocation_granularity: u32) -> Self { self.sgpr_allocation_granularity = sgpr_allocation_granularity; self } #[inline] pub fn vgprs_per_simd(mut self, vgprs_per_simd: u32) -> Self { self.vgprs_per_simd = vgprs_per_simd; self } #[inline] pub fn min_vgpr_allocation(mut self, min_vgpr_allocation: u32) -> Self { self.min_vgpr_allocation = 
min_vgpr_allocation; self } #[inline] pub fn max_vgpr_allocation(mut self, max_vgpr_allocation: u32) -> Self { self.max_vgpr_allocation = max_vgpr_allocation; self } #[inline] pub fn vgpr_allocation_granularity(mut self, vgpr_allocation_granularity: u32) -> Self { self.vgpr_allocation_granularity = vgpr_allocation_granularity; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderCoreProperties2AMD<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_core_features: ShaderCorePropertiesFlagsAMD, pub active_compute_unit_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderCoreProperties2AMD<'_> {} unsafe impl Sync for PhysicalDeviceShaderCoreProperties2AMD<'_> {} impl ::core::default::Default for PhysicalDeviceShaderCoreProperties2AMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_core_features: ShaderCorePropertiesFlagsAMD::default(), active_compute_unit_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderCoreProperties2AMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCoreProperties2AMD<'_> {} impl<'a> PhysicalDeviceShaderCoreProperties2AMD<'a> { #[inline] pub fn shader_core_features( mut self, shader_core_features: ShaderCorePropertiesFlagsAMD, ) -> Self { self.shader_core_features = shader_core_features; self } #[inline] pub fn active_compute_unit_count(mut self, active_compute_unit_count: u32) -> Self { self.active_compute_unit_count = active_compute_unit_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRasterizationConservativeStateCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineRasterizationConservativeStateCreateFlagsEXT, pub conservative_rasterization_mode: ConservativeRasterizationModeEXT, pub extra_primitive_overestimation_size: f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationConservativeStateCreateInfoEXT<'_> {} unsafe impl Sync for PipelineRasterizationConservativeStateCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineRasterizationConservativeStateCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineRasterizationConservativeStateCreateFlagsEXT::default(), conservative_rasterization_mode: ConservativeRasterizationModeEXT::default(), extra_primitive_overestimation_size: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationConservativeStateCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineRasterizationStateCreateInfo for PipelineRasterizationConservativeStateCreateInfoEXT<'_> { } impl<'a> PipelineRasterizationConservativeStateCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: PipelineRasterizationConservativeStateCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn conservative_rasterization_mode( mut self, conservative_rasterization_mode: ConservativeRasterizationModeEXT, ) -> Self { self.conservative_rasterization_mode = 
conservative_rasterization_mode; self } #[inline] pub fn extra_primitive_overestimation_size( mut self, extra_primitive_overestimation_size: f32, ) -> Self { self.extra_primitive_overestimation_size = extra_primitive_overestimation_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorIndexingFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_input_attachment_array_dynamic_indexing: Bool32, pub shader_uniform_texel_buffer_array_dynamic_indexing: Bool32, pub shader_storage_texel_buffer_array_dynamic_indexing: Bool32, pub shader_uniform_buffer_array_non_uniform_indexing: Bool32, pub shader_sampled_image_array_non_uniform_indexing: Bool32, pub shader_storage_buffer_array_non_uniform_indexing: Bool32, pub shader_storage_image_array_non_uniform_indexing: Bool32, pub shader_input_attachment_array_non_uniform_indexing: Bool32, pub shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32, pub shader_storage_texel_buffer_array_non_uniform_indexing: Bool32, pub descriptor_binding_uniform_buffer_update_after_bind: Bool32, pub descriptor_binding_sampled_image_update_after_bind: Bool32, pub descriptor_binding_storage_image_update_after_bind: Bool32, pub descriptor_binding_storage_buffer_update_after_bind: Bool32, pub descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32, pub descriptor_binding_storage_texel_buffer_update_after_bind: Bool32, pub descriptor_binding_update_unused_while_pending: Bool32, pub descriptor_binding_partially_bound: Bool32, pub descriptor_binding_variable_descriptor_count: Bool32, pub runtime_descriptor_array: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorIndexingFeatures<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorIndexingFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceDescriptorIndexingFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_input_attachment_array_dynamic_indexing: Bool32::default(), shader_uniform_texel_buffer_array_dynamic_indexing: Bool32::default(), shader_storage_texel_buffer_array_dynamic_indexing: Bool32::default(), shader_uniform_buffer_array_non_uniform_indexing: Bool32::default(), shader_sampled_image_array_non_uniform_indexing: Bool32::default(), shader_storage_buffer_array_non_uniform_indexing: Bool32::default(), shader_storage_image_array_non_uniform_indexing: Bool32::default(), shader_input_attachment_array_non_uniform_indexing: Bool32::default(), shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32::default(), shader_storage_texel_buffer_array_non_uniform_indexing: Bool32::default(), descriptor_binding_uniform_buffer_update_after_bind: Bool32::default(), descriptor_binding_sampled_image_update_after_bind: Bool32::default(), descriptor_binding_storage_image_update_after_bind: Bool32::default(), descriptor_binding_storage_buffer_update_after_bind: Bool32::default(), descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32::default(), descriptor_binding_storage_texel_buffer_update_after_bind: Bool32::default(), descriptor_binding_update_unused_while_pending: Bool32::default(), descriptor_binding_partially_bound: Bool32::default(), descriptor_binding_variable_descriptor_count: Bool32::default(), runtime_descriptor_array: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorIndexingFeatures<'a> { const 
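// Illustrative usage sketch (not part of the generated bindings): like the other feature structs
// in this module, `PhysicalDeviceDescriptorIndexingFeatures` implements `ExtendsDeviceCreateInfo`
// (declared just below), so the requested features are enabled by chaining it onto
// `DeviceCreateInfo`. A minimal sketch, assuming `use ash::vk;`, an `ash::Instance` named
// `instance`, a `vk::PhysicalDevice` named `pdevice`, and a slice of `vk::DeviceQueueCreateInfo`
// named `queue_infos`:
//
//     let mut indexing = vk::PhysicalDeviceDescriptorIndexingFeatures::default()
//         .runtime_descriptor_array(true)
//         .descriptor_binding_partially_bound(true);
//     let device_info = vk::DeviceCreateInfo::default()
//         .queue_create_infos(&queue_infos)
//         .push_next(&mut indexing);
//     let device = unsafe { instance.create_device(pdevice, &device_info, None) }?;
//
// The same struct can instead be chained onto `PhysicalDeviceFeatures2` and filled in by
// `get_physical_device_features2` to query which of these features the implementation supports.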
STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorIndexingFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorIndexingFeatures<'_> {} impl<'a> PhysicalDeviceDescriptorIndexingFeatures<'a> { #[inline] pub fn shader_input_attachment_array_dynamic_indexing( mut self, shader_input_attachment_array_dynamic_indexing: bool, ) -> Self { self.shader_input_attachment_array_dynamic_indexing = shader_input_attachment_array_dynamic_indexing.into(); self } #[inline] pub fn shader_uniform_texel_buffer_array_dynamic_indexing( mut self, shader_uniform_texel_buffer_array_dynamic_indexing: bool, ) -> Self { self.shader_uniform_texel_buffer_array_dynamic_indexing = shader_uniform_texel_buffer_array_dynamic_indexing.into(); self } #[inline] pub fn shader_storage_texel_buffer_array_dynamic_indexing( mut self, shader_storage_texel_buffer_array_dynamic_indexing: bool, ) -> Self { self.shader_storage_texel_buffer_array_dynamic_indexing = shader_storage_texel_buffer_array_dynamic_indexing.into(); self } #[inline] pub fn shader_uniform_buffer_array_non_uniform_indexing( mut self, shader_uniform_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_uniform_buffer_array_non_uniform_indexing = shader_uniform_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_sampled_image_array_non_uniform_indexing( mut self, shader_sampled_image_array_non_uniform_indexing: bool, ) -> Self { self.shader_sampled_image_array_non_uniform_indexing = shader_sampled_image_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_storage_buffer_array_non_uniform_indexing( mut self, shader_storage_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_storage_buffer_array_non_uniform_indexing = shader_storage_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_storage_image_array_non_uniform_indexing( mut self, shader_storage_image_array_non_uniform_indexing: bool, ) -> Self { self.shader_storage_image_array_non_uniform_indexing = shader_storage_image_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_input_attachment_array_non_uniform_indexing( mut self, shader_input_attachment_array_non_uniform_indexing: bool, ) -> Self { self.shader_input_attachment_array_non_uniform_indexing = shader_input_attachment_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_uniform_texel_buffer_array_non_uniform_indexing( mut self, shader_uniform_texel_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_uniform_texel_buffer_array_non_uniform_indexing = shader_uniform_texel_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_storage_texel_buffer_array_non_uniform_indexing( mut self, shader_storage_texel_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_storage_texel_buffer_array_non_uniform_indexing = shader_storage_texel_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn descriptor_binding_uniform_buffer_update_after_bind( mut self, descriptor_binding_uniform_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_uniform_buffer_update_after_bind = descriptor_binding_uniform_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_sampled_image_update_after_bind( mut self, descriptor_binding_sampled_image_update_after_bind: bool, ) -> Self { self.descriptor_binding_sampled_image_update_after_bind = 
descriptor_binding_sampled_image_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_storage_image_update_after_bind( mut self, descriptor_binding_storage_image_update_after_bind: bool, ) -> Self { self.descriptor_binding_storage_image_update_after_bind = descriptor_binding_storage_image_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_storage_buffer_update_after_bind( mut self, descriptor_binding_storage_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_storage_buffer_update_after_bind = descriptor_binding_storage_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_uniform_texel_buffer_update_after_bind( mut self, descriptor_binding_uniform_texel_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_uniform_texel_buffer_update_after_bind = descriptor_binding_uniform_texel_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_storage_texel_buffer_update_after_bind( mut self, descriptor_binding_storage_texel_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_storage_texel_buffer_update_after_bind = descriptor_binding_storage_texel_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_update_unused_while_pending( mut self, descriptor_binding_update_unused_while_pending: bool, ) -> Self { self.descriptor_binding_update_unused_while_pending = descriptor_binding_update_unused_while_pending.into(); self } #[inline] pub fn descriptor_binding_partially_bound( mut self, descriptor_binding_partially_bound: bool, ) -> Self { self.descriptor_binding_partially_bound = descriptor_binding_partially_bound.into(); self } #[inline] pub fn descriptor_binding_variable_descriptor_count( mut self, descriptor_binding_variable_descriptor_count: bool, ) -> Self { self.descriptor_binding_variable_descriptor_count = descriptor_binding_variable_descriptor_count.into(); self } #[inline] pub fn runtime_descriptor_array(mut self, runtime_descriptor_array: bool) -> Self { self.runtime_descriptor_array = runtime_descriptor_array.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorIndexingProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_update_after_bind_descriptors_in_all_pools: u32, pub shader_uniform_buffer_array_non_uniform_indexing_native: Bool32, pub shader_sampled_image_array_non_uniform_indexing_native: Bool32, pub shader_storage_buffer_array_non_uniform_indexing_native: Bool32, pub shader_storage_image_array_non_uniform_indexing_native: Bool32, pub shader_input_attachment_array_non_uniform_indexing_native: Bool32, pub robust_buffer_access_update_after_bind: Bool32, pub quad_divergent_implicit_lod: Bool32, pub max_per_stage_descriptor_update_after_bind_samplers: u32, pub max_per_stage_descriptor_update_after_bind_uniform_buffers: u32, pub max_per_stage_descriptor_update_after_bind_storage_buffers: u32, pub max_per_stage_descriptor_update_after_bind_sampled_images: u32, pub max_per_stage_descriptor_update_after_bind_storage_images: u32, pub max_per_stage_descriptor_update_after_bind_input_attachments: u32, pub max_per_stage_update_after_bind_resources: u32, pub max_descriptor_set_update_after_bind_samplers: u32, pub max_descriptor_set_update_after_bind_uniform_buffers: u32, pub max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32, pub max_descriptor_set_update_after_bind_storage_buffers: u32, pub 
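// Illustrative usage sketch (not part of the generated bindings; assumes the
// `DeviceCreateInfo` struct defined elsewhere in this module): the builders above can
// request descriptor-indexing ("bindless") features at device creation, because
// `PhysicalDeviceDescriptorIndexingFeatures` implements `ExtendsDeviceCreateInfo`.
//
//     let mut indexing = PhysicalDeviceDescriptorIndexingFeatures::default()
//         .shader_sampled_image_array_non_uniform_indexing(true)
//         .descriptor_binding_partially_bound(true)
//         .descriptor_binding_variable_descriptor_count(true)
//         .runtime_descriptor_array(true);
//     let device_create_info = DeviceCreateInfo::default().push_next(&mut indexing);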
max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32, pub max_descriptor_set_update_after_bind_sampled_images: u32, pub max_descriptor_set_update_after_bind_storage_images: u32, pub max_descriptor_set_update_after_bind_input_attachments: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorIndexingProperties<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorIndexingProperties<'_> {} impl ::core::default::Default for PhysicalDeviceDescriptorIndexingProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_update_after_bind_descriptors_in_all_pools: u32::default(), shader_uniform_buffer_array_non_uniform_indexing_native: Bool32::default(), shader_sampled_image_array_non_uniform_indexing_native: Bool32::default(), shader_storage_buffer_array_non_uniform_indexing_native: Bool32::default(), shader_storage_image_array_non_uniform_indexing_native: Bool32::default(), shader_input_attachment_array_non_uniform_indexing_native: Bool32::default(), robust_buffer_access_update_after_bind: Bool32::default(), quad_divergent_implicit_lod: Bool32::default(), max_per_stage_descriptor_update_after_bind_samplers: u32::default(), max_per_stage_descriptor_update_after_bind_uniform_buffers: u32::default(), max_per_stage_descriptor_update_after_bind_storage_buffers: u32::default(), max_per_stage_descriptor_update_after_bind_sampled_images: u32::default(), max_per_stage_descriptor_update_after_bind_storage_images: u32::default(), max_per_stage_descriptor_update_after_bind_input_attachments: u32::default(), max_per_stage_update_after_bind_resources: u32::default(), max_descriptor_set_update_after_bind_samplers: u32::default(), max_descriptor_set_update_after_bind_uniform_buffers: u32::default(), max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32::default(), max_descriptor_set_update_after_bind_storage_buffers: u32::default(), max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32::default(), max_descriptor_set_update_after_bind_sampled_images: u32::default(), max_descriptor_set_update_after_bind_storage_images: u32::default(), max_descriptor_set_update_after_bind_input_attachments: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorIndexingProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDescriptorIndexingProperties<'_> {} impl<'a> PhysicalDeviceDescriptorIndexingProperties<'a> { #[inline] pub fn max_update_after_bind_descriptors_in_all_pools( mut self, max_update_after_bind_descriptors_in_all_pools: u32, ) -> Self { self.max_update_after_bind_descriptors_in_all_pools = max_update_after_bind_descriptors_in_all_pools; self } #[inline] pub fn shader_uniform_buffer_array_non_uniform_indexing_native( mut self, shader_uniform_buffer_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_uniform_buffer_array_non_uniform_indexing_native = shader_uniform_buffer_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_sampled_image_array_non_uniform_indexing_native( mut self, shader_sampled_image_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_sampled_image_array_non_uniform_indexing_native = shader_sampled_image_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_storage_buffer_array_non_uniform_indexing_native( mut self, 
shader_storage_buffer_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_storage_buffer_array_non_uniform_indexing_native = shader_storage_buffer_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_storage_image_array_non_uniform_indexing_native( mut self, shader_storage_image_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_storage_image_array_non_uniform_indexing_native = shader_storage_image_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_input_attachment_array_non_uniform_indexing_native( mut self, shader_input_attachment_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_input_attachment_array_non_uniform_indexing_native = shader_input_attachment_array_non_uniform_indexing_native.into(); self } #[inline] pub fn robust_buffer_access_update_after_bind( mut self, robust_buffer_access_update_after_bind: bool, ) -> Self { self.robust_buffer_access_update_after_bind = robust_buffer_access_update_after_bind.into(); self } #[inline] pub fn quad_divergent_implicit_lod(mut self, quad_divergent_implicit_lod: bool) -> Self { self.quad_divergent_implicit_lod = quad_divergent_implicit_lod.into(); self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_samplers( mut self, max_per_stage_descriptor_update_after_bind_samplers: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_samplers = max_per_stage_descriptor_update_after_bind_samplers; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_uniform_buffers( mut self, max_per_stage_descriptor_update_after_bind_uniform_buffers: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_uniform_buffers = max_per_stage_descriptor_update_after_bind_uniform_buffers; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_storage_buffers( mut self, max_per_stage_descriptor_update_after_bind_storage_buffers: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_storage_buffers = max_per_stage_descriptor_update_after_bind_storage_buffers; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_sampled_images( mut self, max_per_stage_descriptor_update_after_bind_sampled_images: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_sampled_images = max_per_stage_descriptor_update_after_bind_sampled_images; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_storage_images( mut self, max_per_stage_descriptor_update_after_bind_storage_images: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_storage_images = max_per_stage_descriptor_update_after_bind_storage_images; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_input_attachments( mut self, max_per_stage_descriptor_update_after_bind_input_attachments: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_input_attachments = max_per_stage_descriptor_update_after_bind_input_attachments; self } #[inline] pub fn max_per_stage_update_after_bind_resources( mut self, max_per_stage_update_after_bind_resources: u32, ) -> Self { self.max_per_stage_update_after_bind_resources = max_per_stage_update_after_bind_resources; self } #[inline] pub fn max_descriptor_set_update_after_bind_samplers( mut self, max_descriptor_set_update_after_bind_samplers: u32, ) -> Self { self.max_descriptor_set_update_after_bind_samplers = max_descriptor_set_update_after_bind_samplers; self } #[inline] pub fn max_descriptor_set_update_after_bind_uniform_buffers( mut self, 
max_descriptor_set_update_after_bind_uniform_buffers: u32, ) -> Self { self.max_descriptor_set_update_after_bind_uniform_buffers = max_descriptor_set_update_after_bind_uniform_buffers; self } #[inline] pub fn max_descriptor_set_update_after_bind_uniform_buffers_dynamic( mut self, max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32, ) -> Self { self.max_descriptor_set_update_after_bind_uniform_buffers_dynamic = max_descriptor_set_update_after_bind_uniform_buffers_dynamic; self } #[inline] pub fn max_descriptor_set_update_after_bind_storage_buffers( mut self, max_descriptor_set_update_after_bind_storage_buffers: u32, ) -> Self { self.max_descriptor_set_update_after_bind_storage_buffers = max_descriptor_set_update_after_bind_storage_buffers; self } #[inline] pub fn max_descriptor_set_update_after_bind_storage_buffers_dynamic( mut self, max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32, ) -> Self { self.max_descriptor_set_update_after_bind_storage_buffers_dynamic = max_descriptor_set_update_after_bind_storage_buffers_dynamic; self } #[inline] pub fn max_descriptor_set_update_after_bind_sampled_images( mut self, max_descriptor_set_update_after_bind_sampled_images: u32, ) -> Self { self.max_descriptor_set_update_after_bind_sampled_images = max_descriptor_set_update_after_bind_sampled_images; self } #[inline] pub fn max_descriptor_set_update_after_bind_storage_images( mut self, max_descriptor_set_update_after_bind_storage_images: u32, ) -> Self { self.max_descriptor_set_update_after_bind_storage_images = max_descriptor_set_update_after_bind_storage_images; self } #[inline] pub fn max_descriptor_set_update_after_bind_input_attachments( mut self, max_descriptor_set_update_after_bind_input_attachments: u32, ) -> Self { self.max_descriptor_set_update_after_bind_input_attachments = max_descriptor_set_update_after_bind_input_attachments; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetLayoutBindingFlagsCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub binding_count: u32, pub p_binding_flags: *const DescriptorBindingFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetLayoutBindingFlagsCreateInfo<'_> {} unsafe impl Sync for DescriptorSetLayoutBindingFlagsCreateInfo<'_> {} impl ::core::default::Default for DescriptorSetLayoutBindingFlagsCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), binding_count: u32::default(), p_binding_flags: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetLayoutBindingFlagsCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO; } unsafe impl ExtendsDescriptorSetLayoutCreateInfo for DescriptorSetLayoutBindingFlagsCreateInfo<'_> {} impl<'a> DescriptorSetLayoutBindingFlagsCreateInfo<'a> { #[inline] pub fn binding_flags(mut self, binding_flags: &'a [DescriptorBindingFlags]) -> Self { self.binding_count = binding_flags.len() as _; self.p_binding_flags = binding_flags.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetVariableDescriptorCountAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub descriptor_set_count: u32, pub p_descriptor_counts: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
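// Illustrative usage sketch (not part of the generated bindings; assumes the
// `PhysicalDeviceProperties2` struct defined elsewhere in this module and a query such as
// `ash::Instance::get_physical_device_properties2`): the update-after-bind limits above
// are read by chaining this struct onto `PhysicalDeviceProperties2` before the query.
//
//     let mut indexing_props = PhysicalDeviceDescriptorIndexingProperties::default();
//     let mut props2 = PhysicalDeviceProperties2::default().push_next(&mut indexing_props);
//     // unsafe { instance.get_physical_device_properties2(physical_device, &mut props2) };
//     let _limit = indexing_props.max_per_stage_descriptor_update_after_bind_sampled_images;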
DescriptorSetVariableDescriptorCountAllocateInfo<'_> {} unsafe impl Sync for DescriptorSetVariableDescriptorCountAllocateInfo<'_> {} impl ::core::default::Default for DescriptorSetVariableDescriptorCountAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), descriptor_set_count: u32::default(), p_descriptor_counts: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetVariableDescriptorCountAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO; } unsafe impl ExtendsDescriptorSetAllocateInfo for DescriptorSetVariableDescriptorCountAllocateInfo<'_> { } impl<'a> DescriptorSetVariableDescriptorCountAllocateInfo<'a> { #[inline] pub fn descriptor_counts(mut self, descriptor_counts: &'a [u32]) -> Self { self.descriptor_set_count = descriptor_counts.len() as _; self.p_descriptor_counts = descriptor_counts.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetVariableDescriptorCountLayoutSupport<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_variable_descriptor_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetVariableDescriptorCountLayoutSupport<'_> {} unsafe impl Sync for DescriptorSetVariableDescriptorCountLayoutSupport<'_> {} impl ::core::default::Default for DescriptorSetVariableDescriptorCountLayoutSupport<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_variable_descriptor_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetVariableDescriptorCountLayoutSupport<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT; } unsafe impl ExtendsDescriptorSetLayoutSupport for DescriptorSetVariableDescriptorCountLayoutSupport<'_> { } impl<'a> DescriptorSetVariableDescriptorCountLayoutSupport<'a> { #[inline] pub fn max_variable_descriptor_count(mut self, max_variable_descriptor_count: u32) -> Self { self.max_variable_descriptor_count = max_variable_descriptor_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AttachmentDescription2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: AttachmentDescriptionFlags, pub format: Format, pub samples: SampleCountFlags, pub load_op: AttachmentLoadOp, pub store_op: AttachmentStoreOp, pub stencil_load_op: AttachmentLoadOp, pub stencil_store_op: AttachmentStoreOp, pub initial_layout: ImageLayout, pub final_layout: ImageLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AttachmentDescription2<'_> {} unsafe impl Sync for AttachmentDescription2<'_> {} impl ::core::default::Default for AttachmentDescription2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: AttachmentDescriptionFlags::default(), format: Format::default(), samples: SampleCountFlags::default(), load_op: AttachmentLoadOp::default(), store_op: AttachmentStoreOp::default(), stencil_load_op: AttachmentLoadOp::default(), stencil_store_op: AttachmentStoreOp::default(), initial_layout: ImageLayout::default(), final_layout: ImageLayout::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AttachmentDescription2<'a> { const 
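// Illustrative usage sketch (not part of the generated bindings; assumes the
// `DescriptorSetLayoutCreateInfo` and `DescriptorSetAllocateInfo` structs defined
// elsewhere in this module): `DescriptorSetLayoutBindingFlagsCreateInfo` supplies one
// `DescriptorBindingFlags` entry per binding, and
// `DescriptorSetVariableDescriptorCountAllocateInfo` supplies the variable descriptor
// count for each set being allocated.
//
//     let flags = [DescriptorBindingFlags::PARTIALLY_BOUND
//         | DescriptorBindingFlags::VARIABLE_DESCRIPTOR_COUNT];
//     let mut binding_flags =
//         DescriptorSetLayoutBindingFlagsCreateInfo::default().binding_flags(&flags);
//     let _layout_info = DescriptorSetLayoutCreateInfo::default().push_next(&mut binding_flags);
//
//     let counts = [1024];
//     let mut variable_counts =
//         DescriptorSetVariableDescriptorCountAllocateInfo::default().descriptor_counts(&counts);
//     let _alloc_info = DescriptorSetAllocateInfo::default().push_next(&mut variable_counts);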
STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_DESCRIPTION_2; } pub unsafe trait ExtendsAttachmentDescription2 {} impl<'a> AttachmentDescription2<'a> { #[inline] pub fn flags(mut self, flags: AttachmentDescriptionFlags) -> Self { self.flags = flags; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn samples(mut self, samples: SampleCountFlags) -> Self { self.samples = samples; self } #[inline] pub fn load_op(mut self, load_op: AttachmentLoadOp) -> Self { self.load_op = load_op; self } #[inline] pub fn store_op(mut self, store_op: AttachmentStoreOp) -> Self { self.store_op = store_op; self } #[inline] pub fn stencil_load_op(mut self, stencil_load_op: AttachmentLoadOp) -> Self { self.stencil_load_op = stencil_load_op; self } #[inline] pub fn stencil_store_op(mut self, stencil_store_op: AttachmentStoreOp) -> Self { self.stencil_store_op = stencil_store_op; self } #[inline] pub fn initial_layout(mut self, initial_layout: ImageLayout) -> Self { self.initial_layout = initial_layout; self } #[inline] pub fn final_layout(mut self, final_layout: ImageLayout) -> Self { self.final_layout = final_layout; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AttachmentReference2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub attachment: u32, pub layout: ImageLayout, pub aspect_mask: ImageAspectFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AttachmentReference2<'_> {} unsafe impl Sync for AttachmentReference2<'_> {} impl ::core::default::Default for AttachmentReference2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), attachment: u32::default(), layout: ImageLayout::default(), aspect_mask: ImageAspectFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AttachmentReference2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_REFERENCE_2; } pub unsafe trait ExtendsAttachmentReference2 {} impl<'a> AttachmentReference2<'a> { #[inline] pub fn attachment(mut self, attachment: u32) -> Self { self.attachment = attachment; self } #[inline] pub fn layout(mut self, layout: ImageLayout) -> Self { self.layout = layout; self } #[inline] pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self { self.aspect_mask = aspect_mask; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassDescription2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: SubpassDescriptionFlags, pub pipeline_bind_point: PipelineBindPoint, pub view_mask: u32, pub input_attachment_count: u32, pub p_input_attachments: *const AttachmentReference2<'a>, pub color_attachment_count: u32, pub p_color_attachments: *const AttachmentReference2<'a>, pub p_resolve_attachments: *const AttachmentReference2<'a>, pub p_depth_stencil_attachment: *const AttachmentReference2<'a>, pub preserve_attachment_count: u32, pub p_preserve_attachments: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassDescription2<'_> {} unsafe impl Sync for SubpassDescription2<'_> {} impl ::core::default::Default for SubpassDescription2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: SubpassDescriptionFlags::default(), pipeline_bind_point: PipelineBindPoint::default(), view_mask: u32::default(), input_attachment_count: u32::default(), p_input_attachments: ::core::ptr::null(), color_attachment_count: u32::default(), p_color_attachments: ::core::ptr::null(), p_resolve_attachments: ::core::ptr::null(), p_depth_stencil_attachment: ::core::ptr::null(), preserve_attachment_count: u32::default(), p_preserve_attachments: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassDescription2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_DESCRIPTION_2; } pub unsafe trait ExtendsSubpassDescription2 {} impl<'a> SubpassDescription2<'a> { #[inline] pub fn flags(mut self, flags: SubpassDescriptionFlags) -> Self { self.flags = flags; self } #[inline] pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self { self.pipeline_bind_point = pipeline_bind_point; self } #[inline] pub fn view_mask(mut self, view_mask: u32) -> Self { self.view_mask = view_mask; self } #[inline] pub fn input_attachments(mut self, input_attachments: &'a [AttachmentReference2<'a>]) -> Self { self.input_attachment_count = input_attachments.len() as _; self.p_input_attachments = input_attachments.as_ptr(); self } #[inline] pub fn color_attachments(mut self, color_attachments: &'a [AttachmentReference2<'a>]) -> Self { self.color_attachment_count = color_attachments.len() as _; self.p_color_attachments = color_attachments.as_ptr(); self } #[inline] pub fn resolve_attachments( mut self, resolve_attachments: &'a [AttachmentReference2<'a>], ) -> Self { self.color_attachment_count = resolve_attachments.len() as _; self.p_resolve_attachments = resolve_attachments.as_ptr(); self } #[inline] pub fn depth_stencil_attachment( mut self, depth_stencil_attachment: &'a AttachmentReference2<'a>, ) -> Self { self.p_depth_stencil_attachment = depth_stencil_attachment; self } #[inline] pub fn preserve_attachments(mut self, preserve_attachments: &'a [u32]) -> Self { self.preserve_attachment_count = preserve_attachments.len() as _; 
self.p_preserve_attachments = preserve_attachments.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassDependency2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_subpass: u32, pub dst_subpass: u32, pub src_stage_mask: PipelineStageFlags, pub dst_stage_mask: PipelineStageFlags, pub src_access_mask: AccessFlags, pub dst_access_mask: AccessFlags, pub dependency_flags: DependencyFlags, pub view_offset: i32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassDependency2<'_> {} unsafe impl Sync for SubpassDependency2<'_> {} impl ::core::default::Default for SubpassDependency2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_subpass: u32::default(), dst_subpass: u32::default(), src_stage_mask: PipelineStageFlags::default(), dst_stage_mask: PipelineStageFlags::default(), src_access_mask: AccessFlags::default(), dst_access_mask: AccessFlags::default(), dependency_flags: DependencyFlags::default(), view_offset: i32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassDependency2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_DEPENDENCY_2; } pub unsafe trait ExtendsSubpassDependency2 {} impl<'a> SubpassDependency2<'a> { #[inline] pub fn src_subpass(mut self, src_subpass: u32) -> Self { self.src_subpass = src_subpass; self } #[inline] pub fn dst_subpass(mut self, dst_subpass: u32) -> Self { self.dst_subpass = dst_subpass; self } #[inline] pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags) -> Self { self.src_stage_mask = src_stage_mask; self } #[inline] pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags) -> Self { self.dst_stage_mask = dst_stage_mask; self } #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self { self.dst_access_mask = dst_access_mask; self } #[inline] pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self { self.dependency_flags = dependency_flags; self } #[inline] pub fn view_offset(mut self, view_offset: i32) -> Self { self.view_offset = view_offset; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassCreateInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: RenderPassCreateFlags, pub attachment_count: u32, pub p_attachments: *const AttachmentDescription2<'a>, pub subpass_count: u32, pub p_subpasses: *const SubpassDescription2<'a>, pub dependency_count: u32, pub p_dependencies: *const SubpassDependency2<'a>, pub correlated_view_mask_count: u32, pub p_correlated_view_masks: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassCreateInfo2<'_> {} unsafe impl Sync for RenderPassCreateInfo2<'_> {} impl ::core::default::Default for RenderPassCreateInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: RenderPassCreateFlags::default(), attachment_count: u32::default(), p_attachments: ::core::ptr::null(), subpass_count: u32::default(), p_subpasses: ::core::ptr::null(), dependency_count: u32::default(), p_dependencies: ::core::ptr::null(), correlated_view_mask_count: u32::default(), p_correlated_view_masks: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassCreateInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATE_INFO_2; } pub unsafe trait ExtendsRenderPassCreateInfo2 {} impl<'a> RenderPassCreateInfo2<'a> { #[inline] pub fn flags(mut self, flags: RenderPassCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn attachments(mut self, attachments: &'a [AttachmentDescription2<'a>]) -> Self { self.attachment_count = attachments.len() as _; self.p_attachments = attachments.as_ptr(); self } #[inline] pub fn subpasses(mut self, subpasses: &'a [SubpassDescription2<'a>]) -> Self { self.subpass_count = subpasses.len() as _; self.p_subpasses = subpasses.as_ptr(); self } #[inline] pub fn dependencies(mut self, dependencies: &'a [SubpassDependency2<'a>]) -> Self { self.dependency_count = dependencies.len() as _; self.p_dependencies = dependencies.as_ptr(); self } #[inline] pub fn correlated_view_masks(mut self, correlated_view_masks: &'a [u32]) -> Self { self.correlated_view_mask_count = correlated_view_masks.len() as _; self.p_correlated_view_masks = correlated_view_masks.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassBeginInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub contents: SubpassContents, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassBeginInfo<'_> {} unsafe impl Sync for SubpassBeginInfo<'_> {} impl ::core::default::Default for SubpassBeginInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), contents: SubpassContents::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassBeginInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_BEGIN_INFO; } impl<'a> SubpassBeginInfo<'a> { #[inline] pub fn contents(mut self, contents: SubpassContents) -> Self { self.contents = contents; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassEndInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassEndInfo<'_> {} unsafe impl Sync for SubpassEndInfo<'_> {} impl ::core::default::Default for SubpassEndInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassEndInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_END_INFO; } pub unsafe trait ExtendsSubpassEndInfo {} impl<'a> SubpassEndInfo<'a> { #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTimelineSemaphoreFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub timeline_semaphore: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTimelineSemaphoreFeatures<'_> {} unsafe impl Sync for PhysicalDeviceTimelineSemaphoreFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceTimelineSemaphoreFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), timeline_semaphore: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTimelineSemaphoreFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTimelineSemaphoreFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTimelineSemaphoreFeatures<'_> {} impl<'a> PhysicalDeviceTimelineSemaphoreFeatures<'a> { #[inline] pub fn timeline_semaphore(mut self, timeline_semaphore: bool) -> Self { self.timeline_semaphore = timeline_semaphore.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTimelineSemaphoreProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_timeline_semaphore_value_difference: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTimelineSemaphoreProperties<'_> {} unsafe impl Sync for PhysicalDeviceTimelineSemaphoreProperties<'_> {} impl ::core::default::Default for PhysicalDeviceTimelineSemaphoreProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_timeline_semaphore_value_difference: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTimelineSemaphoreProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTimelineSemaphoreProperties<'_> {} impl<'a> PhysicalDeviceTimelineSemaphoreProperties<'a> { #[inline] pub fn max_timeline_semaphore_value_difference( mut self, max_timeline_semaphore_value_difference: u64, ) -> Self { self.max_timeline_semaphore_value_difference = max_timeline_semaphore_value_difference; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreTypeCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore_type: SemaphoreType, pub initial_value: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreTypeCreateInfo<'_> {} unsafe impl Sync for SemaphoreTypeCreateInfo<'_> {} impl ::core::default::Default for SemaphoreTypeCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore_type: 
SemaphoreType::default(), initial_value: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreTypeCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_TYPE_CREATE_INFO; } unsafe impl ExtendsSemaphoreCreateInfo for SemaphoreTypeCreateInfo<'_> {} unsafe impl ExtendsPhysicalDeviceExternalSemaphoreInfo for SemaphoreTypeCreateInfo<'_> {} impl<'a> SemaphoreTypeCreateInfo<'a> { #[inline] pub fn semaphore_type(mut self, semaphore_type: SemaphoreType) -> Self { self.semaphore_type = semaphore_type; self } #[inline] pub fn initial_value(mut self, initial_value: u64) -> Self { self.initial_value = initial_value; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct TimelineSemaphoreSubmitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub wait_semaphore_value_count: u32, pub p_wait_semaphore_values: *const u64, pub signal_semaphore_value_count: u32, pub p_signal_semaphore_values: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for TimelineSemaphoreSubmitInfo<'_> {} unsafe impl Sync for TimelineSemaphoreSubmitInfo<'_> {} impl ::core::default::Default for TimelineSemaphoreSubmitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), wait_semaphore_value_count: u32::default(), p_wait_semaphore_values: ::core::ptr::null(), signal_semaphore_value_count: u32::default(), p_signal_semaphore_values: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for TimelineSemaphoreSubmitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::TIMELINE_SEMAPHORE_SUBMIT_INFO; } unsafe impl ExtendsSubmitInfo for TimelineSemaphoreSubmitInfo<'_> {} unsafe impl ExtendsBindSparseInfo for TimelineSemaphoreSubmitInfo<'_> {} impl<'a> TimelineSemaphoreSubmitInfo<'a> { #[inline] pub fn wait_semaphore_values(mut self, wait_semaphore_values: &'a [u64]) -> Self { self.wait_semaphore_value_count = wait_semaphore_values.len() as _; self.p_wait_semaphore_values = wait_semaphore_values.as_ptr(); self } #[inline] pub fn signal_semaphore_values(mut self, signal_semaphore_values: &'a [u64]) -> Self { self.signal_semaphore_value_count = signal_semaphore_values.len() as _; self.p_signal_semaphore_values = signal_semaphore_values.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreWaitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: SemaphoreWaitFlags, pub semaphore_count: u32, pub p_semaphores: *const Semaphore, pub p_values: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreWaitInfo<'_> {} unsafe impl Sync for SemaphoreWaitInfo<'_> {} impl ::core::default::Default for SemaphoreWaitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: SemaphoreWaitFlags::default(), semaphore_count: u32::default(), p_semaphores: ::core::ptr::null(), p_values: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreWaitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_WAIT_INFO; } impl<'a> SemaphoreWaitInfo<'a> { #[inline] pub fn flags(mut self, flags: SemaphoreWaitFlags) -> Self { self.flags = flags; self } #[inline] pub fn semaphores(mut self, semaphores: &'a [Semaphore]) -> Self { self.semaphore_count = semaphores.len() as _; 
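// Illustrative usage sketch (not part of the generated bindings; assumes the
// `SemaphoreCreateInfo` and `SubmitInfo` structs defined elsewhere in this module):
// a timeline semaphore is created by chaining `SemaphoreTypeCreateInfo`, and per-submit
// wait/signal values travel in `TimelineSemaphoreSubmitInfo`.
//
//     let mut type_info = SemaphoreTypeCreateInfo::default()
//         .semaphore_type(SemaphoreType::TIMELINE)
//         .initial_value(0);
//     let _semaphore_info = SemaphoreCreateInfo::default().push_next(&mut type_info);
//
//     let wait_values = [1u64];
//     let signal_values = [2u64];
//     let mut timeline_info = TimelineSemaphoreSubmitInfo::default()
//         .wait_semaphore_values(&wait_values)
//         .signal_semaphore_values(&signal_values);
//     let _submit_info = SubmitInfo::default().push_next(&mut timeline_info);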
self.p_semaphores = semaphores.as_ptr(); self } #[inline] pub fn values(mut self, values: &'a [u64]) -> Self { self.semaphore_count = values.len() as _; self.p_values = values.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreSignalInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub value: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreSignalInfo<'_> {} unsafe impl Sync for SemaphoreSignalInfo<'_> {} impl ::core::default::Default for SemaphoreSignalInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), value: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreSignalInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_SIGNAL_INFO; } impl<'a> SemaphoreSignalInfo<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn value(mut self, value: u64) -> Self { self.value = value; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VertexInputBindingDivisorDescriptionKHR { pub binding: u32, pub divisor: u32, } impl VertexInputBindingDivisorDescriptionKHR { #[inline] pub fn binding(mut self, binding: u32) -> Self { self.binding = binding; self } #[inline] pub fn divisor(mut self, divisor: u32) -> Self { self.divisor = divisor; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineVertexInputDivisorStateCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub vertex_binding_divisor_count: u32, pub p_vertex_binding_divisors: *const VertexInputBindingDivisorDescriptionKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineVertexInputDivisorStateCreateInfoKHR<'_> {} unsafe impl Sync for PipelineVertexInputDivisorStateCreateInfoKHR<'_> {} impl ::core::default::Default for PipelineVertexInputDivisorStateCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), vertex_binding_divisor_count: u32::default(), p_vertex_binding_divisors: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineVertexInputDivisorStateCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR; } unsafe impl ExtendsPipelineVertexInputStateCreateInfo for PipelineVertexInputDivisorStateCreateInfoKHR<'_> { } impl<'a> PipelineVertexInputDivisorStateCreateInfoKHR<'a> { #[inline] pub fn vertex_binding_divisors( mut self, vertex_binding_divisors: &'a [VertexInputBindingDivisorDescriptionKHR], ) -> Self { self.vertex_binding_divisor_count = vertex_binding_divisors.len() as _; self.p_vertex_binding_divisors = vertex_binding_divisors.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_vertex_attrib_divisor: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'_> {} impl 
::core::default::Default for PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_vertex_attrib_divisor: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'_> { } impl<'a> PhysicalDeviceVertexAttributeDivisorPropertiesEXT<'a> { #[inline] pub fn max_vertex_attrib_divisor(mut self, max_vertex_attrib_divisor: u32) -> Self { self.max_vertex_attrib_divisor = max_vertex_attrib_divisor; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_vertex_attrib_divisor: u32, pub supports_non_zero_first_instance: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_vertex_attrib_divisor: u32::default(), supports_non_zero_first_instance: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'_> { } impl<'a> PhysicalDeviceVertexAttributeDivisorPropertiesKHR<'a> { #[inline] pub fn max_vertex_attrib_divisor(mut self, max_vertex_attrib_divisor: u32) -> Self { self.max_vertex_attrib_divisor = max_vertex_attrib_divisor; self } #[inline] pub fn supports_non_zero_first_instance( mut self, supports_non_zero_first_instance: bool, ) -> Self { self.supports_non_zero_first_instance = supports_non_zero_first_instance.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePCIBusInfoPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pci_domain: u32, pub pci_bus: u32, pub pci_device: u32, pub pci_function: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePCIBusInfoPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDevicePCIBusInfoPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePCIBusInfoPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pci_domain: u32::default(), pci_bus: u32::default(), pci_device: u32::default(), pci_function: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePCIBusInfoPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePCIBusInfoPropertiesEXT<'_> {} impl<'a> PhysicalDevicePCIBusInfoPropertiesEXT<'a> { #[inline] pub fn pci_domain(mut self, pci_domain: u32) -> Self { self.pci_domain = 
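// Illustrative usage sketch (not part of the generated bindings; assumes the
// `PipelineVertexInputStateCreateInfo` struct defined elsewhere in this module):
// per-binding instance divisors are attached to the vertex-input state through
// `PipelineVertexInputDivisorStateCreateInfoKHR`.
//
//     let divisors = [VertexInputBindingDivisorDescriptionKHR::default()
//         .binding(1)
//         .divisor(4)];
//     let mut divisor_state = PipelineVertexInputDivisorStateCreateInfoKHR::default()
//         .vertex_binding_divisors(&divisors);
//     let _vertex_input_state =
//         PipelineVertexInputStateCreateInfo::default().push_next(&mut divisor_state);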
pci_domain; self } #[inline] pub fn pci_bus(mut self, pci_bus: u32) -> Self { self.pci_bus = pci_bus; self } #[inline] pub fn pci_device(mut self, pci_device: u32) -> Self { self.pci_device = pci_device; self } #[inline] pub fn pci_function(mut self, pci_function: u32) -> Self { self.pci_function = pci_function; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportAndroidHardwareBufferInfoANDROID<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: *mut AHardwareBuffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportAndroidHardwareBufferInfoANDROID<'_> {} unsafe impl Sync for ImportAndroidHardwareBufferInfoANDROID<'_> {} impl ::core::default::Default for ImportAndroidHardwareBufferInfoANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportAndroidHardwareBufferInfoANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID; } unsafe impl ExtendsMemoryAllocateInfo for ImportAndroidHardwareBufferInfoANDROID<'_> {} impl<'a> ImportAndroidHardwareBufferInfoANDROID<'a> { #[inline] pub fn buffer(mut self, buffer: *mut AHardwareBuffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AndroidHardwareBufferUsageANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub android_hardware_buffer_usage: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AndroidHardwareBufferUsageANDROID<'_> {} unsafe impl Sync for AndroidHardwareBufferUsageANDROID<'_> {} impl ::core::default::Default for AndroidHardwareBufferUsageANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), android_hardware_buffer_usage: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AndroidHardwareBufferUsageANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_USAGE_ANDROID; } unsafe impl ExtendsImageFormatProperties2 for AndroidHardwareBufferUsageANDROID<'_> {} impl<'a> AndroidHardwareBufferUsageANDROID<'a> { #[inline] pub fn android_hardware_buffer_usage(mut self, android_hardware_buffer_usage: u64) -> Self { self.android_hardware_buffer_usage = android_hardware_buffer_usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AndroidHardwareBufferPropertiesANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub allocation_size: DeviceSize, pub memory_type_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AndroidHardwareBufferPropertiesANDROID<'_> {} unsafe impl Sync for AndroidHardwareBufferPropertiesANDROID<'_> {} impl ::core::default::Default for AndroidHardwareBufferPropertiesANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), allocation_size: DeviceSize::default(), memory_type_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AndroidHardwareBufferPropertiesANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID; } pub unsafe trait ExtendsAndroidHardwareBufferPropertiesANDROID {} impl<'a> 
AndroidHardwareBufferPropertiesANDROID<'a> { #[inline] pub fn allocation_size(mut self, allocation_size: DeviceSize) -> Self { self.allocation_size = allocation_size; self } #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryGetAndroidHardwareBufferInfoANDROID<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryGetAndroidHardwareBufferInfoANDROID<'_> {} unsafe impl Sync for MemoryGetAndroidHardwareBufferInfoANDROID<'_> {} impl ::core::default::Default for MemoryGetAndroidHardwareBufferInfoANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryGetAndroidHardwareBufferInfoANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID; } impl<'a> MemoryGetAndroidHardwareBufferInfoANDROID<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AndroidHardwareBufferFormatPropertiesANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format: Format, pub external_format: u64, pub format_features: FormatFeatureFlags, pub sampler_ycbcr_conversion_components: ComponentMapping, pub suggested_ycbcr_model: SamplerYcbcrModelConversion, pub suggested_ycbcr_range: SamplerYcbcrRange, pub suggested_x_chroma_offset: ChromaLocation, pub suggested_y_chroma_offset: ChromaLocation, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AndroidHardwareBufferFormatPropertiesANDROID<'_> {} unsafe impl Sync for AndroidHardwareBufferFormatPropertiesANDROID<'_> {} impl ::core::default::Default for AndroidHardwareBufferFormatPropertiesANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format: Format::default(), external_format: u64::default(), format_features: FormatFeatureFlags::default(), sampler_ycbcr_conversion_components: ComponentMapping::default(), suggested_ycbcr_model: SamplerYcbcrModelConversion::default(), suggested_ycbcr_range: SamplerYcbcrRange::default(), suggested_x_chroma_offset: ChromaLocation::default(), suggested_y_chroma_offset: ChromaLocation::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AndroidHardwareBufferFormatPropertiesANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID; } unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID for 
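// Illustrative usage sketch (not part of the generated bindings; assumes the
// `MemoryAllocateInfo` struct defined elsewhere in this module and a hypothetical
// `ahardware_buffer: *mut AHardwareBuffer` obtained from the Android NDK): an external
// Android hardware buffer is imported by chaining the import struct onto the memory
// allocation, while the format-properties struct is chained onto the base properties
// struct before the vkGetAndroidHardwareBufferPropertiesANDROID query.
//
//     let mut import_info =
//         ImportAndroidHardwareBufferInfoANDROID::default().buffer(ahardware_buffer);
//     let _alloc_info = MemoryAllocateInfo::default().push_next(&mut import_info);
//
//     let mut format_props = AndroidHardwareBufferFormatPropertiesANDROID::default();
//     let _ahb_props =
//         AndroidHardwareBufferPropertiesANDROID::default().push_next(&mut format_props);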
AndroidHardwareBufferFormatPropertiesANDROID<'_> { } impl<'a> AndroidHardwareBufferFormatPropertiesANDROID<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn external_format(mut self, external_format: u64) -> Self { self.external_format = external_format; self } #[inline] pub fn format_features(mut self, format_features: FormatFeatureFlags) -> Self { self.format_features = format_features; self } #[inline] pub fn sampler_ycbcr_conversion_components( mut self, sampler_ycbcr_conversion_components: ComponentMapping, ) -> Self { self.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components; self } #[inline] pub fn suggested_ycbcr_model( mut self, suggested_ycbcr_model: SamplerYcbcrModelConversion, ) -> Self { self.suggested_ycbcr_model = suggested_ycbcr_model; self } #[inline] pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self { self.suggested_ycbcr_range = suggested_ycbcr_range; self } #[inline] pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self { self.suggested_x_chroma_offset = suggested_x_chroma_offset; self } #[inline] pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self { self.suggested_y_chroma_offset = suggested_y_chroma_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferInheritanceConditionalRenderingInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub conditional_rendering_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferInheritanceConditionalRenderingInfoEXT<'_> {} unsafe impl Sync for CommandBufferInheritanceConditionalRenderingInfoEXT<'_> {} impl ::core::default::Default for CommandBufferInheritanceConditionalRenderingInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), conditional_rendering_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferInheritanceConditionalRenderingInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT; } unsafe impl ExtendsCommandBufferInheritanceInfo for CommandBufferInheritanceConditionalRenderingInfoEXT<'_> { } impl<'a> CommandBufferInheritanceConditionalRenderingInfoEXT<'a> { #[inline] pub fn conditional_rendering_enable(mut self, conditional_rendering_enable: bool) -> Self { self.conditional_rendering_enable = conditional_rendering_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalFormatANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub external_format: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalFormatANDROID<'_> {} unsafe impl Sync for ExternalFormatANDROID<'_> {} impl ::core::default::Default for ExternalFormatANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), external_format: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalFormatANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_FORMAT_ANDROID; } unsafe impl ExtendsImageCreateInfo for ExternalFormatANDROID<'_> {} unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for ExternalFormatANDROID<'_> {} unsafe 
impl ExtendsAttachmentDescription2 for ExternalFormatANDROID<'_> {} unsafe impl ExtendsGraphicsPipelineCreateInfo for ExternalFormatANDROID<'_> {} unsafe impl ExtendsCommandBufferInheritanceInfo for ExternalFormatANDROID<'_> {} impl<'a> ExternalFormatANDROID<'a> { #[inline] pub fn external_format(mut self, external_format: u64) -> Self { self.external_format = external_format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevice8BitStorageFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub storage_buffer8_bit_access: Bool32, pub uniform_and_storage_buffer8_bit_access: Bool32, pub storage_push_constant8: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevice8BitStorageFeatures<'_> {} unsafe impl Sync for PhysicalDevice8BitStorageFeatures<'_> {} impl ::core::default::Default for PhysicalDevice8BitStorageFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), storage_buffer8_bit_access: Bool32::default(), uniform_and_storage_buffer8_bit_access: Bool32::default(), storage_push_constant8: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevice8BitStorageFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice8BitStorageFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice8BitStorageFeatures<'_> {} impl<'a> PhysicalDevice8BitStorageFeatures<'a> { #[inline] pub fn storage_buffer8_bit_access(mut self, storage_buffer8_bit_access: bool) -> Self { self.storage_buffer8_bit_access = storage_buffer8_bit_access.into(); self } #[inline] pub fn uniform_and_storage_buffer8_bit_access( mut self, uniform_and_storage_buffer8_bit_access: bool, ) -> Self { self.uniform_and_storage_buffer8_bit_access = uniform_and_storage_buffer8_bit_access.into(); self } #[inline] pub fn storage_push_constant8(mut self, storage_push_constant8: bool) -> Self { self.storage_push_constant8 = storage_push_constant8.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceConditionalRenderingFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub conditional_rendering: Bool32, pub inherited_conditional_rendering: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceConditionalRenderingFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceConditionalRenderingFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceConditionalRenderingFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), conditional_rendering: Bool32::default(), inherited_conditional_rendering: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceConditionalRenderingFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceConditionalRenderingFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceConditionalRenderingFeaturesEXT<'_> {} impl<'a> PhysicalDeviceConditionalRenderingFeaturesEXT<'a> { #[inline] pub fn conditional_rendering(mut self, conditional_rendering: bool) -> Self { self.conditional_rendering = 
conditional_rendering.into(); self } #[inline] pub fn inherited_conditional_rendering( mut self, inherited_conditional_rendering: bool, ) -> Self { self.inherited_conditional_rendering = inherited_conditional_rendering.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkanMemoryModelFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub vulkan_memory_model: Bool32, pub vulkan_memory_model_device_scope: Bool32, pub vulkan_memory_model_availability_visibility_chains: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkanMemoryModelFeatures<'_> {} unsafe impl Sync for PhysicalDeviceVulkanMemoryModelFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceVulkanMemoryModelFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), vulkan_memory_model: Bool32::default(), vulkan_memory_model_device_scope: Bool32::default(), vulkan_memory_model_availability_visibility_chains: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkanMemoryModelFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkanMemoryModelFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkanMemoryModelFeatures<'_> {} impl<'a> PhysicalDeviceVulkanMemoryModelFeatures<'a> { #[inline] pub fn vulkan_memory_model(mut self, vulkan_memory_model: bool) -> Self { self.vulkan_memory_model = vulkan_memory_model.into(); self } #[inline] pub fn vulkan_memory_model_device_scope( mut self, vulkan_memory_model_device_scope: bool, ) -> Self { self.vulkan_memory_model_device_scope = vulkan_memory_model_device_scope.into(); self } #[inline] pub fn vulkan_memory_model_availability_visibility_chains( mut self, vulkan_memory_model_availability_visibility_chains: bool, ) -> Self { self.vulkan_memory_model_availability_visibility_chains = vulkan_memory_model_availability_visibility_chains.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderAtomicInt64Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_buffer_int64_atomics: Bool32, pub shader_shared_int64_atomics: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderAtomicInt64Features<'_> {} unsafe impl Sync for PhysicalDeviceShaderAtomicInt64Features<'_> {} impl ::core::default::Default for PhysicalDeviceShaderAtomicInt64Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_buffer_int64_atomics: Bool32::default(), shader_shared_int64_atomics: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderAtomicInt64Features<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicInt64Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicInt64Features<'_> {} impl<'a> PhysicalDeviceShaderAtomicInt64Features<'a> { #[inline] pub fn shader_buffer_int64_atomics(mut self, shader_buffer_int64_atomics: bool) -> Self { self.shader_buffer_int64_atomics = shader_buffer_int64_atomics.into(); self } 
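    // Editor's note: illustrative usage sketch, not part of the generated bindings.
    // The `push_next` documentation earlier in this module describes how extension
    // structs are spliced into a `p_next` chain. `PhysicalDeviceShaderAtomicInt64Features`
    // is a typical case: it implements `ExtendsPhysicalDeviceFeatures2` (for querying
    // support) and `ExtendsDeviceCreateInfo` (for enabling the feature at device
    // creation). In the commented sketch below, `instance`, `physical_device` and
    // `queue_create_infos` are assumed to already exist in the caller's code; they are
    // placeholders, not items defined by this crate.
    //
    //     use ash::vk;
    //
    //     // Query support by chaining this struct onto PhysicalDeviceFeatures2.
    //     let mut atomic_int64 = vk::PhysicalDeviceShaderAtomicInt64Features::default();
    //     let mut features2 =
    //         vk::PhysicalDeviceFeatures2::default().push_next(&mut atomic_int64);
    //     unsafe { instance.get_physical_device_features2(physical_device, &mut features2) };
    //     let supported = atomic_int64.shader_buffer_int64_atomics == vk::TRUE;
    //
    //     // Enable the feature by chaining the same struct onto DeviceCreateInfo.
    //     let mut enable = vk::PhysicalDeviceShaderAtomicInt64Features::default()
    //         .shader_buffer_int64_atomics(supported);
    //     let device_create_info = vk::DeviceCreateInfo::default()
    //         .queue_create_infos(&queue_create_infos)
    //         .push_next(&mut enable);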
#[inline] pub fn shader_shared_int64_atomics(mut self, shader_shared_int64_atomics: bool) -> Self { self.shader_shared_int64_atomics = shader_shared_int64_atomics.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderAtomicFloatFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_buffer_float32_atomics: Bool32, pub shader_buffer_float32_atomic_add: Bool32, pub shader_buffer_float64_atomics: Bool32, pub shader_buffer_float64_atomic_add: Bool32, pub shader_shared_float32_atomics: Bool32, pub shader_shared_float32_atomic_add: Bool32, pub shader_shared_float64_atomics: Bool32, pub shader_shared_float64_atomic_add: Bool32, pub shader_image_float32_atomics: Bool32, pub shader_image_float32_atomic_add: Bool32, pub sparse_image_float32_atomics: Bool32, pub sparse_image_float32_atomic_add: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderAtomicFloatFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderAtomicFloatFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderAtomicFloatFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_buffer_float32_atomics: Bool32::default(), shader_buffer_float32_atomic_add: Bool32::default(), shader_buffer_float64_atomics: Bool32::default(), shader_buffer_float64_atomic_add: Bool32::default(), shader_shared_float32_atomics: Bool32::default(), shader_shared_float32_atomic_add: Bool32::default(), shader_shared_float64_atomics: Bool32::default(), shader_shared_float64_atomic_add: Bool32::default(), shader_image_float32_atomics: Bool32::default(), shader_image_float32_atomic_add: Bool32::default(), sparse_image_float32_atomics: Bool32::default(), sparse_image_float32_atomic_add: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderAtomicFloatFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicFloatFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloatFeaturesEXT<'_> {} impl<'a> PhysicalDeviceShaderAtomicFloatFeaturesEXT<'a> { #[inline] pub fn shader_buffer_float32_atomics(mut self, shader_buffer_float32_atomics: bool) -> Self { self.shader_buffer_float32_atomics = shader_buffer_float32_atomics.into(); self } #[inline] pub fn shader_buffer_float32_atomic_add( mut self, shader_buffer_float32_atomic_add: bool, ) -> Self { self.shader_buffer_float32_atomic_add = shader_buffer_float32_atomic_add.into(); self } #[inline] pub fn shader_buffer_float64_atomics(mut self, shader_buffer_float64_atomics: bool) -> Self { self.shader_buffer_float64_atomics = shader_buffer_float64_atomics.into(); self } #[inline] pub fn shader_buffer_float64_atomic_add( mut self, shader_buffer_float64_atomic_add: bool, ) -> Self { self.shader_buffer_float64_atomic_add = shader_buffer_float64_atomic_add.into(); self } #[inline] pub fn shader_shared_float32_atomics(mut self, shader_shared_float32_atomics: bool) -> Self { self.shader_shared_float32_atomics = shader_shared_float32_atomics.into(); self } #[inline] pub fn shader_shared_float32_atomic_add( mut self, shader_shared_float32_atomic_add: bool, ) -> Self { self.shader_shared_float32_atomic_add = shader_shared_float32_atomic_add.into(); self } #[inline] pub fn 
shader_shared_float64_atomics(mut self, shader_shared_float64_atomics: bool) -> Self { self.shader_shared_float64_atomics = shader_shared_float64_atomics.into(); self } #[inline] pub fn shader_shared_float64_atomic_add( mut self, shader_shared_float64_atomic_add: bool, ) -> Self { self.shader_shared_float64_atomic_add = shader_shared_float64_atomic_add.into(); self } #[inline] pub fn shader_image_float32_atomics(mut self, shader_image_float32_atomics: bool) -> Self { self.shader_image_float32_atomics = shader_image_float32_atomics.into(); self } #[inline] pub fn shader_image_float32_atomic_add( mut self, shader_image_float32_atomic_add: bool, ) -> Self { self.shader_image_float32_atomic_add = shader_image_float32_atomic_add.into(); self } #[inline] pub fn sparse_image_float32_atomics(mut self, sparse_image_float32_atomics: bool) -> Self { self.sparse_image_float32_atomics = sparse_image_float32_atomics.into(); self } #[inline] pub fn sparse_image_float32_atomic_add( mut self, sparse_image_float32_atomic_add: bool, ) -> Self { self.sparse_image_float32_atomic_add = sparse_image_float32_atomic_add.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_buffer_float16_atomics: Bool32, pub shader_buffer_float16_atomic_add: Bool32, pub shader_buffer_float16_atomic_min_max: Bool32, pub shader_buffer_float32_atomic_min_max: Bool32, pub shader_buffer_float64_atomic_min_max: Bool32, pub shader_shared_float16_atomics: Bool32, pub shader_shared_float16_atomic_add: Bool32, pub shader_shared_float16_atomic_min_max: Bool32, pub shader_shared_float32_atomic_min_max: Bool32, pub shader_shared_float64_atomic_min_max: Bool32, pub shader_image_float32_atomic_min_max: Bool32, pub sparse_image_float32_atomic_min_max: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_buffer_float16_atomics: Bool32::default(), shader_buffer_float16_atomic_add: Bool32::default(), shader_buffer_float16_atomic_min_max: Bool32::default(), shader_buffer_float32_atomic_min_max: Bool32::default(), shader_buffer_float64_atomic_min_max: Bool32::default(), shader_shared_float16_atomics: Bool32::default(), shader_shared_float16_atomic_add: Bool32::default(), shader_shared_float16_atomic_min_max: Bool32::default(), shader_shared_float32_atomic_min_max: Bool32::default(), shader_shared_float64_atomic_min_max: Bool32::default(), shader_image_float32_atomic_min_max: Bool32::default(), sparse_image_float32_atomic_min_max: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'_> {} impl<'a> PhysicalDeviceShaderAtomicFloat2FeaturesEXT<'a> { #[inline] pub fn shader_buffer_float16_atomics(mut self, shader_buffer_float16_atomics: bool) -> Self { 
self.shader_buffer_float16_atomics = shader_buffer_float16_atomics.into(); self } #[inline] pub fn shader_buffer_float16_atomic_add( mut self, shader_buffer_float16_atomic_add: bool, ) -> Self { self.shader_buffer_float16_atomic_add = shader_buffer_float16_atomic_add.into(); self } #[inline] pub fn shader_buffer_float16_atomic_min_max( mut self, shader_buffer_float16_atomic_min_max: bool, ) -> Self { self.shader_buffer_float16_atomic_min_max = shader_buffer_float16_atomic_min_max.into(); self } #[inline] pub fn shader_buffer_float32_atomic_min_max( mut self, shader_buffer_float32_atomic_min_max: bool, ) -> Self { self.shader_buffer_float32_atomic_min_max = shader_buffer_float32_atomic_min_max.into(); self } #[inline] pub fn shader_buffer_float64_atomic_min_max( mut self, shader_buffer_float64_atomic_min_max: bool, ) -> Self { self.shader_buffer_float64_atomic_min_max = shader_buffer_float64_atomic_min_max.into(); self } #[inline] pub fn shader_shared_float16_atomics(mut self, shader_shared_float16_atomics: bool) -> Self { self.shader_shared_float16_atomics = shader_shared_float16_atomics.into(); self } #[inline] pub fn shader_shared_float16_atomic_add( mut self, shader_shared_float16_atomic_add: bool, ) -> Self { self.shader_shared_float16_atomic_add = shader_shared_float16_atomic_add.into(); self } #[inline] pub fn shader_shared_float16_atomic_min_max( mut self, shader_shared_float16_atomic_min_max: bool, ) -> Self { self.shader_shared_float16_atomic_min_max = shader_shared_float16_atomic_min_max.into(); self } #[inline] pub fn shader_shared_float32_atomic_min_max( mut self, shader_shared_float32_atomic_min_max: bool, ) -> Self { self.shader_shared_float32_atomic_min_max = shader_shared_float32_atomic_min_max.into(); self } #[inline] pub fn shader_shared_float64_atomic_min_max( mut self, shader_shared_float64_atomic_min_max: bool, ) -> Self { self.shader_shared_float64_atomic_min_max = shader_shared_float64_atomic_min_max.into(); self } #[inline] pub fn shader_image_float32_atomic_min_max( mut self, shader_image_float32_atomic_min_max: bool, ) -> Self { self.shader_image_float32_atomic_min_max = shader_image_float32_atomic_min_max.into(); self } #[inline] pub fn sparse_image_float32_atomic_min_max( mut self, sparse_image_float32_atomic_min_max: bool, ) -> Self { self.sparse_image_float32_atomic_min_max = sparse_image_float32_atomic_min_max.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub vertex_attribute_instance_rate_divisor: Bool32, pub vertex_attribute_instance_rate_zero_divisor: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), vertex_attribute_instance_rate_divisor: Bool32::default(), vertex_attribute_instance_rate_zero_divisor: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for 
PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'_> {} impl<'a> PhysicalDeviceVertexAttributeDivisorFeaturesKHR<'a> { #[inline] pub fn vertex_attribute_instance_rate_divisor( mut self, vertex_attribute_instance_rate_divisor: bool, ) -> Self { self.vertex_attribute_instance_rate_divisor = vertex_attribute_instance_rate_divisor.into(); self } #[inline] pub fn vertex_attribute_instance_rate_zero_divisor( mut self, vertex_attribute_instance_rate_zero_divisor: bool, ) -> Self { self.vertex_attribute_instance_rate_zero_divisor = vertex_attribute_instance_rate_zero_divisor.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueueFamilyCheckpointPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub checkpoint_execution_stage_mask: PipelineStageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueueFamilyCheckpointPropertiesNV<'_> {} unsafe impl Sync for QueueFamilyCheckpointPropertiesNV<'_> {} impl ::core::default::Default for QueueFamilyCheckpointPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), checkpoint_execution_stage_mask: PipelineStageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueueFamilyCheckpointPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV; } unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointPropertiesNV<'_> {} impl<'a> QueueFamilyCheckpointPropertiesNV<'a> { #[inline] pub fn checkpoint_execution_stage_mask( mut self, checkpoint_execution_stage_mask: PipelineStageFlags, ) -> Self { self.checkpoint_execution_stage_mask = checkpoint_execution_stage_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CheckpointDataNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub stage: PipelineStageFlags, pub p_checkpoint_marker: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CheckpointDataNV<'_> {} unsafe impl Sync for CheckpointDataNV<'_> {} impl ::core::default::Default for CheckpointDataNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), stage: PipelineStageFlags::default(), p_checkpoint_marker: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CheckpointDataNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CHECKPOINT_DATA_NV; } impl<'a> CheckpointDataNV<'a> { #[inline] pub fn stage(mut self, stage: PipelineStageFlags) -> Self { self.stage = stage; self } #[inline] pub fn checkpoint_marker(mut self, checkpoint_marker: *mut c_void) -> Self { self.p_checkpoint_marker = checkpoint_marker; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDepthStencilResolveProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub supported_depth_resolve_modes: ResolveModeFlags, pub supported_stencil_resolve_modes: ResolveModeFlags, pub independent_resolve_none: Bool32, pub independent_resolve: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDepthStencilResolveProperties<'_> {} unsafe impl Sync for PhysicalDeviceDepthStencilResolveProperties<'_> {} impl 
::core::default::Default for PhysicalDeviceDepthStencilResolveProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), supported_depth_resolve_modes: ResolveModeFlags::default(), supported_stencil_resolve_modes: ResolveModeFlags::default(), independent_resolve_none: Bool32::default(), independent_resolve: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDepthStencilResolveProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDepthStencilResolveProperties<'_> {} impl<'a> PhysicalDeviceDepthStencilResolveProperties<'a> { #[inline] pub fn supported_depth_resolve_modes( mut self, supported_depth_resolve_modes: ResolveModeFlags, ) -> Self { self.supported_depth_resolve_modes = supported_depth_resolve_modes; self } #[inline] pub fn supported_stencil_resolve_modes( mut self, supported_stencil_resolve_modes: ResolveModeFlags, ) -> Self { self.supported_stencil_resolve_modes = supported_stencil_resolve_modes; self } #[inline] pub fn independent_resolve_none(mut self, independent_resolve_none: bool) -> Self { self.independent_resolve_none = independent_resolve_none.into(); self } #[inline] pub fn independent_resolve(mut self, independent_resolve: bool) -> Self { self.independent_resolve = independent_resolve.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassDescriptionDepthStencilResolve<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub depth_resolve_mode: ResolveModeFlags, pub stencil_resolve_mode: ResolveModeFlags, pub p_depth_stencil_resolve_attachment: *const AttachmentReference2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassDescriptionDepthStencilResolve<'_> {} unsafe impl Sync for SubpassDescriptionDepthStencilResolve<'_> {} impl ::core::default::Default for SubpassDescriptionDepthStencilResolve<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), depth_resolve_mode: ResolveModeFlags::default(), stencil_resolve_mode: ResolveModeFlags::default(), p_depth_stencil_resolve_attachment: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassDescriptionDepthStencilResolve<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE; } unsafe impl ExtendsSubpassDescription2 for SubpassDescriptionDepthStencilResolve<'_> {} impl<'a> SubpassDescriptionDepthStencilResolve<'a> { #[inline] pub fn depth_resolve_mode(mut self, depth_resolve_mode: ResolveModeFlags) -> Self { self.depth_resolve_mode = depth_resolve_mode; self } #[inline] pub fn stencil_resolve_mode(mut self, stencil_resolve_mode: ResolveModeFlags) -> Self { self.stencil_resolve_mode = stencil_resolve_mode; self } #[inline] pub fn depth_stencil_resolve_attachment( mut self, depth_stencil_resolve_attachment: &'a AttachmentReference2<'a>, ) -> Self { self.p_depth_stencil_resolve_attachment = depth_stencil_resolve_attachment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewASTCDecodeModeEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub decode_mode: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewASTCDecodeModeEXT<'_> {} unsafe impl 
Sync for ImageViewASTCDecodeModeEXT<'_> {} impl ::core::default::Default for ImageViewASTCDecodeModeEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), decode_mode: Format::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewASTCDecodeModeEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_ASTC_DECODE_MODE_EXT; } unsafe impl ExtendsImageViewCreateInfo for ImageViewASTCDecodeModeEXT<'_> {} impl<'a> ImageViewASTCDecodeModeEXT<'a> { #[inline] pub fn decode_mode(mut self, decode_mode: Format) -> Self { self.decode_mode = decode_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceASTCDecodeFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub decode_mode_shared_exponent: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceASTCDecodeFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceASTCDecodeFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceASTCDecodeFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), decode_mode_shared_exponent: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceASTCDecodeFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceASTCDecodeFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceASTCDecodeFeaturesEXT<'_> {} impl<'a> PhysicalDeviceASTCDecodeFeaturesEXT<'a> { #[inline] pub fn decode_mode_shared_exponent(mut self, decode_mode_shared_exponent: bool) -> Self { self.decode_mode_shared_exponent = decode_mode_shared_exponent.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTransformFeedbackFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub transform_feedback: Bool32, pub geometry_streams: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTransformFeedbackFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceTransformFeedbackFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceTransformFeedbackFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), transform_feedback: Bool32::default(), geometry_streams: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTransformFeedbackFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTransformFeedbackFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTransformFeedbackFeaturesEXT<'_> {} impl<'a> PhysicalDeviceTransformFeedbackFeaturesEXT<'a> { #[inline] pub fn transform_feedback(mut self, transform_feedback: bool) -> Self { self.transform_feedback = transform_feedback.into(); self } #[inline] pub fn geometry_streams(mut self, geometry_streams: bool) -> Self { self.geometry_streams = geometry_streams.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTransformFeedbackPropertiesEXT<'a> { pub s_type: StructureType, 
pub p_next: *mut c_void, pub max_transform_feedback_streams: u32, pub max_transform_feedback_buffers: u32, pub max_transform_feedback_buffer_size: DeviceSize, pub max_transform_feedback_stream_data_size: u32, pub max_transform_feedback_buffer_data_size: u32, pub max_transform_feedback_buffer_data_stride: u32, pub transform_feedback_queries: Bool32, pub transform_feedback_streams_lines_triangles: Bool32, pub transform_feedback_rasterization_stream_select: Bool32, pub transform_feedback_draw: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTransformFeedbackPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceTransformFeedbackPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceTransformFeedbackPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_transform_feedback_streams: u32::default(), max_transform_feedback_buffers: u32::default(), max_transform_feedback_buffer_size: DeviceSize::default(), max_transform_feedback_stream_data_size: u32::default(), max_transform_feedback_buffer_data_size: u32::default(), max_transform_feedback_buffer_data_stride: u32::default(), transform_feedback_queries: Bool32::default(), transform_feedback_streams_lines_triangles: Bool32::default(), transform_feedback_rasterization_stream_select: Bool32::default(), transform_feedback_draw: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTransformFeedbackPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTransformFeedbackPropertiesEXT<'_> {} impl<'a> PhysicalDeviceTransformFeedbackPropertiesEXT<'a> { #[inline] pub fn max_transform_feedback_streams(mut self, max_transform_feedback_streams: u32) -> Self { self.max_transform_feedback_streams = max_transform_feedback_streams; self } #[inline] pub fn max_transform_feedback_buffers(mut self, max_transform_feedback_buffers: u32) -> Self { self.max_transform_feedback_buffers = max_transform_feedback_buffers; self } #[inline] pub fn max_transform_feedback_buffer_size( mut self, max_transform_feedback_buffer_size: DeviceSize, ) -> Self { self.max_transform_feedback_buffer_size = max_transform_feedback_buffer_size; self } #[inline] pub fn max_transform_feedback_stream_data_size( mut self, max_transform_feedback_stream_data_size: u32, ) -> Self { self.max_transform_feedback_stream_data_size = max_transform_feedback_stream_data_size; self } #[inline] pub fn max_transform_feedback_buffer_data_size( mut self, max_transform_feedback_buffer_data_size: u32, ) -> Self { self.max_transform_feedback_buffer_data_size = max_transform_feedback_buffer_data_size; self } #[inline] pub fn max_transform_feedback_buffer_data_stride( mut self, max_transform_feedback_buffer_data_stride: u32, ) -> Self { self.max_transform_feedback_buffer_data_stride = max_transform_feedback_buffer_data_stride; self } #[inline] pub fn transform_feedback_queries(mut self, transform_feedback_queries: bool) -> Self { self.transform_feedback_queries = transform_feedback_queries.into(); self } #[inline] pub fn transform_feedback_streams_lines_triangles( mut self, transform_feedback_streams_lines_triangles: bool, ) -> Self { self.transform_feedback_streams_lines_triangles = transform_feedback_streams_lines_triangles.into(); self } #[inline] pub fn transform_feedback_rasterization_stream_select( mut self, 
transform_feedback_rasterization_stream_select: bool, ) -> Self { self.transform_feedback_rasterization_stream_select = transform_feedback_rasterization_stream_select.into(); self } #[inline] pub fn transform_feedback_draw(mut self, transform_feedback_draw: bool) -> Self { self.transform_feedback_draw = transform_feedback_draw.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRasterizationStateStreamCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineRasterizationStateStreamCreateFlagsEXT, pub rasterization_stream: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationStateStreamCreateInfoEXT<'_> {} unsafe impl Sync for PipelineRasterizationStateStreamCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineRasterizationStateStreamCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineRasterizationStateStreamCreateFlagsEXT::default(), rasterization_stream: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationStateStreamCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineRasterizationStateCreateInfo for PipelineRasterizationStateStreamCreateInfoEXT<'_> { } impl<'a> PipelineRasterizationStateStreamCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: PipelineRasterizationStateStreamCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn rasterization_stream(mut self, rasterization_stream: u32) -> Self { self.rasterization_stream = rasterization_stream; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub representative_fragment_test: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), representative_fragment_test: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'_> {} impl<'a> PhysicalDeviceRepresentativeFragmentTestFeaturesNV<'a> { #[inline] pub fn representative_fragment_test(mut self, representative_fragment_test: bool) -> Self { self.representative_fragment_test = representative_fragment_test.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRepresentativeFragmentTestStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub representative_fragment_test_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
PipelineRepresentativeFragmentTestStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineRepresentativeFragmentTestStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineRepresentativeFragmentTestStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), representative_fragment_test_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRepresentativeFragmentTestStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRepresentativeFragmentTestStateCreateInfoNV<'_> { } impl<'a> PipelineRepresentativeFragmentTestStateCreateInfoNV<'a> { #[inline] pub fn representative_fragment_test_enable( mut self, representative_fragment_test_enable: bool, ) -> Self { self.representative_fragment_test_enable = representative_fragment_test_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExclusiveScissorFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub exclusive_scissor: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExclusiveScissorFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceExclusiveScissorFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceExclusiveScissorFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), exclusive_scissor: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExclusiveScissorFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExclusiveScissorFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExclusiveScissorFeaturesNV<'_> {} impl<'a> PhysicalDeviceExclusiveScissorFeaturesNV<'a> { #[inline] pub fn exclusive_scissor(mut self, exclusive_scissor: bool) -> Self { self.exclusive_scissor = exclusive_scissor.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportExclusiveScissorStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub exclusive_scissor_count: u32, pub p_exclusive_scissors: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportExclusiveScissorStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineViewportExclusiveScissorStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineViewportExclusiveScissorStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), exclusive_scissor_count: u32::default(), p_exclusive_scissors: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportExclusiveScissorStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportExclusiveScissorStateCreateInfoNV<'_> { } impl<'a> PipelineViewportExclusiveScissorStateCreateInfoNV<'a> { #[inline] pub fn exclusive_scissors(mut self, exclusive_scissors: &'a [Rect2D]) -> Self { self.exclusive_scissor_count = 
exclusive_scissors.len() as _; self.p_exclusive_scissors = exclusive_scissors.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCornerSampledImageFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub corner_sampled_image: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCornerSampledImageFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceCornerSampledImageFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCornerSampledImageFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), corner_sampled_image: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCornerSampledImageFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCornerSampledImageFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCornerSampledImageFeaturesNV<'_> {} impl<'a> PhysicalDeviceCornerSampledImageFeaturesNV<'a> { #[inline] pub fn corner_sampled_image(mut self, corner_sampled_image: bool) -> Self { self.corner_sampled_image = corner_sampled_image.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceComputeShaderDerivativesFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub compute_derivative_group_quads: Bool32, pub compute_derivative_group_linear: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceComputeShaderDerivativesFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceComputeShaderDerivativesFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceComputeShaderDerivativesFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), compute_derivative_group_quads: Bool32::default(), compute_derivative_group_linear: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceComputeShaderDerivativesFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceComputeShaderDerivativesFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceComputeShaderDerivativesFeaturesNV<'_> {} impl<'a> PhysicalDeviceComputeShaderDerivativesFeaturesNV<'a> { #[inline] pub fn compute_derivative_group_quads(mut self, compute_derivative_group_quads: bool) -> Self { self.compute_derivative_group_quads = compute_derivative_group_quads.into(); self } #[inline] pub fn compute_derivative_group_linear( mut self, compute_derivative_group_linear: bool, ) -> Self { self.compute_derivative_group_linear = compute_derivative_group_linear.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderImageFootprintFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_footprint: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderImageFootprintFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceShaderImageFootprintFeaturesNV<'_> {} impl ::core::default::Default for 
PhysicalDeviceShaderImageFootprintFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_footprint: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderImageFootprintFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderImageFootprintFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageFootprintFeaturesNV<'_> {} impl<'a> PhysicalDeviceShaderImageFootprintFeaturesNV<'a> { #[inline] pub fn image_footprint(mut self, image_footprint: bool) -> Self { self.image_footprint = image_footprint.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub dedicated_allocation_image_aliasing: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), dedicated_allocation_image_aliasing: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'_> { } impl<'a> PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV<'a> { #[inline] pub fn dedicated_allocation_image_aliasing( mut self, dedicated_allocation_image_aliasing: bool, ) -> Self { self.dedicated_allocation_image_aliasing = dedicated_allocation_image_aliasing.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCopyMemoryIndirectFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub indirect_copy: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCopyMemoryIndirectFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceCopyMemoryIndirectFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCopyMemoryIndirectFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), indirect_copy: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCopyMemoryIndirectFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCopyMemoryIndirectFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCopyMemoryIndirectFeaturesNV<'_> {} impl<'a> PhysicalDeviceCopyMemoryIndirectFeaturesNV<'a> { #[inline] pub fn indirect_copy(mut self, indirect_copy: bool) -> Self { self.indirect_copy = indirect_copy.into(); self } } #[repr(C)] #[cfg_attr(feature = 
"debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCopyMemoryIndirectPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub supported_queues: QueueFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCopyMemoryIndirectPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceCopyMemoryIndirectPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCopyMemoryIndirectPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), supported_queues: QueueFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCopyMemoryIndirectPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCopyMemoryIndirectPropertiesNV<'_> {} impl<'a> PhysicalDeviceCopyMemoryIndirectPropertiesNV<'a> { #[inline] pub fn supported_queues(mut self, supported_queues: QueueFlags) -> Self { self.supported_queues = supported_queues; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMemoryDecompressionFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_decompression: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMemoryDecompressionFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceMemoryDecompressionFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceMemoryDecompressionFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_decompression: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMemoryDecompressionFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMemoryDecompressionFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryDecompressionFeaturesNV<'_> {} impl<'a> PhysicalDeviceMemoryDecompressionFeaturesNV<'a> { #[inline] pub fn memory_decompression(mut self, memory_decompression: bool) -> Self { self.memory_decompression = memory_decompression.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMemoryDecompressionPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub decompression_methods: MemoryDecompressionMethodFlagsNV, pub max_decompression_indirect_count: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMemoryDecompressionPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceMemoryDecompressionPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceMemoryDecompressionPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), decompression_methods: MemoryDecompressionMethodFlagsNV::default(), max_decompression_indirect_count: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMemoryDecompressionPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for 
PhysicalDeviceMemoryDecompressionPropertiesNV<'_> {} impl<'a> PhysicalDeviceMemoryDecompressionPropertiesNV<'a> { #[inline] pub fn decompression_methods( mut self, decompression_methods: MemoryDecompressionMethodFlagsNV, ) -> Self { self.decompression_methods = decompression_methods; self } #[inline] pub fn max_decompression_indirect_count( mut self, max_decompression_indirect_count: u64, ) -> Self { self.max_decompression_indirect_count = max_decompression_indirect_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ShadingRatePaletteNV<'a> { pub shading_rate_palette_entry_count: u32, pub p_shading_rate_palette_entries: *const ShadingRatePaletteEntryNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ShadingRatePaletteNV<'_> {} unsafe impl Sync for ShadingRatePaletteNV<'_> {} impl ::core::default::Default for ShadingRatePaletteNV<'_> { #[inline] fn default() -> Self { Self { shading_rate_palette_entry_count: u32::default(), p_shading_rate_palette_entries: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> ShadingRatePaletteNV<'a> { #[inline] pub fn shading_rate_palette_entries( mut self, shading_rate_palette_entries: &'a [ShadingRatePaletteEntryNV], ) -> Self { self.shading_rate_palette_entry_count = shading_rate_palette_entries.len() as _; self.p_shading_rate_palette_entries = shading_rate_palette_entries.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportShadingRateImageStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub shading_rate_image_enable: Bool32, pub viewport_count: u32, pub p_shading_rate_palettes: *const ShadingRatePaletteNV<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportShadingRateImageStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineViewportShadingRateImageStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineViewportShadingRateImageStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), shading_rate_image_enable: Bool32::default(), viewport_count: u32::default(), p_shading_rate_palettes: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportShadingRateImageStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportShadingRateImageStateCreateInfoNV<'_> { } impl<'a> PipelineViewportShadingRateImageStateCreateInfoNV<'a> { #[inline] pub fn shading_rate_image_enable(mut self, shading_rate_image_enable: bool) -> Self { self.shading_rate_image_enable = shading_rate_image_enable.into(); self } #[inline] pub fn shading_rate_palettes( mut self, shading_rate_palettes: &'a [ShadingRatePaletteNV<'a>], ) -> Self { self.viewport_count = shading_rate_palettes.len() as _; self.p_shading_rate_palettes = shading_rate_palettes.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShadingRateImageFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shading_rate_image: Bool32, pub shading_rate_coarse_sample_order: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShadingRateImageFeaturesNV<'_> {} unsafe impl Sync for 
PhysicalDeviceShadingRateImageFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceShadingRateImageFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shading_rate_image: Bool32::default(), shading_rate_coarse_sample_order: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShadingRateImageFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShadingRateImageFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShadingRateImageFeaturesNV<'_> {} impl<'a> PhysicalDeviceShadingRateImageFeaturesNV<'a> { #[inline] pub fn shading_rate_image(mut self, shading_rate_image: bool) -> Self { self.shading_rate_image = shading_rate_image.into(); self } #[inline] pub fn shading_rate_coarse_sample_order( mut self, shading_rate_coarse_sample_order: bool, ) -> Self { self.shading_rate_coarse_sample_order = shading_rate_coarse_sample_order.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShadingRateImagePropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shading_rate_texel_size: Extent2D, pub shading_rate_palette_size: u32, pub shading_rate_max_coarse_samples: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShadingRateImagePropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceShadingRateImagePropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceShadingRateImagePropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shading_rate_texel_size: Extent2D::default(), shading_rate_palette_size: u32::default(), shading_rate_max_coarse_samples: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShadingRateImagePropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShadingRateImagePropertiesNV<'_> {} impl<'a> PhysicalDeviceShadingRateImagePropertiesNV<'a> { #[inline] pub fn shading_rate_texel_size(mut self, shading_rate_texel_size: Extent2D) -> Self { self.shading_rate_texel_size = shading_rate_texel_size; self } #[inline] pub fn shading_rate_palette_size(mut self, shading_rate_palette_size: u32) -> Self { self.shading_rate_palette_size = shading_rate_palette_size; self } #[inline] pub fn shading_rate_max_coarse_samples(mut self, shading_rate_max_coarse_samples: u32) -> Self { self.shading_rate_max_coarse_samples = shading_rate_max_coarse_samples; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceInvocationMaskFeaturesHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub invocation_mask: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceInvocationMaskFeaturesHUAWEI<'_> {} unsafe impl Sync for PhysicalDeviceInvocationMaskFeaturesHUAWEI<'_> {} impl ::core::default::Default for PhysicalDeviceInvocationMaskFeaturesHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), invocation_mask: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for 
PhysicalDeviceInvocationMaskFeaturesHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInvocationMaskFeaturesHUAWEI<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInvocationMaskFeaturesHUAWEI<'_> {} impl<'a> PhysicalDeviceInvocationMaskFeaturesHUAWEI<'a> { #[inline] pub fn invocation_mask(mut self, invocation_mask: bool) -> Self { self.invocation_mask = invocation_mask.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct CoarseSampleLocationNV { pub pixel_x: u32, pub pixel_y: u32, pub sample: u32, } impl CoarseSampleLocationNV { #[inline] pub fn pixel_x(mut self, pixel_x: u32) -> Self { self.pixel_x = pixel_x; self } #[inline] pub fn pixel_y(mut self, pixel_y: u32) -> Self { self.pixel_y = pixel_y; self } #[inline] pub fn sample(mut self, sample: u32) -> Self { self.sample = sample; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CoarseSampleOrderCustomNV<'a> { pub shading_rate: ShadingRatePaletteEntryNV, pub sample_count: u32, pub sample_location_count: u32, pub p_sample_locations: *const CoarseSampleLocationNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CoarseSampleOrderCustomNV<'_> {} unsafe impl Sync for CoarseSampleOrderCustomNV<'_> {} impl ::core::default::Default for CoarseSampleOrderCustomNV<'_> { #[inline] fn default() -> Self { Self { shading_rate: ShadingRatePaletteEntryNV::default(), sample_count: u32::default(), sample_location_count: u32::default(), p_sample_locations: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> CoarseSampleOrderCustomNV<'a> { #[inline] pub fn shading_rate(mut self, shading_rate: ShadingRatePaletteEntryNV) -> Self { self.shading_rate = shading_rate; self } #[inline] pub fn sample_count(mut self, sample_count: u32) -> Self { self.sample_count = sample_count; self } #[inline] pub fn sample_locations(mut self, sample_locations: &'a [CoarseSampleLocationNV]) -> Self { self.sample_location_count = sample_locations.len() as _; self.p_sample_locations = sample_locations.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportCoarseSampleOrderStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub sample_order_type: CoarseSampleOrderTypeNV, pub custom_sample_order_count: u32, pub p_custom_sample_orders: *const CoarseSampleOrderCustomNV<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportCoarseSampleOrderStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineViewportCoarseSampleOrderStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineViewportCoarseSampleOrderStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), sample_order_type: CoarseSampleOrderTypeNV::default(), custom_sample_order_count: u32::default(), p_custom_sample_orders: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportCoarseSampleOrderStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportCoarseSampleOrderStateCreateInfoNV<'_> { } impl<'a> 
PipelineViewportCoarseSampleOrderStateCreateInfoNV<'a> { #[inline] pub fn sample_order_type(mut self, sample_order_type: CoarseSampleOrderTypeNV) -> Self { self.sample_order_type = sample_order_type; self } #[inline] pub fn custom_sample_orders( mut self, custom_sample_orders: &'a [CoarseSampleOrderCustomNV<'a>], ) -> Self { self.custom_sample_order_count = custom_sample_orders.len() as _; self.p_custom_sample_orders = custom_sample_orders.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMeshShaderFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub task_shader: Bool32, pub mesh_shader: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMeshShaderFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceMeshShaderFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceMeshShaderFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), task_shader: Bool32::default(), mesh_shader: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMeshShaderFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMeshShaderFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesNV<'_> {} impl<'a> PhysicalDeviceMeshShaderFeaturesNV<'a> { #[inline] pub fn task_shader(mut self, task_shader: bool) -> Self { self.task_shader = task_shader.into(); self } #[inline] pub fn mesh_shader(mut self, mesh_shader: bool) -> Self { self.mesh_shader = mesh_shader.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMeshShaderPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_draw_mesh_tasks_count: u32, pub max_task_work_group_invocations: u32, pub max_task_work_group_size: [u32; 3], pub max_task_total_memory_size: u32, pub max_task_output_count: u32, pub max_mesh_work_group_invocations: u32, pub max_mesh_work_group_size: [u32; 3], pub max_mesh_total_memory_size: u32, pub max_mesh_output_vertices: u32, pub max_mesh_output_primitives: u32, pub max_mesh_multiview_view_count: u32, pub mesh_output_per_vertex_granularity: u32, pub mesh_output_per_primitive_granularity: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMeshShaderPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceMeshShaderPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceMeshShaderPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_draw_mesh_tasks_count: u32::default(), max_task_work_group_invocations: u32::default(), max_task_work_group_size: unsafe { ::core::mem::zeroed() }, max_task_total_memory_size: u32::default(), max_task_output_count: u32::default(), max_mesh_work_group_invocations: u32::default(), max_mesh_work_group_size: unsafe { ::core::mem::zeroed() }, max_mesh_total_memory_size: u32::default(), max_mesh_output_vertices: u32::default(), max_mesh_output_primitives: u32::default(), max_mesh_multiview_view_count: u32::default(), mesh_output_per_vertex_granularity: u32::default(), mesh_output_per_primitive_granularity: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for 
PhysicalDeviceMeshShaderPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesNV<'_> {} impl<'a> PhysicalDeviceMeshShaderPropertiesNV<'a> { #[inline] pub fn max_draw_mesh_tasks_count(mut self, max_draw_mesh_tasks_count: u32) -> Self { self.max_draw_mesh_tasks_count = max_draw_mesh_tasks_count; self } #[inline] pub fn max_task_work_group_invocations(mut self, max_task_work_group_invocations: u32) -> Self { self.max_task_work_group_invocations = max_task_work_group_invocations; self } #[inline] pub fn max_task_work_group_size(mut self, max_task_work_group_size: [u32; 3]) -> Self { self.max_task_work_group_size = max_task_work_group_size; self } #[inline] pub fn max_task_total_memory_size(mut self, max_task_total_memory_size: u32) -> Self { self.max_task_total_memory_size = max_task_total_memory_size; self } #[inline] pub fn max_task_output_count(mut self, max_task_output_count: u32) -> Self { self.max_task_output_count = max_task_output_count; self } #[inline] pub fn max_mesh_work_group_invocations(mut self, max_mesh_work_group_invocations: u32) -> Self { self.max_mesh_work_group_invocations = max_mesh_work_group_invocations; self } #[inline] pub fn max_mesh_work_group_size(mut self, max_mesh_work_group_size: [u32; 3]) -> Self { self.max_mesh_work_group_size = max_mesh_work_group_size; self } #[inline] pub fn max_mesh_total_memory_size(mut self, max_mesh_total_memory_size: u32) -> Self { self.max_mesh_total_memory_size = max_mesh_total_memory_size; self } #[inline] pub fn max_mesh_output_vertices(mut self, max_mesh_output_vertices: u32) -> Self { self.max_mesh_output_vertices = max_mesh_output_vertices; self } #[inline] pub fn max_mesh_output_primitives(mut self, max_mesh_output_primitives: u32) -> Self { self.max_mesh_output_primitives = max_mesh_output_primitives; self } #[inline] pub fn max_mesh_multiview_view_count(mut self, max_mesh_multiview_view_count: u32) -> Self { self.max_mesh_multiview_view_count = max_mesh_multiview_view_count; self } #[inline] pub fn mesh_output_per_vertex_granularity( mut self, mesh_output_per_vertex_granularity: u32, ) -> Self { self.mesh_output_per_vertex_granularity = mesh_output_per_vertex_granularity; self } #[inline] pub fn mesh_output_per_primitive_granularity( mut self, mesh_output_per_primitive_granularity: u32, ) -> Self { self.mesh_output_per_primitive_granularity = mesh_output_per_primitive_granularity; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DrawMeshTasksIndirectCommandNV { pub task_count: u32, pub first_task: u32, } impl DrawMeshTasksIndirectCommandNV { #[inline] pub fn task_count(mut self, task_count: u32) -> Self { self.task_count = task_count; self } #[inline] pub fn first_task(mut self, first_task: u32) -> Self { self.first_task = first_task; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMeshShaderFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub task_shader: Bool32, pub mesh_shader: Bool32, pub multiview_mesh_shader: Bool32, pub primitive_fragment_shading_rate_mesh_shader: Bool32, pub mesh_shader_queries: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMeshShaderFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMeshShaderFeaturesEXT<'_> {} impl 
::core::default::Default for PhysicalDeviceMeshShaderFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), task_shader: Bool32::default(), mesh_shader: Bool32::default(), multiview_mesh_shader: Bool32::default(), primitive_fragment_shading_rate_mesh_shader: Bool32::default(), mesh_shader_queries: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMeshShaderFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMeshShaderFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesEXT<'_> {} impl<'a> PhysicalDeviceMeshShaderFeaturesEXT<'a> { #[inline] pub fn task_shader(mut self, task_shader: bool) -> Self { self.task_shader = task_shader.into(); self } #[inline] pub fn mesh_shader(mut self, mesh_shader: bool) -> Self { self.mesh_shader = mesh_shader.into(); self } #[inline] pub fn multiview_mesh_shader(mut self, multiview_mesh_shader: bool) -> Self { self.multiview_mesh_shader = multiview_mesh_shader.into(); self } #[inline] pub fn primitive_fragment_shading_rate_mesh_shader( mut self, primitive_fragment_shading_rate_mesh_shader: bool, ) -> Self { self.primitive_fragment_shading_rate_mesh_shader = primitive_fragment_shading_rate_mesh_shader.into(); self } #[inline] pub fn mesh_shader_queries(mut self, mesh_shader_queries: bool) -> Self { self.mesh_shader_queries = mesh_shader_queries.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMeshShaderPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_task_work_group_total_count: u32, pub max_task_work_group_count: [u32; 3], pub max_task_work_group_invocations: u32, pub max_task_work_group_size: [u32; 3], pub max_task_payload_size: u32, pub max_task_shared_memory_size: u32, pub max_task_payload_and_shared_memory_size: u32, pub max_mesh_work_group_total_count: u32, pub max_mesh_work_group_count: [u32; 3], pub max_mesh_work_group_invocations: u32, pub max_mesh_work_group_size: [u32; 3], pub max_mesh_shared_memory_size: u32, pub max_mesh_payload_and_shared_memory_size: u32, pub max_mesh_output_memory_size: u32, pub max_mesh_payload_and_output_memory_size: u32, pub max_mesh_output_components: u32, pub max_mesh_output_vertices: u32, pub max_mesh_output_primitives: u32, pub max_mesh_output_layers: u32, pub max_mesh_multiview_view_count: u32, pub mesh_output_per_vertex_granularity: u32, pub mesh_output_per_primitive_granularity: u32, pub max_preferred_task_work_group_invocations: u32, pub max_preferred_mesh_work_group_invocations: u32, pub prefers_local_invocation_vertex_output: Bool32, pub prefers_local_invocation_primitive_output: Bool32, pub prefers_compact_vertex_output: Bool32, pub prefers_compact_primitive_output: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMeshShaderPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMeshShaderPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMeshShaderPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_task_work_group_total_count: u32::default(), max_task_work_group_count: unsafe { ::core::mem::zeroed() }, max_task_work_group_invocations: u32::default(), max_task_work_group_size: unsafe { 
::core::mem::zeroed() }, max_task_payload_size: u32::default(), max_task_shared_memory_size: u32::default(), max_task_payload_and_shared_memory_size: u32::default(), max_mesh_work_group_total_count: u32::default(), max_mesh_work_group_count: unsafe { ::core::mem::zeroed() }, max_mesh_work_group_invocations: u32::default(), max_mesh_work_group_size: unsafe { ::core::mem::zeroed() }, max_mesh_shared_memory_size: u32::default(), max_mesh_payload_and_shared_memory_size: u32::default(), max_mesh_output_memory_size: u32::default(), max_mesh_payload_and_output_memory_size: u32::default(), max_mesh_output_components: u32::default(), max_mesh_output_vertices: u32::default(), max_mesh_output_primitives: u32::default(), max_mesh_output_layers: u32::default(), max_mesh_multiview_view_count: u32::default(), mesh_output_per_vertex_granularity: u32::default(), mesh_output_per_primitive_granularity: u32::default(), max_preferred_task_work_group_invocations: u32::default(), max_preferred_mesh_work_group_invocations: u32::default(), prefers_local_invocation_vertex_output: Bool32::default(), prefers_local_invocation_primitive_output: Bool32::default(), prefers_compact_vertex_output: Bool32::default(), prefers_compact_primitive_output: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMeshShaderPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesEXT<'_> {} impl<'a> PhysicalDeviceMeshShaderPropertiesEXT<'a> { #[inline] pub fn max_task_work_group_total_count(mut self, max_task_work_group_total_count: u32) -> Self { self.max_task_work_group_total_count = max_task_work_group_total_count; self } #[inline] pub fn max_task_work_group_count(mut self, max_task_work_group_count: [u32; 3]) -> Self { self.max_task_work_group_count = max_task_work_group_count; self } #[inline] pub fn max_task_work_group_invocations(mut self, max_task_work_group_invocations: u32) -> Self { self.max_task_work_group_invocations = max_task_work_group_invocations; self } #[inline] pub fn max_task_work_group_size(mut self, max_task_work_group_size: [u32; 3]) -> Self { self.max_task_work_group_size = max_task_work_group_size; self } #[inline] pub fn max_task_payload_size(mut self, max_task_payload_size: u32) -> Self { self.max_task_payload_size = max_task_payload_size; self } #[inline] pub fn max_task_shared_memory_size(mut self, max_task_shared_memory_size: u32) -> Self { self.max_task_shared_memory_size = max_task_shared_memory_size; self } #[inline] pub fn max_task_payload_and_shared_memory_size( mut self, max_task_payload_and_shared_memory_size: u32, ) -> Self { self.max_task_payload_and_shared_memory_size = max_task_payload_and_shared_memory_size; self } #[inline] pub fn max_mesh_work_group_total_count(mut self, max_mesh_work_group_total_count: u32) -> Self { self.max_mesh_work_group_total_count = max_mesh_work_group_total_count; self } #[inline] pub fn max_mesh_work_group_count(mut self, max_mesh_work_group_count: [u32; 3]) -> Self { self.max_mesh_work_group_count = max_mesh_work_group_count; self } #[inline] pub fn max_mesh_work_group_invocations(mut self, max_mesh_work_group_invocations: u32) -> Self { self.max_mesh_work_group_invocations = max_mesh_work_group_invocations; self } #[inline] pub fn max_mesh_work_group_size(mut self, max_mesh_work_group_size: [u32; 3]) -> Self { self.max_mesh_work_group_size = max_mesh_work_group_size; self } 
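// A minimal query sketch (illustrative comment, not generated code): properties structs such as
// `PhysicalDeviceMeshShaderPropertiesEXT` are normally filled by the implementation rather than
// built field-by-field. Chain a default value into `PhysicalDeviceProperties2` and let
// `Instance::get_physical_device_properties2` populate it; `instance` (an `ash::Instance`) and
// `pdev` (a `PhysicalDevice`) are assumed to exist in the calling code.
//
//     let mut mesh_props = PhysicalDeviceMeshShaderPropertiesEXT::default();
//     let mut props2 = PhysicalDeviceProperties2::default().push_next(&mut mesh_props);
//     unsafe { instance.get_physical_device_properties2(pdev, &mut props2) };
//     let max_vertices = mesh_props.max_mesh_output_vertices;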
#[inline] pub fn max_mesh_shared_memory_size(mut self, max_mesh_shared_memory_size: u32) -> Self { self.max_mesh_shared_memory_size = max_mesh_shared_memory_size; self } #[inline] pub fn max_mesh_payload_and_shared_memory_size( mut self, max_mesh_payload_and_shared_memory_size: u32, ) -> Self { self.max_mesh_payload_and_shared_memory_size = max_mesh_payload_and_shared_memory_size; self } #[inline] pub fn max_mesh_output_memory_size(mut self, max_mesh_output_memory_size: u32) -> Self { self.max_mesh_output_memory_size = max_mesh_output_memory_size; self } #[inline] pub fn max_mesh_payload_and_output_memory_size( mut self, max_mesh_payload_and_output_memory_size: u32, ) -> Self { self.max_mesh_payload_and_output_memory_size = max_mesh_payload_and_output_memory_size; self } #[inline] pub fn max_mesh_output_components(mut self, max_mesh_output_components: u32) -> Self { self.max_mesh_output_components = max_mesh_output_components; self } #[inline] pub fn max_mesh_output_vertices(mut self, max_mesh_output_vertices: u32) -> Self { self.max_mesh_output_vertices = max_mesh_output_vertices; self } #[inline] pub fn max_mesh_output_primitives(mut self, max_mesh_output_primitives: u32) -> Self { self.max_mesh_output_primitives = max_mesh_output_primitives; self } #[inline] pub fn max_mesh_output_layers(mut self, max_mesh_output_layers: u32) -> Self { self.max_mesh_output_layers = max_mesh_output_layers; self } #[inline] pub fn max_mesh_multiview_view_count(mut self, max_mesh_multiview_view_count: u32) -> Self { self.max_mesh_multiview_view_count = max_mesh_multiview_view_count; self } #[inline] pub fn mesh_output_per_vertex_granularity( mut self, mesh_output_per_vertex_granularity: u32, ) -> Self { self.mesh_output_per_vertex_granularity = mesh_output_per_vertex_granularity; self } #[inline] pub fn mesh_output_per_primitive_granularity( mut self, mesh_output_per_primitive_granularity: u32, ) -> Self { self.mesh_output_per_primitive_granularity = mesh_output_per_primitive_granularity; self } #[inline] pub fn max_preferred_task_work_group_invocations( mut self, max_preferred_task_work_group_invocations: u32, ) -> Self { self.max_preferred_task_work_group_invocations = max_preferred_task_work_group_invocations; self } #[inline] pub fn max_preferred_mesh_work_group_invocations( mut self, max_preferred_mesh_work_group_invocations: u32, ) -> Self { self.max_preferred_mesh_work_group_invocations = max_preferred_mesh_work_group_invocations; self } #[inline] pub fn prefers_local_invocation_vertex_output( mut self, prefers_local_invocation_vertex_output: bool, ) -> Self { self.prefers_local_invocation_vertex_output = prefers_local_invocation_vertex_output.into(); self } #[inline] pub fn prefers_local_invocation_primitive_output( mut self, prefers_local_invocation_primitive_output: bool, ) -> Self { self.prefers_local_invocation_primitive_output = prefers_local_invocation_primitive_output.into(); self } #[inline] pub fn prefers_compact_vertex_output(mut self, prefers_compact_vertex_output: bool) -> Self { self.prefers_compact_vertex_output = prefers_compact_vertex_output.into(); self } #[inline] pub fn prefers_compact_primitive_output( mut self, prefers_compact_primitive_output: bool, ) -> Self { self.prefers_compact_primitive_output = prefers_compact_primitive_output.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DrawMeshTasksIndirectCommandEXT { pub group_count_x: u32, pub group_count_y: u32, pub group_count_z: u32, } 
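// A minimal usage sketch (illustrative comment, not generated code): `DrawMeshTasksIndirectCommandEXT`
// matches the per-draw record that `vkCmdDrawMeshTasksIndirectEXT` reads from an indirect buffer, so a
// typical pattern is to build the record on the host and copy it into a `Buffer` at the offset passed
// to the draw call. The group counts below are placeholder values.
//
//     let draw = DrawMeshTasksIndirectCommandEXT::default()
//         .group_count_x(64)
//         .group_count_y(1)
//         .group_count_z(1);
//     // `draw` is then written into the indirect buffer (upload/staging step omitted here).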
impl DrawMeshTasksIndirectCommandEXT { #[inline] pub fn group_count_x(mut self, group_count_x: u32) -> Self { self.group_count_x = group_count_x; self } #[inline] pub fn group_count_y(mut self, group_count_y: u32) -> Self { self.group_count_y = group_count_y; self } #[inline] pub fn group_count_z(mut self, group_count_z: u32) -> Self { self.group_count_z = group_count_z; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RayTracingShaderGroupCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: RayTracingShaderGroupTypeKHR, pub general_shader: u32, pub closest_hit_shader: u32, pub any_hit_shader: u32, pub intersection_shader: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RayTracingShaderGroupCreateInfoNV<'_> {} unsafe impl Sync for RayTracingShaderGroupCreateInfoNV<'_> {} impl ::core::default::Default for RayTracingShaderGroupCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: RayTracingShaderGroupTypeKHR::default(), general_shader: u32::default(), closest_hit_shader: u32::default(), any_hit_shader: u32::default(), intersection_shader: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RayTracingShaderGroupCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV; } impl<'a> RayTracingShaderGroupCreateInfoNV<'a> { #[inline] pub fn ty(mut self, ty: RayTracingShaderGroupTypeKHR) -> Self { self.ty = ty; self } #[inline] pub fn general_shader(mut self, general_shader: u32) -> Self { self.general_shader = general_shader; self } #[inline] pub fn closest_hit_shader(mut self, closest_hit_shader: u32) -> Self { self.closest_hit_shader = closest_hit_shader; self } #[inline] pub fn any_hit_shader(mut self, any_hit_shader: u32) -> Self { self.any_hit_shader = any_hit_shader; self } #[inline] pub fn intersection_shader(mut self, intersection_shader: u32) -> Self { self.intersection_shader = intersection_shader; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RayTracingShaderGroupCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: RayTracingShaderGroupTypeKHR, pub general_shader: u32, pub closest_hit_shader: u32, pub any_hit_shader: u32, pub intersection_shader: u32, pub p_shader_group_capture_replay_handle: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RayTracingShaderGroupCreateInfoKHR<'_> {} unsafe impl Sync for RayTracingShaderGroupCreateInfoKHR<'_> {} impl ::core::default::Default for RayTracingShaderGroupCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: RayTracingShaderGroupTypeKHR::default(), general_shader: u32::default(), closest_hit_shader: u32::default(), any_hit_shader: u32::default(), intersection_shader: u32::default(), p_shader_group_capture_replay_handle: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RayTracingShaderGroupCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR; } impl<'a> RayTracingShaderGroupCreateInfoKHR<'a> { #[inline] pub fn ty(mut self, ty: RayTracingShaderGroupTypeKHR) -> Self { self.ty = ty; self } #[inline] pub fn general_shader(mut self, general_shader: u32) -> Self { self.general_shader = 
general_shader; self } #[inline] pub fn closest_hit_shader(mut self, closest_hit_shader: u32) -> Self { self.closest_hit_shader = closest_hit_shader; self } #[inline] pub fn any_hit_shader(mut self, any_hit_shader: u32) -> Self { self.any_hit_shader = any_hit_shader; self } #[inline] pub fn intersection_shader(mut self, intersection_shader: u32) -> Self { self.intersection_shader = intersection_shader; self } #[inline] pub fn shader_group_capture_replay_handle( mut self, shader_group_capture_replay_handle: *const c_void, ) -> Self { self.p_shader_group_capture_replay_handle = shader_group_capture_replay_handle; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RayTracingPipelineCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCreateFlags, pub stage_count: u32, pub p_stages: *const PipelineShaderStageCreateInfo<'a>, pub group_count: u32, pub p_groups: *const RayTracingShaderGroupCreateInfoNV<'a>, pub max_recursion_depth: u32, pub layout: PipelineLayout, pub base_pipeline_handle: Pipeline, pub base_pipeline_index: i32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RayTracingPipelineCreateInfoNV<'_> {} unsafe impl Sync for RayTracingPipelineCreateInfoNV<'_> {} impl ::core::default::Default for RayTracingPipelineCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCreateFlags::default(), stage_count: u32::default(), p_stages: ::core::ptr::null(), group_count: u32::default(), p_groups: ::core::ptr::null(), max_recursion_depth: u32::default(), layout: PipelineLayout::default(), base_pipeline_handle: Pipeline::default(), base_pipeline_index: i32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RayTracingPipelineCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_PIPELINE_CREATE_INFO_NV; } pub unsafe trait ExtendsRayTracingPipelineCreateInfoNV {} impl<'a> RayTracingPipelineCreateInfoNV<'a> { #[inline] pub fn flags(mut self, flags: PipelineCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo<'a>]) -> Self { self.stage_count = stages.len() as _; self.p_stages = stages.as_ptr(); self } #[inline] pub fn groups(mut self, groups: &'a [RayTracingShaderGroupCreateInfoNV<'a>]) -> Self { self.group_count = groups.len() as _; self.p_groups = groups.as_ptr(); self } #[inline] pub fn max_recursion_depth(mut self, max_recursion_depth: u32) -> Self { self.max_recursion_depth = max_recursion_depth; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self { self.base_pipeline_handle = base_pipeline_handle; self } #[inline] pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self { self.base_pipeline_index = base_pipeline_index; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RayTracingPipelineCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCreateFlags, pub stage_count: u32, pub p_stages: *const PipelineShaderStageCreateInfo<'a>, pub group_count: u32, pub p_groups: *const RayTracingShaderGroupCreateInfoKHR<'a>, pub max_pipeline_ray_recursion_depth: u32, pub p_library_info: *const PipelineLibraryCreateInfoKHR<'a>, pub p_library_interface: *const RayTracingPipelineInterfaceCreateInfoKHR<'a>, pub p_dynamic_state: *const PipelineDynamicStateCreateInfo<'a>, pub layout: PipelineLayout, pub base_pipeline_handle: Pipeline, pub base_pipeline_index: i32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RayTracingPipelineCreateInfoKHR<'_> {} unsafe impl Sync for RayTracingPipelineCreateInfoKHR<'_> {} impl ::core::default::Default for RayTracingPipelineCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCreateFlags::default(), stage_count: u32::default(), p_stages: ::core::ptr::null(), group_count: u32::default(), p_groups: ::core::ptr::null(), max_pipeline_ray_recursion_depth: u32::default(), p_library_info: ::core::ptr::null(), p_library_interface: ::core::ptr::null(), p_dynamic_state: ::core::ptr::null(), layout: PipelineLayout::default(), base_pipeline_handle: Pipeline::default(), base_pipeline_index: i32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RayTracingPipelineCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_PIPELINE_CREATE_INFO_KHR; } pub unsafe trait ExtendsRayTracingPipelineCreateInfoKHR {} impl<'a> RayTracingPipelineCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: PipelineCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo<'a>]) -> Self { self.stage_count = stages.len() as _; self.p_stages = stages.as_ptr(); self } #[inline] pub fn groups(mut self, groups: &'a [RayTracingShaderGroupCreateInfoKHR<'a>]) -> Self { self.group_count = groups.len() as _; self.p_groups = groups.as_ptr(); self } #[inline] pub fn max_pipeline_ray_recursion_depth( mut self, max_pipeline_ray_recursion_depth: u32, ) -> Self { self.max_pipeline_ray_recursion_depth = max_pipeline_ray_recursion_depth; self } #[inline] pub fn library_info(mut self, library_info: &'a PipelineLibraryCreateInfoKHR<'a>) -> Self { self.p_library_info = library_info; self } #[inline] pub fn library_interface( mut self, library_interface: &'a RayTracingPipelineInterfaceCreateInfoKHR<'a>, ) -> Self { self.p_library_interface = library_interface; self } #[inline] pub fn dynamic_state(mut self, dynamic_state: &'a PipelineDynamicStateCreateInfo<'a>) -> Self { self.p_dynamic_state = dynamic_state; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn base_pipeline_handle(mut self, base_pipeline_handle: 
Pipeline) -> Self { self.base_pipeline_handle = base_pipeline_handle; self } #[inline] pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self { self.base_pipeline_index = base_pipeline_index; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsRayTracingPipelineCreateInfoKHR + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GeometryTrianglesNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub vertex_data: Buffer, pub vertex_offset: DeviceSize, pub vertex_count: u32, pub vertex_stride: DeviceSize, pub vertex_format: Format, pub index_data: Buffer, pub index_offset: DeviceSize, pub index_count: u32, pub index_type: IndexType, pub transform_data: Buffer, pub transform_offset: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GeometryTrianglesNV<'_> {} unsafe impl Sync for GeometryTrianglesNV<'_> {} impl ::core::default::Default for GeometryTrianglesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), vertex_data: Buffer::default(), vertex_offset: DeviceSize::default(), vertex_count: u32::default(), vertex_stride: DeviceSize::default(), vertex_format: Format::default(), index_data: Buffer::default(), index_offset: DeviceSize::default(), index_count: u32::default(), index_type: IndexType::default(), transform_data: Buffer::default(), transform_offset: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GeometryTrianglesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GEOMETRY_TRIANGLES_NV; } impl<'a> GeometryTrianglesNV<'a> { #[inline] pub fn vertex_data(mut self, vertex_data: Buffer) -> Self { self.vertex_data = vertex_data; self } #[inline] pub fn vertex_offset(mut self, vertex_offset: DeviceSize) -> Self { self.vertex_offset = vertex_offset; self } #[inline] pub fn vertex_count(mut self, vertex_count: u32) -> Self { self.vertex_count = vertex_count; self } #[inline] pub fn vertex_stride(mut self, vertex_stride: DeviceSize) -> Self { self.vertex_stride = vertex_stride; self } #[inline] pub fn vertex_format(mut self, vertex_format: Format) -> Self { self.vertex_format = vertex_format; self } #[inline] pub fn index_data(mut self, index_data: Buffer) -> Self { self.index_data = index_data; self } #[inline] pub fn index_offset(mut self, index_offset: DeviceSize) -> Self { self.index_offset = index_offset; self } #[inline] pub fn index_count(mut self, index_count: u32) -> Self { self.index_count = index_count; self } #[inline] pub fn index_type(mut self, index_type: IndexType) -> Self { self.index_type = index_type; self } #[inline] pub fn transform_data(mut self, transform_data: Buffer) -> Self { self.transform_data = transform_data; self } #[inline] pub fn transform_offset(mut self, transform_offset: DeviceSize) -> Self { self.transform_offset = transform_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] 
#[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GeometryAABBNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub aabb_data: Buffer, pub num_aab_bs: u32, pub stride: u32, pub offset: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GeometryAABBNV<'_> {} unsafe impl Sync for GeometryAABBNV<'_> {} impl ::core::default::Default for GeometryAABBNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), aabb_data: Buffer::default(), num_aab_bs: u32::default(), stride: u32::default(), offset: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GeometryAABBNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GEOMETRY_AABB_NV; } impl<'a> GeometryAABBNV<'a> { #[inline] pub fn aabb_data(mut self, aabb_data: Buffer) -> Self { self.aabb_data = aabb_data; self } #[inline] pub fn num_aab_bs(mut self, num_aab_bs: u32) -> Self { self.num_aab_bs = num_aab_bs; self } #[inline] pub fn stride(mut self, stride: u32) -> Self { self.stride = stride; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct GeometryDataNV<'a> { pub triangles: GeometryTrianglesNV<'a>, pub aabbs: GeometryAABBNV<'a>, pub _marker: PhantomData<&'a ()>, } impl<'a> GeometryDataNV<'a> { #[inline] pub fn triangles(mut self, triangles: GeometryTrianglesNV<'a>) -> Self { self.triangles = triangles; self } #[inline] pub fn aabbs(mut self, aabbs: GeometryAABBNV<'a>) -> Self { self.aabbs = aabbs; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GeometryNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub geometry_type: GeometryTypeKHR, pub geometry: GeometryDataNV<'a>, pub flags: GeometryFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GeometryNV<'_> {} unsafe impl Sync for GeometryNV<'_> {} impl ::core::default::Default for GeometryNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), geometry_type: GeometryTypeKHR::default(), geometry: GeometryDataNV::default(), flags: GeometryFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GeometryNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GEOMETRY_NV; } impl<'a> GeometryNV<'a> { #[inline] pub fn geometry_type(mut self, geometry_type: GeometryTypeKHR) -> Self { self.geometry_type = geometry_type; self } #[inline] pub fn geometry(mut self, geometry: GeometryDataNV<'a>) -> Self { self.geometry = geometry; self } #[inline] pub fn flags(mut self, flags: GeometryFlagsKHR) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: AccelerationStructureTypeNV, pub flags: BuildAccelerationStructureFlagsNV, pub instance_count: u32, pub geometry_count: u32, pub p_geometries: *const GeometryNV<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureInfoNV<'_> {} unsafe impl Sync for AccelerationStructureInfoNV<'_> {} impl ::core::default::Default for AccelerationStructureInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: 
AccelerationStructureTypeNV::default(), flags: BuildAccelerationStructureFlagsNV::default(), instance_count: u32::default(), geometry_count: u32::default(), p_geometries: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_INFO_NV; } impl<'a> AccelerationStructureInfoNV<'a> { #[inline] pub fn ty(mut self, ty: AccelerationStructureTypeNV) -> Self { self.ty = ty; self } #[inline] pub fn flags(mut self, flags: BuildAccelerationStructureFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn instance_count(mut self, instance_count: u32) -> Self { self.instance_count = instance_count; self } #[inline] pub fn geometries(mut self, geometries: &'a [GeometryNV<'a>]) -> Self { self.geometry_count = geometries.len() as _; self.p_geometries = geometries.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub compacted_size: DeviceSize, pub info: AccelerationStructureInfoNV<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureCreateInfoNV<'_> {} unsafe impl Sync for AccelerationStructureCreateInfoNV<'_> {} impl ::core::default::Default for AccelerationStructureCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), compacted_size: DeviceSize::default(), info: AccelerationStructureInfoNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_CREATE_INFO_NV; } pub unsafe trait ExtendsAccelerationStructureCreateInfoNV {} impl<'a> AccelerationStructureCreateInfoNV<'a> { #[inline] pub fn compacted_size(mut self, compacted_size: DeviceSize) -> Self { self.compacted_size = compacted_size; self } #[inline] pub fn info(mut self, info: AccelerationStructureInfoNV<'a>) -> Self { self.info = info; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindAccelerationStructureMemoryInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acceleration_structure: AccelerationStructureNV, pub memory: DeviceMemory, pub memory_offset: DeviceSize, pub device_index_count: u32, pub p_device_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindAccelerationStructureMemoryInfoNV<'_> {} unsafe impl Sync for BindAccelerationStructureMemoryInfoNV<'_> {} impl ::core::default::Default for BindAccelerationStructureMemoryInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acceleration_structure: AccelerationStructureNV::default(), memory: DeviceMemory::default(), memory_offset: DeviceSize::default(), device_index_count: u32::default(), p_device_indices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindAccelerationStructureMemoryInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV; } impl<'a> BindAccelerationStructureMemoryInfoNV<'a> { #[inline] pub fn acceleration_structure( mut self, acceleration_structure: AccelerationStructureNV, ) -> Self { self.acceleration_structure = acceleration_structure; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self { self.memory_offset = memory_offset; self } #[inline] pub fn device_indices(mut self, device_indices: &'a [u32]) -> Self { self.device_index_count = device_indices.len() as _; self.p_device_indices = device_indices.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct WriteDescriptorSetAccelerationStructureKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acceleration_structure_count: u32, pub p_acceleration_structures: *const AccelerationStructureKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for WriteDescriptorSetAccelerationStructureKHR<'_> {} unsafe impl Sync for WriteDescriptorSetAccelerationStructureKHR<'_> {} impl ::core::default::Default for WriteDescriptorSetAccelerationStructureKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acceleration_structure_count: u32::default(), p_acceleration_structures: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for WriteDescriptorSetAccelerationStructureKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR; } unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureKHR<'_> {} impl<'a> WriteDescriptorSetAccelerationStructureKHR<'a> { #[inline] pub fn acceleration_structures( mut self, acceleration_structures: &'a [AccelerationStructureKHR], ) -> Self { self.acceleration_structure_count = 
acceleration_structures.len() as _; self.p_acceleration_structures = acceleration_structures.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct WriteDescriptorSetAccelerationStructureNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acceleration_structure_count: u32, pub p_acceleration_structures: *const AccelerationStructureNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for WriteDescriptorSetAccelerationStructureNV<'_> {} unsafe impl Sync for WriteDescriptorSetAccelerationStructureNV<'_> {} impl ::core::default::Default for WriteDescriptorSetAccelerationStructureNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acceleration_structure_count: u32::default(), p_acceleration_structures: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for WriteDescriptorSetAccelerationStructureNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV; } unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureNV<'_> {} impl<'a> WriteDescriptorSetAccelerationStructureNV<'a> { #[inline] pub fn acceleration_structures( mut self, acceleration_structures: &'a [AccelerationStructureNV], ) -> Self { self.acceleration_structure_count = acceleration_structures.len() as _; self.p_acceleration_structures = acceleration_structures.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureMemoryRequirementsInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: AccelerationStructureMemoryRequirementsTypeNV, pub acceleration_structure: AccelerationStructureNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureMemoryRequirementsInfoNV<'_> {} unsafe impl Sync for AccelerationStructureMemoryRequirementsInfoNV<'_> {} impl ::core::default::Default for AccelerationStructureMemoryRequirementsInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: AccelerationStructureMemoryRequirementsTypeNV::default(), acceleration_structure: AccelerationStructureNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureMemoryRequirementsInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV; } impl<'a> AccelerationStructureMemoryRequirementsInfoNV<'a> { #[inline] pub fn ty(mut self, ty: AccelerationStructureMemoryRequirementsTypeNV) -> Self { self.ty = ty; self } #[inline] pub fn acceleration_structure( mut self, acceleration_structure: AccelerationStructureNV, ) -> Self { self.acceleration_structure = acceleration_structure; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceAccelerationStructureFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub acceleration_structure: Bool32, pub acceleration_structure_capture_replay: Bool32, pub acceleration_structure_indirect_build: Bool32, pub acceleration_structure_host_commands: Bool32, pub descriptor_binding_acceleration_structure_update_after_bind: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceAccelerationStructureFeaturesKHR<'_> {} unsafe impl Sync for 
PhysicalDeviceAccelerationStructureFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceAccelerationStructureFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), acceleration_structure: Bool32::default(), acceleration_structure_capture_replay: Bool32::default(), acceleration_structure_indirect_build: Bool32::default(), acceleration_structure_host_commands: Bool32::default(), descriptor_binding_acceleration_structure_update_after_bind: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceAccelerationStructureFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAccelerationStructureFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAccelerationStructureFeaturesKHR<'_> {} impl<'a> PhysicalDeviceAccelerationStructureFeaturesKHR<'a> { #[inline] pub fn acceleration_structure(mut self, acceleration_structure: bool) -> Self { self.acceleration_structure = acceleration_structure.into(); self } #[inline] pub fn acceleration_structure_capture_replay( mut self, acceleration_structure_capture_replay: bool, ) -> Self { self.acceleration_structure_capture_replay = acceleration_structure_capture_replay.into(); self } #[inline] pub fn acceleration_structure_indirect_build( mut self, acceleration_structure_indirect_build: bool, ) -> Self { self.acceleration_structure_indirect_build = acceleration_structure_indirect_build.into(); self } #[inline] pub fn acceleration_structure_host_commands( mut self, acceleration_structure_host_commands: bool, ) -> Self { self.acceleration_structure_host_commands = acceleration_structure_host_commands.into(); self } #[inline] pub fn descriptor_binding_acceleration_structure_update_after_bind( mut self, descriptor_binding_acceleration_structure_update_after_bind: bool, ) -> Self { self.descriptor_binding_acceleration_structure_update_after_bind = descriptor_binding_acceleration_structure_update_after_bind.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingPipelineFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_pipeline: Bool32, pub ray_tracing_pipeline_shader_group_handle_capture_replay: Bool32, pub ray_tracing_pipeline_shader_group_handle_capture_replay_mixed: Bool32, pub ray_tracing_pipeline_trace_rays_indirect: Bool32, pub ray_traversal_primitive_culling: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingPipelineFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingPipelineFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingPipelineFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_tracing_pipeline: Bool32::default(), ray_tracing_pipeline_shader_group_handle_capture_replay: Bool32::default(), ray_tracing_pipeline_shader_group_handle_capture_replay_mixed: Bool32::default(), ray_tracing_pipeline_trace_rays_indirect: Bool32::default(), ray_traversal_primitive_culling: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingPipelineFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR; } unsafe impl 
ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingPipelineFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingPipelineFeaturesKHR<'_> {} impl<'a> PhysicalDeviceRayTracingPipelineFeaturesKHR<'a> { #[inline] pub fn ray_tracing_pipeline(mut self, ray_tracing_pipeline: bool) -> Self { self.ray_tracing_pipeline = ray_tracing_pipeline.into(); self } #[inline] pub fn ray_tracing_pipeline_shader_group_handle_capture_replay( mut self, ray_tracing_pipeline_shader_group_handle_capture_replay: bool, ) -> Self { self.ray_tracing_pipeline_shader_group_handle_capture_replay = ray_tracing_pipeline_shader_group_handle_capture_replay.into(); self } #[inline] pub fn ray_tracing_pipeline_shader_group_handle_capture_replay_mixed( mut self, ray_tracing_pipeline_shader_group_handle_capture_replay_mixed: bool, ) -> Self { self.ray_tracing_pipeline_shader_group_handle_capture_replay_mixed = ray_tracing_pipeline_shader_group_handle_capture_replay_mixed.into(); self } #[inline] pub fn ray_tracing_pipeline_trace_rays_indirect( mut self, ray_tracing_pipeline_trace_rays_indirect: bool, ) -> Self { self.ray_tracing_pipeline_trace_rays_indirect = ray_tracing_pipeline_trace_rays_indirect.into(); self } #[inline] pub fn ray_traversal_primitive_culling( mut self, ray_traversal_primitive_culling: bool, ) -> Self { self.ray_traversal_primitive_culling = ray_traversal_primitive_culling.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayQueryFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_query: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayQueryFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceRayQueryFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceRayQueryFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_query: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayQueryFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayQueryFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayQueryFeaturesKHR<'_> {} impl<'a> PhysicalDeviceRayQueryFeaturesKHR<'a> { #[inline] pub fn ray_query(mut self, ray_query: bool) -> Self { self.ray_query = ray_query.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceAccelerationStructurePropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_geometry_count: u64, pub max_instance_count: u64, pub max_primitive_count: u64, pub max_per_stage_descriptor_acceleration_structures: u32, pub max_per_stage_descriptor_update_after_bind_acceleration_structures: u32, pub max_descriptor_set_acceleration_structures: u32, pub max_descriptor_set_update_after_bind_acceleration_structures: u32, pub min_acceleration_structure_scratch_offset_alignment: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceAccelerationStructurePropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceAccelerationStructurePropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceAccelerationStructurePropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: 
::core::ptr::null_mut(), max_geometry_count: u64::default(), max_instance_count: u64::default(), max_primitive_count: u64::default(), max_per_stage_descriptor_acceleration_structures: u32::default(), max_per_stage_descriptor_update_after_bind_acceleration_structures: u32::default(), max_descriptor_set_acceleration_structures: u32::default(), max_descriptor_set_update_after_bind_acceleration_structures: u32::default(), min_acceleration_structure_scratch_offset_alignment: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceAccelerationStructurePropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceAccelerationStructurePropertiesKHR<'_> { } impl<'a> PhysicalDeviceAccelerationStructurePropertiesKHR<'a> { #[inline] pub fn max_geometry_count(mut self, max_geometry_count: u64) -> Self { self.max_geometry_count = max_geometry_count; self } #[inline] pub fn max_instance_count(mut self, max_instance_count: u64) -> Self { self.max_instance_count = max_instance_count; self } #[inline] pub fn max_primitive_count(mut self, max_primitive_count: u64) -> Self { self.max_primitive_count = max_primitive_count; self } #[inline] pub fn max_per_stage_descriptor_acceleration_structures( mut self, max_per_stage_descriptor_acceleration_structures: u32, ) -> Self { self.max_per_stage_descriptor_acceleration_structures = max_per_stage_descriptor_acceleration_structures; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_acceleration_structures( mut self, max_per_stage_descriptor_update_after_bind_acceleration_structures: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_acceleration_structures = max_per_stage_descriptor_update_after_bind_acceleration_structures; self } #[inline] pub fn max_descriptor_set_acceleration_structures( mut self, max_descriptor_set_acceleration_structures: u32, ) -> Self { self.max_descriptor_set_acceleration_structures = max_descriptor_set_acceleration_structures; self } #[inline] pub fn max_descriptor_set_update_after_bind_acceleration_structures( mut self, max_descriptor_set_update_after_bind_acceleration_structures: u32, ) -> Self { self.max_descriptor_set_update_after_bind_acceleration_structures = max_descriptor_set_update_after_bind_acceleration_structures; self } #[inline] pub fn min_acceleration_structure_scratch_offset_alignment( mut self, min_acceleration_structure_scratch_offset_alignment: u32, ) -> Self { self.min_acceleration_structure_scratch_offset_alignment = min_acceleration_structure_scratch_offset_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingPipelinePropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_group_handle_size: u32, pub max_ray_recursion_depth: u32, pub max_shader_group_stride: u32, pub shader_group_base_alignment: u32, pub shader_group_handle_capture_replay_size: u32, pub max_ray_dispatch_invocation_count: u32, pub shader_group_handle_alignment: u32, pub max_ray_hit_attribute_size: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingPipelinePropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingPipelinePropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingPipelinePropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_group_handle_size: u32::default(), max_ray_recursion_depth: u32::default(), max_shader_group_stride: u32::default(), shader_group_base_alignment: u32::default(), shader_group_handle_capture_replay_size: u32::default(), max_ray_dispatch_invocation_count: u32::default(), shader_group_handle_alignment: u32::default(), max_ray_hit_attribute_size: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingPipelinePropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPipelinePropertiesKHR<'_> {} impl<'a> PhysicalDeviceRayTracingPipelinePropertiesKHR<'a> { #[inline] pub fn shader_group_handle_size(mut self, shader_group_handle_size: u32) -> Self { self.shader_group_handle_size = shader_group_handle_size; self } #[inline] pub fn max_ray_recursion_depth(mut self, max_ray_recursion_depth: u32) -> Self { self.max_ray_recursion_depth = max_ray_recursion_depth; self } #[inline] pub fn max_shader_group_stride(mut self, max_shader_group_stride: u32) -> Self { self.max_shader_group_stride = max_shader_group_stride; self } #[inline] pub fn shader_group_base_alignment(mut self, shader_group_base_alignment: u32) -> Self { self.shader_group_base_alignment = shader_group_base_alignment; self } #[inline] pub fn shader_group_handle_capture_replay_size( mut self, shader_group_handle_capture_replay_size: u32, ) -> Self { self.shader_group_handle_capture_replay_size = shader_group_handle_capture_replay_size; self } #[inline] pub fn max_ray_dispatch_invocation_count( mut self, max_ray_dispatch_invocation_count: u32, ) -> Self { self.max_ray_dispatch_invocation_count = max_ray_dispatch_invocation_count; self } #[inline] pub fn shader_group_handle_alignment(mut self, shader_group_handle_alignment: u32) -> Self { self.shader_group_handle_alignment = shader_group_handle_alignment; self } #[inline] pub fn max_ray_hit_attribute_size(mut self, max_ray_hit_attribute_size: u32) -> Self { self.max_ray_hit_attribute_size = max_ray_hit_attribute_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_group_handle_size: u32, pub max_recursion_depth: u32, pub max_shader_group_stride: u32, pub shader_group_base_alignment: u32, pub max_geometry_count: u64, pub max_instance_count: u64, pub max_triangle_count: u64, pub max_descriptor_set_acceleration_structures: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_group_handle_size: u32::default(), max_recursion_depth: u32::default(), max_shader_group_stride: u32::default(), shader_group_base_alignment: u32::default(), max_geometry_count: u64::default(), max_instance_count: u64::default(), max_triangle_count: u64::default(), max_descriptor_set_acceleration_structures: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPropertiesNV<'_> {} impl<'a> PhysicalDeviceRayTracingPropertiesNV<'a> { #[inline] pub fn shader_group_handle_size(mut self, shader_group_handle_size: u32) -> Self { self.shader_group_handle_size = shader_group_handle_size; self } #[inline] pub fn max_recursion_depth(mut self, max_recursion_depth: u32) -> Self { self.max_recursion_depth = max_recursion_depth; self } #[inline] pub fn max_shader_group_stride(mut self, max_shader_group_stride: u32) -> Self { self.max_shader_group_stride = max_shader_group_stride; self } #[inline] pub fn shader_group_base_alignment(mut self, shader_group_base_alignment: u32) -> Self { self.shader_group_base_alignment = shader_group_base_alignment; self } #[inline] pub fn max_geometry_count(mut self, max_geometry_count: u64) -> Self { self.max_geometry_count = max_geometry_count; self } #[inline] pub fn max_instance_count(mut self, max_instance_count: u64) -> Self { self.max_instance_count = max_instance_count; self } #[inline] pub fn max_triangle_count(mut self, max_triangle_count: u64) -> Self { self.max_triangle_count = max_triangle_count; self } #[inline] pub fn max_descriptor_set_acceleration_structures( mut self, max_descriptor_set_acceleration_structures: u32, ) -> Self { self.max_descriptor_set_acceleration_structures = max_descriptor_set_acceleration_structures; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct StridedDeviceAddressRegionKHR { pub device_address: DeviceAddress, pub stride: DeviceSize, pub size: DeviceSize, } impl StridedDeviceAddressRegionKHR { #[inline] pub fn device_address(mut self, device_address: DeviceAddress) -> Self { self.device_address = device_address; self } #[inline] pub fn stride(mut self, stride: DeviceSize) -> Self { self.stride = stride; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct TraceRaysIndirectCommandKHR { pub width: u32, pub height: u32, pub depth: u32, } impl TraceRaysIndirectCommandKHR { #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } #[inline] pub fn depth(mut self, depth: u32) -> Self { self.depth = depth; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct TraceRaysIndirectCommand2KHR { pub raygen_shader_record_address: DeviceAddress, pub raygen_shader_record_size: DeviceSize, pub miss_shader_binding_table_address: DeviceAddress, pub miss_shader_binding_table_size: DeviceSize, pub miss_shader_binding_table_stride: DeviceSize, pub hit_shader_binding_table_address: DeviceAddress, pub hit_shader_binding_table_size: DeviceSize, pub hit_shader_binding_table_stride: DeviceSize, pub callable_shader_binding_table_address: DeviceAddress, pub callable_shader_binding_table_size: DeviceSize, pub callable_shader_binding_table_stride: DeviceSize, pub width: u32, pub height: u32, pub depth: u32, } impl TraceRaysIndirectCommand2KHR { #[inline] pub fn raygen_shader_record_address( mut self, raygen_shader_record_address: DeviceAddress, ) -> Self { self.raygen_shader_record_address = raygen_shader_record_address; self } #[inline] pub fn 
raygen_shader_record_size(mut self, raygen_shader_record_size: DeviceSize) -> Self { self.raygen_shader_record_size = raygen_shader_record_size; self } #[inline] pub fn miss_shader_binding_table_address( mut self, miss_shader_binding_table_address: DeviceAddress, ) -> Self { self.miss_shader_binding_table_address = miss_shader_binding_table_address; self } #[inline] pub fn miss_shader_binding_table_size( mut self, miss_shader_binding_table_size: DeviceSize, ) -> Self { self.miss_shader_binding_table_size = miss_shader_binding_table_size; self } #[inline] pub fn miss_shader_binding_table_stride( mut self, miss_shader_binding_table_stride: DeviceSize, ) -> Self { self.miss_shader_binding_table_stride = miss_shader_binding_table_stride; self } #[inline] pub fn hit_shader_binding_table_address( mut self, hit_shader_binding_table_address: DeviceAddress, ) -> Self { self.hit_shader_binding_table_address = hit_shader_binding_table_address; self } #[inline] pub fn hit_shader_binding_table_size( mut self, hit_shader_binding_table_size: DeviceSize, ) -> Self { self.hit_shader_binding_table_size = hit_shader_binding_table_size; self } #[inline] pub fn hit_shader_binding_table_stride( mut self, hit_shader_binding_table_stride: DeviceSize, ) -> Self { self.hit_shader_binding_table_stride = hit_shader_binding_table_stride; self } #[inline] pub fn callable_shader_binding_table_address( mut self, callable_shader_binding_table_address: DeviceAddress, ) -> Self { self.callable_shader_binding_table_address = callable_shader_binding_table_address; self } #[inline] pub fn callable_shader_binding_table_size( mut self, callable_shader_binding_table_size: DeviceSize, ) -> Self { self.callable_shader_binding_table_size = callable_shader_binding_table_size; self } #[inline] pub fn callable_shader_binding_table_stride( mut self, callable_shader_binding_table_stride: DeviceSize, ) -> Self { self.callable_shader_binding_table_stride = callable_shader_binding_table_stride; self } #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } #[inline] pub fn depth(mut self, depth: u32) -> Self { self.depth = depth; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_maintenance1: Bool32, pub ray_tracing_pipeline_trace_rays_indirect2: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_tracing_maintenance1: Bool32::default(), ray_tracing_pipeline_trace_rays_indirect2: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'_> {} impl<'a> PhysicalDeviceRayTracingMaintenance1FeaturesKHR<'a> { #[inline] pub fn 
ray_tracing_maintenance1(mut self, ray_tracing_maintenance1: bool) -> Self { self.ray_tracing_maintenance1 = ray_tracing_maintenance1.into(); self } #[inline] pub fn ray_tracing_pipeline_trace_rays_indirect2( mut self, ray_tracing_pipeline_trace_rays_indirect2: bool, ) -> Self { self.ray_tracing_pipeline_trace_rays_indirect2 = ray_tracing_pipeline_trace_rays_indirect2.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DrmFormatModifierPropertiesListEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub drm_format_modifier_count: u32, pub p_drm_format_modifier_properties: *mut DrmFormatModifierPropertiesEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DrmFormatModifierPropertiesListEXT<'_> {} unsafe impl Sync for DrmFormatModifierPropertiesListEXT<'_> {} impl ::core::default::Default for DrmFormatModifierPropertiesListEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), drm_format_modifier_count: u32::default(), p_drm_format_modifier_properties: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DrmFormatModifierPropertiesListEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT; } unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesListEXT<'_> {} impl<'a> DrmFormatModifierPropertiesListEXT<'a> { #[inline] pub fn drm_format_modifier_properties( mut self, drm_format_modifier_properties: &'a mut [DrmFormatModifierPropertiesEXT], ) -> Self { self.drm_format_modifier_count = drm_format_modifier_properties.len() as _; self.p_drm_format_modifier_properties = drm_format_modifier_properties.as_mut_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DrmFormatModifierPropertiesEXT { pub drm_format_modifier: u64, pub drm_format_modifier_plane_count: u32, pub drm_format_modifier_tiling_features: FormatFeatureFlags, } impl DrmFormatModifierPropertiesEXT { #[inline] pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self { self.drm_format_modifier = drm_format_modifier; self } #[inline] pub fn drm_format_modifier_plane_count(mut self, drm_format_modifier_plane_count: u32) -> Self { self.drm_format_modifier_plane_count = drm_format_modifier_plane_count; self } #[inline] pub fn drm_format_modifier_tiling_features( mut self, drm_format_modifier_tiling_features: FormatFeatureFlags, ) -> Self { self.drm_format_modifier_tiling_features = drm_format_modifier_tiling_features; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageDrmFormatModifierInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub drm_format_modifier: u64, pub sharing_mode: SharingMode, pub queue_family_index_count: u32, pub p_queue_family_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageDrmFormatModifierInfoEXT<'_> {} unsafe impl Sync for PhysicalDeviceImageDrmFormatModifierInfoEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImageDrmFormatModifierInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), drm_format_modifier: u64::default(), sharing_mode: SharingMode::default(), queue_family_index_count: u32::default(), p_queue_family_indices: ::core::ptr::null(), 
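// Note: `p_queue_family_indices` is only read when `sharing_mode` is
// `SharingMode::CONCURRENT`; for `SharingMode::EXCLUSIVE` the defaulted null
// pointer is fine. A minimal user-side sketch, assuming a `modifier: u64`
// obtained from `DrmFormatModifierPropertiesListEXT` and the
// `VK_EXT_image_drm_format_modifier` extension being available:
// let _info = vk::PhysicalDeviceImageDrmFormatModifierInfoEXT::default()
//     .drm_format_modifier(modifier)
//     .sharing_mode(vk::SharingMode::EXCLUSIVE);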
_marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageDrmFormatModifierInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT; } unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceImageDrmFormatModifierInfoEXT<'_> { } impl<'a> PhysicalDeviceImageDrmFormatModifierInfoEXT<'a> { #[inline] pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self { self.drm_format_modifier = drm_format_modifier; self } #[inline] pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> Self { self.sharing_mode = sharing_mode; self } #[inline] pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self { self.queue_family_index_count = queue_family_indices.len() as _; self.p_queue_family_indices = queue_family_indices.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageDrmFormatModifierListCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub drm_format_modifier_count: u32, pub p_drm_format_modifiers: *const u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageDrmFormatModifierListCreateInfoEXT<'_> {} unsafe impl Sync for ImageDrmFormatModifierListCreateInfoEXT<'_> {} impl ::core::default::Default for ImageDrmFormatModifierListCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), drm_format_modifier_count: u32::default(), p_drm_format_modifiers: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageDrmFormatModifierListCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT; } unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierListCreateInfoEXT<'_> {} impl<'a> ImageDrmFormatModifierListCreateInfoEXT<'a> { #[inline] pub fn drm_format_modifiers(mut self, drm_format_modifiers: &'a [u64]) -> Self { self.drm_format_modifier_count = drm_format_modifiers.len() as _; self.p_drm_format_modifiers = drm_format_modifiers.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageDrmFormatModifierExplicitCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub drm_format_modifier: u64, pub drm_format_modifier_plane_count: u32, pub p_plane_layouts: *const SubresourceLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageDrmFormatModifierExplicitCreateInfoEXT<'_> {} unsafe impl Sync for ImageDrmFormatModifierExplicitCreateInfoEXT<'_> {} impl ::core::default::Default for ImageDrmFormatModifierExplicitCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), drm_format_modifier: u64::default(), drm_format_modifier_plane_count: u32::default(), p_plane_layouts: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageDrmFormatModifierExplicitCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT; } unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierExplicitCreateInfoEXT<'_> {} impl<'a> ImageDrmFormatModifierExplicitCreateInfoEXT<'a> { #[inline] pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self { self.drm_format_modifier = drm_format_modifier; self } #[inline] pub fn plane_layouts(mut self, 
plane_layouts: &'a [SubresourceLayout]) -> Self { self.drm_format_modifier_plane_count = plane_layouts.len() as _; self.p_plane_layouts = plane_layouts.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageDrmFormatModifierPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub drm_format_modifier: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageDrmFormatModifierPropertiesEXT<'_> {} unsafe impl Sync for ImageDrmFormatModifierPropertiesEXT<'_> {} impl ::core::default::Default for ImageDrmFormatModifierPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), drm_format_modifier: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageDrmFormatModifierPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT; } impl<'a> ImageDrmFormatModifierPropertiesEXT<'a> { #[inline] pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self { self.drm_format_modifier = drm_format_modifier; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageStencilUsageCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stencil_usage: ImageUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageStencilUsageCreateInfo<'_> {} unsafe impl Sync for ImageStencilUsageCreateInfo<'_> {} impl ::core::default::Default for ImageStencilUsageCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stencil_usage: ImageUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageStencilUsageCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_STENCIL_USAGE_CREATE_INFO; } unsafe impl ExtendsImageCreateInfo for ImageStencilUsageCreateInfo<'_> {} unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageStencilUsageCreateInfo<'_> {} impl<'a> ImageStencilUsageCreateInfo<'a> { #[inline] pub fn stencil_usage(mut self, stencil_usage: ImageUsageFlags) -> Self { self.stencil_usage = stencil_usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceMemoryOverallocationCreateInfoAMD<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub overallocation_behavior: MemoryOverallocationBehaviorAMD, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceMemoryOverallocationCreateInfoAMD<'_> {} unsafe impl Sync for DeviceMemoryOverallocationCreateInfoAMD<'_> {} impl ::core::default::Default for DeviceMemoryOverallocationCreateInfoAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), overallocation_behavior: MemoryOverallocationBehaviorAMD::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceMemoryOverallocationCreateInfoAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD; } unsafe impl ExtendsDeviceCreateInfo for DeviceMemoryOverallocationCreateInfoAMD<'_> {} impl<'a> DeviceMemoryOverallocationCreateInfoAMD<'a> { #[inline] pub fn overallocation_behavior( mut self, overallocation_behavior: MemoryOverallocationBehaviorAMD, ) -> Self { self.overallocation_behavior = overallocation_behavior; self } } #[repr(C)] 
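// The feature structs that follow implement both `ExtendsPhysicalDeviceFeatures2`
// (for querying support) and `ExtendsDeviceCreateInfo` (for enabling the feature
// at device creation). A query-path sketch, assuming `instance` is an
// `ash::Instance` and `pdev` a `vk::PhysicalDevice` obtained elsewhere:
// let mut fdm = vk::PhysicalDeviceFragmentDensityMapFeaturesEXT::default();
// let mut features2 = vk::PhysicalDeviceFeatures2::default().push_next(&mut fdm);
// unsafe { instance.get_physical_device_features2(pdev, &mut features2) };
// let _supported = fdm.fragment_density_map == vk::TRUE;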
#[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentDensityMapFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_density_map: Bool32, pub fragment_density_map_dynamic: Bool32, pub fragment_density_map_non_subsampled_images: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentDensityMapFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFragmentDensityMapFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentDensityMapFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_density_map: Bool32::default(), fragment_density_map_dynamic: Bool32::default(), fragment_density_map_non_subsampled_images: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentDensityMapFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentDensityMapFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapFeaturesEXT<'_> {} impl<'a> PhysicalDeviceFragmentDensityMapFeaturesEXT<'a> { #[inline] pub fn fragment_density_map(mut self, fragment_density_map: bool) -> Self { self.fragment_density_map = fragment_density_map.into(); self } #[inline] pub fn fragment_density_map_dynamic(mut self, fragment_density_map_dynamic: bool) -> Self { self.fragment_density_map_dynamic = fragment_density_map_dynamic.into(); self } #[inline] pub fn fragment_density_map_non_subsampled_images( mut self, fragment_density_map_non_subsampled_images: bool, ) -> Self { self.fragment_density_map_non_subsampled_images = fragment_density_map_non_subsampled_images.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentDensityMap2FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_density_map_deferred: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentDensityMap2FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFragmentDensityMap2FeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentDensityMap2FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_density_map_deferred: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentDensityMap2FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentDensityMap2FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMap2FeaturesEXT<'_> {} impl<'a> PhysicalDeviceFragmentDensityMap2FeaturesEXT<'a> { #[inline] pub fn fragment_density_map_deferred(mut self, fragment_density_map_deferred: bool) -> Self { self.fragment_density_map_deferred = fragment_density_map_deferred.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_density_map_offset: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe 
impl Send for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_density_map_offset: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM<'a> { #[inline] pub fn fragment_density_map_offset(mut self, fragment_density_map_offset: bool) -> Self { self.fragment_density_map_offset = fragment_density_map_offset.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentDensityMapPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_fragment_density_texel_size: Extent2D, pub max_fragment_density_texel_size: Extent2D, pub fragment_density_invocations: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentDensityMapPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFragmentDensityMapPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentDensityMapPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_fragment_density_texel_size: Extent2D::default(), max_fragment_density_texel_size: Extent2D::default(), fragment_density_invocations: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentDensityMapPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentDensityMapPropertiesEXT<'_> {} impl<'a> PhysicalDeviceFragmentDensityMapPropertiesEXT<'a> { #[inline] pub fn min_fragment_density_texel_size( mut self, min_fragment_density_texel_size: Extent2D, ) -> Self { self.min_fragment_density_texel_size = min_fragment_density_texel_size; self } #[inline] pub fn max_fragment_density_texel_size( mut self, max_fragment_density_texel_size: Extent2D, ) -> Self { self.max_fragment_density_texel_size = max_fragment_density_texel_size; self } #[inline] pub fn fragment_density_invocations(mut self, fragment_density_invocations: bool) -> Self { self.fragment_density_invocations = fragment_density_invocations.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentDensityMap2PropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub subsampled_loads: Bool32, pub subsampled_coarse_reconstruction_early_access: Bool32, pub max_subsampled_array_layers: u32, pub max_descriptor_set_subsampled_samplers: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentDensityMap2PropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFragmentDensityMap2PropertiesEXT<'_> {} impl ::core::default::Default for 
PhysicalDeviceFragmentDensityMap2PropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), subsampled_loads: Bool32::default(), subsampled_coarse_reconstruction_early_access: Bool32::default(), max_subsampled_array_layers: u32::default(), max_descriptor_set_subsampled_samplers: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentDensityMap2PropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentDensityMap2PropertiesEXT<'_> { } impl<'a> PhysicalDeviceFragmentDensityMap2PropertiesEXT<'a> { #[inline] pub fn subsampled_loads(mut self, subsampled_loads: bool) -> Self { self.subsampled_loads = subsampled_loads.into(); self } #[inline] pub fn subsampled_coarse_reconstruction_early_access( mut self, subsampled_coarse_reconstruction_early_access: bool, ) -> Self { self.subsampled_coarse_reconstruction_early_access = subsampled_coarse_reconstruction_early_access.into(); self } #[inline] pub fn max_subsampled_array_layers(mut self, max_subsampled_array_layers: u32) -> Self { self.max_subsampled_array_layers = max_subsampled_array_layers; self } #[inline] pub fn max_descriptor_set_subsampled_samplers( mut self, max_descriptor_set_subsampled_samplers: u32, ) -> Self { self.max_descriptor_set_subsampled_samplers = max_descriptor_set_subsampled_samplers; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_density_offset_granularity: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_density_offset_granularity: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'_> { } impl<'a> PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM<'a> { #[inline] pub fn fragment_density_offset_granularity( mut self, fragment_density_offset_granularity: Extent2D, ) -> Self { self.fragment_density_offset_granularity = fragment_density_offset_granularity; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassFragmentDensityMapCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fragment_density_map_attachment: AttachmentReference, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassFragmentDensityMapCreateInfoEXT<'_> {} unsafe impl Sync for RenderPassFragmentDensityMapCreateInfoEXT<'_> {} impl ::core::default::Default for RenderPassFragmentDensityMapCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fragment_density_map_attachment: 
AttachmentReference::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassFragmentDensityMapCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT; } unsafe impl ExtendsRenderPassCreateInfo for RenderPassFragmentDensityMapCreateInfoEXT<'_> {} unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassFragmentDensityMapCreateInfoEXT<'_> {} impl<'a> RenderPassFragmentDensityMapCreateInfoEXT<'a> { #[inline] pub fn fragment_density_map_attachment( mut self, fragment_density_map_attachment: AttachmentReference, ) -> Self { self.fragment_density_map_attachment = fragment_density_map_attachment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassFragmentDensityMapOffsetEndInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fragment_density_offset_count: u32, pub p_fragment_density_offsets: *const Offset2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassFragmentDensityMapOffsetEndInfoQCOM<'_> {} unsafe impl Sync for SubpassFragmentDensityMapOffsetEndInfoQCOM<'_> {} impl ::core::default::Default for SubpassFragmentDensityMapOffsetEndInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fragment_density_offset_count: u32::default(), p_fragment_density_offsets: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassFragmentDensityMapOffsetEndInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM; } unsafe impl ExtendsSubpassEndInfo for SubpassFragmentDensityMapOffsetEndInfoQCOM<'_> {} impl<'a> SubpassFragmentDensityMapOffsetEndInfoQCOM<'a> { #[inline] pub fn fragment_density_offsets(mut self, fragment_density_offsets: &'a [Offset2D]) -> Self { self.fragment_density_offset_count = fragment_density_offsets.len() as _; self.p_fragment_density_offsets = fragment_density_offsets.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceScalarBlockLayoutFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub scalar_block_layout: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceScalarBlockLayoutFeatures<'_> {} unsafe impl Sync for PhysicalDeviceScalarBlockLayoutFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceScalarBlockLayoutFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), scalar_block_layout: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceScalarBlockLayoutFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceScalarBlockLayoutFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceScalarBlockLayoutFeatures<'_> {} impl<'a> PhysicalDeviceScalarBlockLayoutFeatures<'a> { #[inline] pub fn scalar_block_layout(mut self, scalar_block_layout: bool) -> Self { self.scalar_block_layout = scalar_block_layout.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceProtectedCapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub supports_protected: 
Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceProtectedCapabilitiesKHR<'_> {} unsafe impl Sync for SurfaceProtectedCapabilitiesKHR<'_> {} impl ::core::default::Default for SurfaceProtectedCapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), supports_protected: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceProtectedCapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PROTECTED_CAPABILITIES_KHR; } unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceProtectedCapabilitiesKHR<'_> {} impl<'a> SurfaceProtectedCapabilitiesKHR<'a> { #[inline] pub fn supports_protected(mut self, supports_protected: bool) -> Self { self.supports_protected = supports_protected.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceUniformBufferStandardLayoutFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub uniform_buffer_standard_layout: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceUniformBufferStandardLayoutFeatures<'_> {} unsafe impl Sync for PhysicalDeviceUniformBufferStandardLayoutFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceUniformBufferStandardLayoutFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), uniform_buffer_standard_layout: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceUniformBufferStandardLayoutFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceUniformBufferStandardLayoutFeatures<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceUniformBufferStandardLayoutFeatures<'_> {} impl<'a> PhysicalDeviceUniformBufferStandardLayoutFeatures<'a> { #[inline] pub fn uniform_buffer_standard_layout(mut self, uniform_buffer_standard_layout: bool) -> Self { self.uniform_buffer_standard_layout = uniform_buffer_standard_layout.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDepthClipEnableFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub depth_clip_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDepthClipEnableFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDepthClipEnableFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDepthClipEnableFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), depth_clip_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDepthClipEnableFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClipEnableFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClipEnableFeaturesEXT<'_> {} impl<'a> PhysicalDeviceDepthClipEnableFeaturesEXT<'a> { #[inline] pub fn depth_clip_enable(mut self, depth_clip_enable: bool) -> Self { self.depth_clip_enable = depth_clip_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] 
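// `PipelineRasterizationDepthClipStateCreateInfoEXT` (next struct) extends
// `PipelineRasterizationStateCreateInfo` rather than device creation, so it is
// chained with `push_next` while building a graphics pipeline. User-side
// sketch, assuming `VK_EXT_depth_clip_enable` was enabled on the device:
// let mut depth_clip = vk::PipelineRasterizationDepthClipStateCreateInfoEXT::default()
//     .depth_clip_enable(false);
// let _raster = vk::PipelineRasterizationStateCreateInfo::default()
//     .push_next(&mut depth_clip)
//     .polygon_mode(vk::PolygonMode::FILL)
//     .line_width(1.0);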
#[doc = ""] #[must_use] pub struct PipelineRasterizationDepthClipStateCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineRasterizationDepthClipStateCreateFlagsEXT, pub depth_clip_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationDepthClipStateCreateInfoEXT<'_> {} unsafe impl Sync for PipelineRasterizationDepthClipStateCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineRasterizationDepthClipStateCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineRasterizationDepthClipStateCreateFlagsEXT::default(), depth_clip_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationDepthClipStateCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineRasterizationStateCreateInfo for PipelineRasterizationDepthClipStateCreateInfoEXT<'_> { } impl<'a> PipelineRasterizationDepthClipStateCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: PipelineRasterizationDepthClipStateCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn depth_clip_enable(mut self, depth_clip_enable: bool) -> Self { self.depth_clip_enable = depth_clip_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMemoryBudgetPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub heap_budget: [DeviceSize; MAX_MEMORY_HEAPS], pub heap_usage: [DeviceSize; MAX_MEMORY_HEAPS], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMemoryBudgetPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMemoryBudgetPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMemoryBudgetPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), heap_budget: unsafe { ::core::mem::zeroed() }, heap_usage: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMemoryBudgetPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceMemoryProperties2 for PhysicalDeviceMemoryBudgetPropertiesEXT<'_> {} impl<'a> PhysicalDeviceMemoryBudgetPropertiesEXT<'a> { #[inline] pub fn heap_budget(mut self, heap_budget: [DeviceSize; MAX_MEMORY_HEAPS]) -> Self { self.heap_budget = heap_budget; self } #[inline] pub fn heap_usage(mut self, heap_usage: [DeviceSize; MAX_MEMORY_HEAPS]) -> Self { self.heap_usage = heap_usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMemoryPriorityFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_priority: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMemoryPriorityFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMemoryPriorityFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMemoryPriorityFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_priority: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMemoryPriorityFeaturesEXT<'a> { const 
STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMemoryPriorityFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryPriorityFeaturesEXT<'_> {} impl<'a> PhysicalDeviceMemoryPriorityFeaturesEXT<'a> { #[inline] pub fn memory_priority(mut self, memory_priority: bool) -> Self { self.memory_priority = memory_priority.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryPriorityAllocateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub priority: f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryPriorityAllocateInfoEXT<'_> {} unsafe impl Sync for MemoryPriorityAllocateInfoEXT<'_> {} impl ::core::default::Default for MemoryPriorityAllocateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), priority: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryPriorityAllocateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_PRIORITY_ALLOCATE_INFO_EXT; } unsafe impl ExtendsMemoryAllocateInfo for MemoryPriorityAllocateInfoEXT<'_> {} impl<'a> MemoryPriorityAllocateInfoEXT<'a> { #[inline] pub fn priority(mut self, priority: f32) -> Self { self.priority = priority; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pageable_device_local_memory: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pageable_device_local_memory: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'_> {} impl<'a> PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT<'a> { #[inline] pub fn pageable_device_local_memory(mut self, pageable_device_local_memory: bool) -> Self { self.pageable_device_local_memory = pageable_device_local_memory.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceBufferDeviceAddressFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub buffer_device_address: Bool32, pub buffer_device_address_capture_replay: Bool32, pub buffer_device_address_multi_device: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceBufferDeviceAddressFeatures<'_> {} unsafe impl Sync for PhysicalDeviceBufferDeviceAddressFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceBufferDeviceAddressFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), 
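// `p_next` defaults to null here and is wired into the chain by `push_next`
// when this struct is pushed onto `PhysicalDeviceFeatures2` (query) or
// `DeviceCreateInfo` (enable), e.g. (user side, `bda` being this struct):
// `vk::DeviceCreateInfo::default().push_next(&mut bda)`.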
buffer_device_address: Bool32::default(), buffer_device_address_capture_replay: Bool32::default(), buffer_device_address_multi_device: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceBufferDeviceAddressFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBufferDeviceAddressFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferDeviceAddressFeatures<'_> {} impl<'a> PhysicalDeviceBufferDeviceAddressFeatures<'a> { #[inline] pub fn buffer_device_address(mut self, buffer_device_address: bool) -> Self { self.buffer_device_address = buffer_device_address.into(); self } #[inline] pub fn buffer_device_address_capture_replay( mut self, buffer_device_address_capture_replay: bool, ) -> Self { self.buffer_device_address_capture_replay = buffer_device_address_capture_replay.into(); self } #[inline] pub fn buffer_device_address_multi_device( mut self, buffer_device_address_multi_device: bool, ) -> Self { self.buffer_device_address_multi_device = buffer_device_address_multi_device.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceBufferDeviceAddressFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub buffer_device_address: Bool32, pub buffer_device_address_capture_replay: Bool32, pub buffer_device_address_multi_device: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceBufferDeviceAddressFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceBufferDeviceAddressFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceBufferDeviceAddressFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), buffer_device_address: Bool32::default(), buffer_device_address_capture_replay: Bool32::default(), buffer_device_address_multi_device: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceBufferDeviceAddressFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBufferDeviceAddressFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferDeviceAddressFeaturesEXT<'_> {} impl<'a> PhysicalDeviceBufferDeviceAddressFeaturesEXT<'a> { #[inline] pub fn buffer_device_address(mut self, buffer_device_address: bool) -> Self { self.buffer_device_address = buffer_device_address.into(); self } #[inline] pub fn buffer_device_address_capture_replay( mut self, buffer_device_address_capture_replay: bool, ) -> Self { self.buffer_device_address_capture_replay = buffer_device_address_capture_replay.into(); self } #[inline] pub fn buffer_device_address_multi_device( mut self, buffer_device_address_multi_device: bool, ) -> Self { self.buffer_device_address_multi_device = buffer_device_address_multi_device.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferDeviceAddressInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: Buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferDeviceAddressInfo<'_> {} unsafe impl Sync for BufferDeviceAddressInfo<'_> {} impl ::core::default::Default for BufferDeviceAddressInfo<'_> { 
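// `BufferDeviceAddressInfo` is the parameter block for
// `Device::get_buffer_device_address`. User-side sketch, assuming `device` is
// an `ash::Device`, `buffer` was created with
// `BufferUsageFlags::SHADER_DEVICE_ADDRESS`, and the `bufferDeviceAddress`
// feature is enabled:
// let info = vk::BufferDeviceAddressInfo::default().buffer(buffer);
// let _addr: vk::DeviceAddress = unsafe { device.get_buffer_device_address(&info) };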
#[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: Buffer::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferDeviceAddressInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_DEVICE_ADDRESS_INFO; } impl<'a> BufferDeviceAddressInfo<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferOpaqueCaptureAddressCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub opaque_capture_address: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferOpaqueCaptureAddressCreateInfo<'_> {} unsafe impl Sync for BufferOpaqueCaptureAddressCreateInfo<'_> {} impl ::core::default::Default for BufferOpaqueCaptureAddressCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), opaque_capture_address: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferOpaqueCaptureAddressCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO; } unsafe impl ExtendsBufferCreateInfo for BufferOpaqueCaptureAddressCreateInfo<'_> {} impl<'a> BufferOpaqueCaptureAddressCreateInfo<'a> { #[inline] pub fn opaque_capture_address(mut self, opaque_capture_address: u64) -> Self { self.opaque_capture_address = opaque_capture_address; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferDeviceAddressCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub device_address: DeviceAddress, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferDeviceAddressCreateInfoEXT<'_> {} unsafe impl Sync for BufferDeviceAddressCreateInfoEXT<'_> {} impl ::core::default::Default for BufferDeviceAddressCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), device_address: DeviceAddress::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferDeviceAddressCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT; } unsafe impl ExtendsBufferCreateInfo for BufferDeviceAddressCreateInfoEXT<'_> {} impl<'a> BufferDeviceAddressCreateInfoEXT<'a> { #[inline] pub fn device_address(mut self, device_address: DeviceAddress) -> Self { self.device_address = device_address; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageViewImageFormatInfoEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_view_type: ImageViewType, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageViewImageFormatInfoEXT<'_> {} unsafe impl Sync for PhysicalDeviceImageViewImageFormatInfoEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImageViewImageFormatInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_view_type: ImageViewType::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageViewImageFormatInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT; } unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for 
PhysicalDeviceImageViewImageFormatInfoEXT<'_> { } impl<'a> PhysicalDeviceImageViewImageFormatInfoEXT<'a> { #[inline] pub fn image_view_type(mut self, image_view_type: ImageViewType) -> Self { self.image_view_type = image_view_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FilterCubicImageViewImageFormatPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub filter_cubic: Bool32, pub filter_cubic_minmax: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FilterCubicImageViewImageFormatPropertiesEXT<'_> {} unsafe impl Sync for FilterCubicImageViewImageFormatPropertiesEXT<'_> {} impl ::core::default::Default for FilterCubicImageViewImageFormatPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), filter_cubic: Bool32::default(), filter_cubic_minmax: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FilterCubicImageViewImageFormatPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT; } unsafe impl ExtendsImageFormatProperties2 for FilterCubicImageViewImageFormatPropertiesEXT<'_> {} impl<'a> FilterCubicImageViewImageFormatPropertiesEXT<'a> { #[inline] pub fn filter_cubic(mut self, filter_cubic: bool) -> Self { self.filter_cubic = filter_cubic.into(); self } #[inline] pub fn filter_cubic_minmax(mut self, filter_cubic_minmax: bool) -> Self { self.filter_cubic_minmax = filter_cubic_minmax.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImagelessFramebufferFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub imageless_framebuffer: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImagelessFramebufferFeatures<'_> {} unsafe impl Sync for PhysicalDeviceImagelessFramebufferFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceImagelessFramebufferFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), imageless_framebuffer: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImagelessFramebufferFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImagelessFramebufferFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImagelessFramebufferFeatures<'_> {} impl<'a> PhysicalDeviceImagelessFramebufferFeatures<'a> { #[inline] pub fn imageless_framebuffer(mut self, imageless_framebuffer: bool) -> Self { self.imageless_framebuffer = imageless_framebuffer.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FramebufferAttachmentsCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub attachment_image_info_count: u32, pub p_attachment_image_infos: *const FramebufferAttachmentImageInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FramebufferAttachmentsCreateInfo<'_> {} unsafe impl Sync for FramebufferAttachmentsCreateInfo<'_> {} impl ::core::default::Default for FramebufferAttachmentsCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), 
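// This struct backs imageless framebuffers: it is pushed onto a
// `FramebufferCreateInfo` carrying `FramebufferCreateFlags::IMAGELESS`, and the
// concrete `ImageView`s are supplied later through
// `RenderPassAttachmentBeginInfo` (defined further below). User-side sketch,
// assuming `attachment_infos` is a slice of `vk::FramebufferAttachmentImageInfo`:
// let mut imageless = vk::FramebufferAttachmentsCreateInfo::default()
//     .attachment_image_infos(attachment_infos);
// let _fb_info = vk::FramebufferCreateInfo::default()
//     .flags(vk::FramebufferCreateFlags::IMAGELESS)
//     .push_next(&mut imageless);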
attachment_image_info_count: u32::default(), p_attachment_image_infos: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FramebufferAttachmentsCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_ATTACHMENTS_CREATE_INFO; } unsafe impl ExtendsFramebufferCreateInfo for FramebufferAttachmentsCreateInfo<'_> {} impl<'a> FramebufferAttachmentsCreateInfo<'a> { #[inline] pub fn attachment_image_infos( mut self, attachment_image_infos: &'a [FramebufferAttachmentImageInfo<'a>], ) -> Self { self.attachment_image_info_count = attachment_image_infos.len() as _; self.p_attachment_image_infos = attachment_image_infos.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FramebufferAttachmentImageInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ImageCreateFlags, pub usage: ImageUsageFlags, pub width: u32, pub height: u32, pub layer_count: u32, pub view_format_count: u32, pub p_view_formats: *const Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FramebufferAttachmentImageInfo<'_> {} unsafe impl Sync for FramebufferAttachmentImageInfo<'_> {} impl ::core::default::Default for FramebufferAttachmentImageInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ImageCreateFlags::default(), usage: ImageUsageFlags::default(), width: u32::default(), height: u32::default(), layer_count: u32::default(), view_format_count: u32::default(), p_view_formats: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FramebufferAttachmentImageInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_ATTACHMENT_IMAGE_INFO; } impl<'a> FramebufferAttachmentImageInfo<'a> { #[inline] pub fn flags(mut self, flags: ImageCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn usage(mut self, usage: ImageUsageFlags) -> Self { self.usage = usage; self } #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } #[inline] pub fn layer_count(mut self, layer_count: u32) -> Self { self.layer_count = layer_count; self } #[inline] pub fn view_formats(mut self, view_formats: &'a [Format]) -> Self { self.view_format_count = view_formats.len() as _; self.p_view_formats = view_formats.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassAttachmentBeginInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub attachment_count: u32, pub p_attachments: *const ImageView, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassAttachmentBeginInfo<'_> {} unsafe impl Sync for RenderPassAttachmentBeginInfo<'_> {} impl ::core::default::Default for RenderPassAttachmentBeginInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), attachment_count: u32::default(), p_attachments: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassAttachmentBeginInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_ATTACHMENT_BEGIN_INFO; } unsafe impl ExtendsRenderPassBeginInfo for RenderPassAttachmentBeginInfo<'_> {} impl<'a> RenderPassAttachmentBeginInfo<'a> { #[inline] pub fn attachments(mut self, attachments: &'a [ImageView]) -> Self { 
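// The slice is captured as a raw (len, pointer) pair; the `'a` lifetime on the
// struct keeps the borrow alive so the pointer cannot dangle while this
// `RenderPassAttachmentBeginInfo` is in use. User-side sketch, assuming
// `views` is a `&[vk::ImageView]` matching the imageless framebuffer's
// attachment count and order:
// let mut begin_attachments = vk::RenderPassAttachmentBeginInfo::default()
//     .attachments(views);
// let _rp_begin = vk::RenderPassBeginInfo::default().push_next(&mut begin_attachments);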
self.attachment_count = attachments.len() as _; self.p_attachments = attachments.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTextureCompressionASTCHDRFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub texture_compression_astc_hdr: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTextureCompressionASTCHDRFeatures<'_> {} unsafe impl Sync for PhysicalDeviceTextureCompressionASTCHDRFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceTextureCompressionASTCHDRFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), texture_compression_astc_hdr: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTextureCompressionASTCHDRFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTextureCompressionASTCHDRFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTextureCompressionASTCHDRFeatures<'_> {} impl<'a> PhysicalDeviceTextureCompressionASTCHDRFeatures<'a> { #[inline] pub fn texture_compression_astc_hdr(mut self, texture_compression_astc_hdr: bool) -> Self { self.texture_compression_astc_hdr = texture_compression_astc_hdr.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCooperativeMatrixFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cooperative_matrix: Bool32, pub cooperative_matrix_robust_buffer_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCooperativeMatrixFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceCooperativeMatrixFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCooperativeMatrixFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cooperative_matrix: Bool32::default(), cooperative_matrix_robust_buffer_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCooperativeMatrixFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCooperativeMatrixFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCooperativeMatrixFeaturesNV<'_> {} impl<'a> PhysicalDeviceCooperativeMatrixFeaturesNV<'a> { #[inline] pub fn cooperative_matrix(mut self, cooperative_matrix: bool) -> Self { self.cooperative_matrix = cooperative_matrix.into(); self } #[inline] pub fn cooperative_matrix_robust_buffer_access( mut self, cooperative_matrix_robust_buffer_access: bool, ) -> Self { self.cooperative_matrix_robust_buffer_access = cooperative_matrix_robust_buffer_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCooperativeMatrixPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cooperative_matrix_supported_stages: ShaderStageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCooperativeMatrixPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceCooperativeMatrixPropertiesNV<'_> {} impl ::core::default::Default for 
PhysicalDeviceCooperativeMatrixPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cooperative_matrix_supported_stages: ShaderStageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCooperativeMatrixPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCooperativeMatrixPropertiesNV<'_> {} impl<'a> PhysicalDeviceCooperativeMatrixPropertiesNV<'a> { #[inline] pub fn cooperative_matrix_supported_stages( mut self, cooperative_matrix_supported_stages: ShaderStageFlags, ) -> Self { self.cooperative_matrix_supported_stages = cooperative_matrix_supported_stages; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CooperativeMatrixPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub m_size: u32, pub n_size: u32, pub k_size: u32, pub a_type: ComponentTypeNV, pub b_type: ComponentTypeNV, pub c_type: ComponentTypeNV, pub d_type: ComponentTypeNV, pub scope: ScopeNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CooperativeMatrixPropertiesNV<'_> {} unsafe impl Sync for CooperativeMatrixPropertiesNV<'_> {} impl ::core::default::Default for CooperativeMatrixPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), m_size: u32::default(), n_size: u32::default(), k_size: u32::default(), a_type: ComponentTypeNV::default(), b_type: ComponentTypeNV::default(), c_type: ComponentTypeNV::default(), d_type: ComponentTypeNV::default(), scope: ScopeNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CooperativeMatrixPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COOPERATIVE_MATRIX_PROPERTIES_NV; } impl<'a> CooperativeMatrixPropertiesNV<'a> { #[inline] pub fn m_size(mut self, m_size: u32) -> Self { self.m_size = m_size; self } #[inline] pub fn n_size(mut self, n_size: u32) -> Self { self.n_size = n_size; self } #[inline] pub fn k_size(mut self, k_size: u32) -> Self { self.k_size = k_size; self } #[inline] pub fn a_type(mut self, a_type: ComponentTypeNV) -> Self { self.a_type = a_type; self } #[inline] pub fn b_type(mut self, b_type: ComponentTypeNV) -> Self { self.b_type = b_type; self } #[inline] pub fn c_type(mut self, c_type: ComponentTypeNV) -> Self { self.c_type = c_type; self } #[inline] pub fn d_type(mut self, d_type: ComponentTypeNV) -> Self { self.d_type = d_type; self } #[inline] pub fn scope(mut self, scope: ScopeNV) -> Self { self.scope = scope; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceYcbcrImageArraysFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ycbcr_image_arrays: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceYcbcrImageArraysFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceYcbcrImageArraysFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceYcbcrImageArraysFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ycbcr_image_arrays: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceYcbcrImageArraysFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceYcbcrImageArraysFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcrImageArraysFeaturesEXT<'_> {} impl<'a> PhysicalDeviceYcbcrImageArraysFeaturesEXT<'a> { #[inline] pub fn ycbcr_image_arrays(mut self, ycbcr_image_arrays: bool) -> Self { self.ycbcr_image_arrays = ycbcr_image_arrays.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewHandleInfoNVX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_view: ImageView, pub descriptor_type: DescriptorType, pub sampler: Sampler, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewHandleInfoNVX<'_> {} unsafe impl Sync for ImageViewHandleInfoNVX<'_> {} impl ::core::default::Default for ImageViewHandleInfoNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_view: ImageView::default(), descriptor_type: DescriptorType::default(), sampler: Sampler::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewHandleInfoNVX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_HANDLE_INFO_NVX; } impl<'a> ImageViewHandleInfoNVX<'a> { #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } #[inline] pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self { self.descriptor_type = descriptor_type; self } #[inline] pub fn sampler(mut self, sampler: Sampler) -> Self { self.sampler = sampler; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewAddressPropertiesNVX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_address: DeviceAddress, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewAddressPropertiesNVX<'_> {} unsafe impl Sync for ImageViewAddressPropertiesNVX<'_> {} impl ::core::default::Default for ImageViewAddressPropertiesNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_address: DeviceAddress::default(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewAddressPropertiesNVX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_ADDRESS_PROPERTIES_NVX; } impl<'a> ImageViewAddressPropertiesNVX<'a> { #[inline] pub fn device_address(mut self, device_address: DeviceAddress) -> Self { self.device_address = device_address; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PresentFrameTokenGGP<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub frame_token: GgpFrameToken, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PresentFrameTokenGGP<'_> {} unsafe impl Sync for PresentFrameTokenGGP<'_> {} impl ::core::default::Default for PresentFrameTokenGGP<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), frame_token: GgpFrameToken::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PresentFrameTokenGGP<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_FRAME_TOKEN_GGP; } unsafe impl 
ExtendsPresentInfoKHR for PresentFrameTokenGGP<'_> {} impl<'a> PresentFrameTokenGGP<'a> { #[inline] pub fn frame_token(mut self, frame_token: GgpFrameToken) -> Self { self.frame_token = frame_token; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PipelineCreationFeedback { pub flags: PipelineCreationFeedbackFlags, pub duration: u64, } impl PipelineCreationFeedback { #[inline] pub fn flags(mut self, flags: PipelineCreationFeedbackFlags) -> Self { self.flags = flags; self } #[inline] pub fn duration(mut self, duration: u64) -> Self { self.duration = duration; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCreationFeedbackCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_pipeline_creation_feedback: *mut PipelineCreationFeedback, pub pipeline_stage_creation_feedback_count: u32, pub p_pipeline_stage_creation_feedbacks: *mut PipelineCreationFeedback, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCreationFeedbackCreateInfo<'_> {} unsafe impl Sync for PipelineCreationFeedbackCreateInfo<'_> {} impl ::core::default::Default for PipelineCreationFeedbackCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_pipeline_creation_feedback: ::core::ptr::null_mut(), pipeline_stage_creation_feedback_count: u32::default(), p_pipeline_stage_creation_feedbacks: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCreationFeedbackCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_CREATION_FEEDBACK_CREATE_INFO; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCreationFeedbackCreateInfo<'_> {} unsafe impl ExtendsComputePipelineCreateInfo for PipelineCreationFeedbackCreateInfo<'_> {} unsafe impl ExtendsRayTracingPipelineCreateInfoNV for PipelineCreationFeedbackCreateInfo<'_> {} unsafe impl ExtendsRayTracingPipelineCreateInfoKHR for PipelineCreationFeedbackCreateInfo<'_> {} unsafe impl ExtendsExecutionGraphPipelineCreateInfoAMDX for PipelineCreationFeedbackCreateInfo<'_> {} impl<'a> PipelineCreationFeedbackCreateInfo<'a> { #[inline] pub fn pipeline_creation_feedback( mut self, pipeline_creation_feedback: &'a mut PipelineCreationFeedback, ) -> Self { self.p_pipeline_creation_feedback = pipeline_creation_feedback; self } #[inline] pub fn pipeline_stage_creation_feedbacks( mut self, pipeline_stage_creation_feedbacks: &'a mut [PipelineCreationFeedback], ) -> Self { self.pipeline_stage_creation_feedback_count = pipeline_stage_creation_feedbacks.len() as _; self.p_pipeline_stage_creation_feedbacks = pipeline_stage_creation_feedbacks.as_mut_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceFullScreenExclusiveInfoEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub full_screen_exclusive: FullScreenExclusiveEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceFullScreenExclusiveInfoEXT<'_> {} unsafe impl Sync for SurfaceFullScreenExclusiveInfoEXT<'_> {} impl ::core::default::Default for SurfaceFullScreenExclusiveInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), full_screen_exclusive: FullScreenExclusiveEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for 
SurfaceFullScreenExclusiveInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT; } unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfaceFullScreenExclusiveInfoEXT<'_> {} unsafe impl ExtendsSwapchainCreateInfoKHR for SurfaceFullScreenExclusiveInfoEXT<'_> {} impl<'a> SurfaceFullScreenExclusiveInfoEXT<'a> { #[inline] pub fn full_screen_exclusive(mut self, full_screen_exclusive: FullScreenExclusiveEXT) -> Self { self.full_screen_exclusive = full_screen_exclusive; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceFullScreenExclusiveWin32InfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub hmonitor: HMONITOR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceFullScreenExclusiveWin32InfoEXT<'_> {} unsafe impl Sync for SurfaceFullScreenExclusiveWin32InfoEXT<'_> {} impl ::core::default::Default for SurfaceFullScreenExclusiveWin32InfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), hmonitor: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceFullScreenExclusiveWin32InfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT; } unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfaceFullScreenExclusiveWin32InfoEXT<'_> {} unsafe impl ExtendsSwapchainCreateInfoKHR for SurfaceFullScreenExclusiveWin32InfoEXT<'_> {} impl<'a> SurfaceFullScreenExclusiveWin32InfoEXT<'a> { #[inline] pub fn hmonitor(mut self, hmonitor: HMONITOR) -> Self { self.hmonitor = hmonitor; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceCapabilitiesFullScreenExclusiveEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub full_screen_exclusive_supported: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceCapabilitiesFullScreenExclusiveEXT<'_> {} unsafe impl Sync for SurfaceCapabilitiesFullScreenExclusiveEXT<'_> {} impl ::core::default::Default for SurfaceCapabilitiesFullScreenExclusiveEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), full_screen_exclusive_supported: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceCapabilitiesFullScreenExclusiveEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT; } unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceCapabilitiesFullScreenExclusiveEXT<'_> {} impl<'a> SurfaceCapabilitiesFullScreenExclusiveEXT<'a> { #[inline] pub fn full_screen_exclusive_supported( mut self, full_screen_exclusive_supported: bool, ) -> Self { self.full_screen_exclusive_supported = full_screen_exclusive_supported.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePresentBarrierFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_barrier: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePresentBarrierFeaturesNV<'_> {} unsafe impl Sync for PhysicalDevicePresentBarrierFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDevicePresentBarrierFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), 
present_barrier: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePresentBarrierFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentBarrierFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentBarrierFeaturesNV<'_> {} impl<'a> PhysicalDevicePresentBarrierFeaturesNV<'a> { #[inline] pub fn present_barrier(mut self, present_barrier: bool) -> Self { self.present_barrier = present_barrier.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfaceCapabilitiesPresentBarrierNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_barrier_supported: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfaceCapabilitiesPresentBarrierNV<'_> {} unsafe impl Sync for SurfaceCapabilitiesPresentBarrierNV<'_> {} impl ::core::default::Default for SurfaceCapabilitiesPresentBarrierNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_barrier_supported: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfaceCapabilitiesPresentBarrierNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_PRESENT_BARRIER_NV; } unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceCapabilitiesPresentBarrierNV<'_> {} impl<'a> SurfaceCapabilitiesPresentBarrierNV<'a> { #[inline] pub fn present_barrier_supported(mut self, present_barrier_supported: bool) -> Self { self.present_barrier_supported = present_barrier_supported.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainPresentBarrierCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_barrier_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainPresentBarrierCreateInfoNV<'_> {} unsafe impl Sync for SwapchainPresentBarrierCreateInfoNV<'_> {} impl ::core::default::Default for SwapchainPresentBarrierCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_barrier_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainPresentBarrierCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV; } unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentBarrierCreateInfoNV<'_> {} impl<'a> SwapchainPresentBarrierCreateInfoNV<'a> { #[inline] pub fn present_barrier_enable(mut self, present_barrier_enable: bool) -> Self { self.present_barrier_enable = present_barrier_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePerformanceQueryFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub performance_counter_query_pools: Bool32, pub performance_counter_multiple_query_pools: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePerformanceQueryFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDevicePerformanceQueryFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePerformanceQueryFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: 
::core::ptr::null_mut(), performance_counter_query_pools: Bool32::default(), performance_counter_multiple_query_pools: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePerformanceQueryFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePerformanceQueryFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePerformanceQueryFeaturesKHR<'_> {} impl<'a> PhysicalDevicePerformanceQueryFeaturesKHR<'a> { #[inline] pub fn performance_counter_query_pools( mut self, performance_counter_query_pools: bool, ) -> Self { self.performance_counter_query_pools = performance_counter_query_pools.into(); self } #[inline] pub fn performance_counter_multiple_query_pools( mut self, performance_counter_multiple_query_pools: bool, ) -> Self { self.performance_counter_multiple_query_pools = performance_counter_multiple_query_pools.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePerformanceQueryPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub allow_command_buffer_query_copies: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePerformanceQueryPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDevicePerformanceQueryPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePerformanceQueryPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), allow_command_buffer_query_copies: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePerformanceQueryPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePerformanceQueryPropertiesKHR<'_> {} impl<'a> PhysicalDevicePerformanceQueryPropertiesKHR<'a> { #[inline] pub fn allow_command_buffer_query_copies( mut self, allow_command_buffer_query_copies: bool, ) -> Self { self.allow_command_buffer_query_copies = allow_command_buffer_query_copies.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceCounterKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub unit: PerformanceCounterUnitKHR, pub scope: PerformanceCounterScopeKHR, pub storage: PerformanceCounterStorageKHR, pub uuid: [u8; UUID_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceCounterKHR<'_> {} unsafe impl Sync for PerformanceCounterKHR<'_> {} impl ::core::default::Default for PerformanceCounterKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), unit: PerformanceCounterUnitKHR::default(), scope: PerformanceCounterScopeKHR::default(), storage: PerformanceCounterStorageKHR::default(), uuid: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceCounterKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_COUNTER_KHR; } impl<'a> PerformanceCounterKHR<'a> { #[inline] pub fn unit(mut self, unit: PerformanceCounterUnitKHR) -> Self { self.unit = unit; self } #[inline] pub fn scope(mut self, scope: PerformanceCounterScopeKHR) -> Self { self.scope = scope; self } #[inline] pub fn 
storage(mut self, storage: PerformanceCounterStorageKHR) -> Self { self.storage = storage; self } #[inline] pub fn uuid(mut self, uuid: [u8; UUID_SIZE]) -> Self { self.uuid = uuid; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceCounterDescriptionKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: PerformanceCounterDescriptionFlagsKHR, pub name: [c_char; MAX_DESCRIPTION_SIZE], pub category: [c_char; MAX_DESCRIPTION_SIZE], pub description: [c_char; MAX_DESCRIPTION_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceCounterDescriptionKHR<'_> {} unsafe impl Sync for PerformanceCounterDescriptionKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for PerformanceCounterDescriptionKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PerformanceCounterDescriptionKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("flags", &self.flags) .field("name", &self.name_as_c_str()) .field("category", &self.category_as_c_str()) .field("description", &self.description_as_c_str()) .finish() } } impl ::core::default::Default for PerformanceCounterDescriptionKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: PerformanceCounterDescriptionFlagsKHR::default(), name: unsafe { ::core::mem::zeroed() }, category: unsafe { ::core::mem::zeroed() }, description: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceCounterDescriptionKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_COUNTER_DESCRIPTION_KHR; } impl<'a> PerformanceCounterDescriptionKHR<'a> { #[inline] pub fn flags(mut self, flags: PerformanceCounterDescriptionFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn name(mut self, name: &CStr) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.name, name).map(|()| self) } #[inline] pub fn name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.name) } #[inline] pub fn category( mut self, category: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.category, category).map(|()| self) } #[inline] pub fn category_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.category) } #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueryPoolPerformanceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub queue_family_index: u32, pub counter_index_count: u32, pub p_counter_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueryPoolPerformanceCreateInfoKHR<'_> {} unsafe impl Sync for QueryPoolPerformanceCreateInfoKHR<'_> {} impl ::core::default::Default for QueryPoolPerformanceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), queue_family_index: u32::default(), counter_index_count: u32::default(), p_counter_indices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> 
TaggedStructure for QueryPoolPerformanceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR; } unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolPerformanceCreateInfoKHR<'_> {} impl<'a> QueryPoolPerformanceCreateInfoKHR<'a> { #[inline] pub fn queue_family_index(mut self, queue_family_index: u32) -> Self { self.queue_family_index = queue_family_index; self } #[inline] pub fn counter_indices(mut self, counter_indices: &'a [u32]) -> Self { self.counter_index_count = counter_indices.len() as _; self.p_counter_indices = counter_indices.as_ptr(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union PerformanceCounterResultKHR { pub int32: i32, pub int64: i64, pub uint32: u32, pub uint64: u64, pub float32: f32, pub float64: f64, } impl ::core::default::Default for PerformanceCounterResultKHR { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AcquireProfilingLockInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: AcquireProfilingLockFlagsKHR, pub timeout: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AcquireProfilingLockInfoKHR<'_> {} unsafe impl Sync for AcquireProfilingLockInfoKHR<'_> {} impl ::core::default::Default for AcquireProfilingLockInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: AcquireProfilingLockFlagsKHR::default(), timeout: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AcquireProfilingLockInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACQUIRE_PROFILING_LOCK_INFO_KHR; } impl<'a> AcquireProfilingLockInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: AcquireProfilingLockFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn timeout(mut self, timeout: u64) -> Self { self.timeout = timeout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceQuerySubmitInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub counter_pass_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceQuerySubmitInfoKHR<'_> {} unsafe impl Sync for PerformanceQuerySubmitInfoKHR<'_> {} impl ::core::default::Default for PerformanceQuerySubmitInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), counter_pass_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceQuerySubmitInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_QUERY_SUBMIT_INFO_KHR; } unsafe impl ExtendsSubmitInfo for PerformanceQuerySubmitInfoKHR<'_> {} unsafe impl ExtendsSubmitInfo2 for PerformanceQuerySubmitInfoKHR<'_> {} impl<'a> PerformanceQuerySubmitInfoKHR<'a> { #[inline] pub fn counter_pass_index(mut self, counter_pass_index: u32) -> Self { self.counter_pass_index = counter_pass_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct HeadlessSurfaceCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: HeadlessSurfaceCreateFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for HeadlessSurfaceCreateInfoEXT<'_> {} unsafe impl Sync for HeadlessSurfaceCreateInfoEXT<'_> {} impl ::core::default::Default for 
HeadlessSurfaceCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: HeadlessSurfaceCreateFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for HeadlessSurfaceCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::HEADLESS_SURFACE_CREATE_INFO_EXT; } impl<'a> HeadlessSurfaceCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: HeadlessSurfaceCreateFlagsEXT) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCoverageReductionModeFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub coverage_reduction_mode: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCoverageReductionModeFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceCoverageReductionModeFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCoverageReductionModeFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), coverage_reduction_mode: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCoverageReductionModeFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCoverageReductionModeFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCoverageReductionModeFeaturesNV<'_> {} impl<'a> PhysicalDeviceCoverageReductionModeFeaturesNV<'a> { #[inline] pub fn coverage_reduction_mode(mut self, coverage_reduction_mode: bool) -> Self { self.coverage_reduction_mode = coverage_reduction_mode.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCoverageReductionStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCoverageReductionStateCreateFlagsNV, pub coverage_reduction_mode: CoverageReductionModeNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCoverageReductionStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineCoverageReductionStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineCoverageReductionStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCoverageReductionStateCreateFlagsNV::default(), coverage_reduction_mode: CoverageReductionModeNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCoverageReductionStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV; } unsafe impl ExtendsPipelineMultisampleStateCreateInfo for PipelineCoverageReductionStateCreateInfoNV<'_> { } impl<'a> PipelineCoverageReductionStateCreateInfoNV<'a> { #[inline] pub fn flags(mut self, flags: PipelineCoverageReductionStateCreateFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn coverage_reduction_mode( mut self, coverage_reduction_mode: CoverageReductionModeNV, ) -> Self { self.coverage_reduction_mode = coverage_reduction_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FramebufferMixedSamplesCombinationNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub 
coverage_reduction_mode: CoverageReductionModeNV, pub rasterization_samples: SampleCountFlags, pub depth_stencil_samples: SampleCountFlags, pub color_samples: SampleCountFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FramebufferMixedSamplesCombinationNV<'_> {} unsafe impl Sync for FramebufferMixedSamplesCombinationNV<'_> {} impl ::core::default::Default for FramebufferMixedSamplesCombinationNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), coverage_reduction_mode: CoverageReductionModeNV::default(), rasterization_samples: SampleCountFlags::default(), depth_stencil_samples: SampleCountFlags::default(), color_samples: SampleCountFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FramebufferMixedSamplesCombinationNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV; } impl<'a> FramebufferMixedSamplesCombinationNV<'a> { #[inline] pub fn coverage_reduction_mode( mut self, coverage_reduction_mode: CoverageReductionModeNV, ) -> Self { self.coverage_reduction_mode = coverage_reduction_mode; self } #[inline] pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self { self.rasterization_samples = rasterization_samples; self } #[inline] pub fn depth_stencil_samples(mut self, depth_stencil_samples: SampleCountFlags) -> Self { self.depth_stencil_samples = depth_stencil_samples; self } #[inline] pub fn color_samples(mut self, color_samples: SampleCountFlags) -> Self { self.color_samples = color_samples; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_integer_functions2: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'_> {} unsafe impl Sync for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'_> {} impl ::core::default::Default for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_integer_functions2: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'_> {} impl<'a> PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL<'a> { #[inline] pub fn shader_integer_functions2(mut self, shader_integer_functions2: bool) -> Self { self.shader_integer_functions2 = shader_integer_functions2.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union PerformanceValueDataINTEL { pub value32: u32, pub value64: u64, pub value_float: f32, pub value_bool: Bool32, pub value_string: *const c_char, } impl ::core::default::Default for PerformanceValueDataINTEL { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct PerformanceValueINTEL { pub ty: PerformanceValueTypeINTEL, pub data: PerformanceValueDataINTEL, } #[cfg(feature = "debug")] impl fmt::Debug for 
PerformanceValueINTEL { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PerformanceValueINTEL") .field("ty", &self.ty) .field("data", &"union") .finish() } } impl PerformanceValueINTEL { #[inline] pub fn ty(mut self, ty: PerformanceValueTypeINTEL) -> Self { self.ty = ty; self } #[inline] pub fn data(mut self, data: PerformanceValueDataINTEL) -> Self { self.data = data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct InitializePerformanceApiInfoINTEL<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_user_data: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for InitializePerformanceApiInfoINTEL<'_> {} unsafe impl Sync for InitializePerformanceApiInfoINTEL<'_> {} impl ::core::default::Default for InitializePerformanceApiInfoINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_user_data: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for InitializePerformanceApiInfoINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::INITIALIZE_PERFORMANCE_API_INFO_INTEL; } impl<'a> InitializePerformanceApiInfoINTEL<'a> { #[inline] pub fn user_data(mut self, user_data: *mut c_void) -> Self { self.p_user_data = user_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueryPoolPerformanceQueryCreateInfoINTEL<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub performance_counters_sampling: QueryPoolSamplingModeINTEL, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueryPoolPerformanceQueryCreateInfoINTEL<'_> {} unsafe impl Sync for QueryPoolPerformanceQueryCreateInfoINTEL<'_> {} impl ::core::default::Default for QueryPoolPerformanceQueryCreateInfoINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), performance_counters_sampling: QueryPoolSamplingModeINTEL::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueryPoolPerformanceQueryCreateInfoINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL; } unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolPerformanceQueryCreateInfoINTEL<'_> {} impl<'a> QueryPoolPerformanceQueryCreateInfoINTEL<'a> { #[inline] pub fn performance_counters_sampling( mut self, performance_counters_sampling: QueryPoolSamplingModeINTEL, ) -> Self { self.performance_counters_sampling = performance_counters_sampling; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceMarkerInfoINTEL<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub marker: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceMarkerInfoINTEL<'_> {} unsafe impl Sync for PerformanceMarkerInfoINTEL<'_> {} impl ::core::default::Default for PerformanceMarkerInfoINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), marker: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceMarkerInfoINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_MARKER_INFO_INTEL; } impl<'a> PerformanceMarkerInfoINTEL<'a> { #[inline] pub fn marker(mut self, marker: u64) -> Self { self.marker = marker; self } } #[repr(C)] #[cfg_attr(feature = 
"debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceStreamMarkerInfoINTEL<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub marker: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceStreamMarkerInfoINTEL<'_> {} unsafe impl Sync for PerformanceStreamMarkerInfoINTEL<'_> {} impl ::core::default::Default for PerformanceStreamMarkerInfoINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), marker: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceStreamMarkerInfoINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_STREAM_MARKER_INFO_INTEL; } impl<'a> PerformanceStreamMarkerInfoINTEL<'a> { #[inline] pub fn marker(mut self, marker: u32) -> Self { self.marker = marker; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceOverrideInfoINTEL<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: PerformanceOverrideTypeINTEL, pub enable: Bool32, pub parameter: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceOverrideInfoINTEL<'_> {} unsafe impl Sync for PerformanceOverrideInfoINTEL<'_> {} impl ::core::default::Default for PerformanceOverrideInfoINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: PerformanceOverrideTypeINTEL::default(), enable: Bool32::default(), parameter: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceOverrideInfoINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_OVERRIDE_INFO_INTEL; } impl<'a> PerformanceOverrideInfoINTEL<'a> { #[inline] pub fn ty(mut self, ty: PerformanceOverrideTypeINTEL) -> Self { self.ty = ty; self } #[inline] pub fn enable(mut self, enable: bool) -> Self { self.enable = enable.into(); self } #[inline] pub fn parameter(mut self, parameter: u64) -> Self { self.parameter = parameter; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PerformanceConfigurationAcquireInfoINTEL<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: PerformanceConfigurationTypeINTEL, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PerformanceConfigurationAcquireInfoINTEL<'_> {} unsafe impl Sync for PerformanceConfigurationAcquireInfoINTEL<'_> {} impl ::core::default::Default for PerformanceConfigurationAcquireInfoINTEL<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: PerformanceConfigurationTypeINTEL::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PerformanceConfigurationAcquireInfoINTEL<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL; } impl<'a> PerformanceConfigurationAcquireInfoINTEL<'a> { #[inline] pub fn ty(mut self, ty: PerformanceConfigurationTypeINTEL) -> Self { self.ty = ty; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderClockFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_subgroup_clock: Bool32, pub shader_device_clock: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderClockFeaturesKHR<'_> {} unsafe impl Sync for 
PhysicalDeviceShaderClockFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderClockFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_subgroup_clock: Bool32::default(), shader_device_clock: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderClockFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderClockFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderClockFeaturesKHR<'_> {} impl<'a> PhysicalDeviceShaderClockFeaturesKHR<'a> { #[inline] pub fn shader_subgroup_clock(mut self, shader_subgroup_clock: bool) -> Self { self.shader_subgroup_clock = shader_subgroup_clock.into(); self } #[inline] pub fn shader_device_clock(mut self, shader_device_clock: bool) -> Self { self.shader_device_clock = shader_device_clock.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceIndexTypeUint8FeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub index_type_uint8: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceIndexTypeUint8FeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceIndexTypeUint8FeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceIndexTypeUint8FeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), index_type_uint8: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceIndexTypeUint8FeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceIndexTypeUint8FeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceIndexTypeUint8FeaturesKHR<'_> {} impl<'a> PhysicalDeviceIndexTypeUint8FeaturesKHR<'a> { #[inline] pub fn index_type_uint8(mut self, index_type_uint8: bool) -> Self { self.index_type_uint8 = index_type_uint8.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderSMBuiltinsPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_sm_count: u32, pub shader_warps_per_sm: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderSMBuiltinsPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceShaderSMBuiltinsPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceShaderSMBuiltinsPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_sm_count: u32::default(), shader_warps_per_sm: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderSMBuiltinsPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderSMBuiltinsPropertiesNV<'_> {} impl<'a> PhysicalDeviceShaderSMBuiltinsPropertiesNV<'a> { #[inline] pub fn shader_sm_count(mut self, shader_sm_count: u32) -> Self { self.shader_sm_count = shader_sm_count; self } #[inline] pub fn shader_warps_per_sm(mut self, shader_warps_per_sm: u32) -> Self { 
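// Editor's note - illustrative sketch only, not part of the generated bindings: properties
// structs such as PhysicalDeviceShaderSMBuiltinsPropertiesNV are normally filled in by the
// driver rather than set by the application; the setters here merely mirror the generated
// builder pattern. A typical query chains the struct into `PhysicalDeviceProperties2`,
// assuming an `instance: ash::Instance` and a `physical_device: PhysicalDevice` exist:
//
//     let mut sm_builtins = PhysicalDeviceShaderSMBuiltinsPropertiesNV::default();
//     let mut props2 = PhysicalDeviceProperties2::default().push_next(&mut sm_builtins);
//     unsafe { instance.get_physical_device_properties2(physical_device, &mut props2) };
//     let warps_per_sm = sm_builtins.shader_warps_per_sm;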
self.shader_warps_per_sm = shader_warps_per_sm; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderSMBuiltinsFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_sm_builtins: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderSMBuiltinsFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceShaderSMBuiltinsFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceShaderSMBuiltinsFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_sm_builtins: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderSMBuiltinsFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSMBuiltinsFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSMBuiltinsFeaturesNV<'_> {} impl<'a> PhysicalDeviceShaderSMBuiltinsFeaturesNV<'a> { #[inline] pub fn shader_sm_builtins(mut self, shader_sm_builtins: bool) -> Self { self.shader_sm_builtins = shader_sm_builtins.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_shader_sample_interlock: Bool32, pub fragment_shader_pixel_interlock: Bool32, pub fragment_shader_shading_rate_interlock: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_shader_sample_interlock: Bool32::default(), fragment_shader_pixel_interlock: Bool32::default(), fragment_shader_shading_rate_interlock: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'_> {} impl<'a> PhysicalDeviceFragmentShaderInterlockFeaturesEXT<'a> { #[inline] pub fn fragment_shader_sample_interlock( mut self, fragment_shader_sample_interlock: bool, ) -> Self { self.fragment_shader_sample_interlock = fragment_shader_sample_interlock.into(); self } #[inline] pub fn fragment_shader_pixel_interlock( mut self, fragment_shader_pixel_interlock: bool, ) -> Self { self.fragment_shader_pixel_interlock = fragment_shader_pixel_interlock.into(); self } #[inline] pub fn fragment_shader_shading_rate_interlock( mut self, fragment_shader_shading_rate_interlock: bool, ) -> Self { self.fragment_shader_shading_rate_interlock = fragment_shader_shading_rate_interlock.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub 
separate_depth_stencil_layouts: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'_> {} unsafe impl Sync for PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), separate_depth_stencil_layouts: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'_> {} impl<'a> PhysicalDeviceSeparateDepthStencilLayoutsFeatures<'a> { #[inline] pub fn separate_depth_stencil_layouts(mut self, separate_depth_stencil_layouts: bool) -> Self { self.separate_depth_stencil_layouts = separate_depth_stencil_layouts.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AttachmentReferenceStencilLayout<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub stencil_layout: ImageLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AttachmentReferenceStencilLayout<'_> {} unsafe impl Sync for AttachmentReferenceStencilLayout<'_> {} impl ::core::default::Default for AttachmentReferenceStencilLayout<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), stencil_layout: ImageLayout::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AttachmentReferenceStencilLayout<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_REFERENCE_STENCIL_LAYOUT; } unsafe impl ExtendsAttachmentReference2 for AttachmentReferenceStencilLayout<'_> {} impl<'a> AttachmentReferenceStencilLayout<'a> { #[inline] pub fn stencil_layout(mut self, stencil_layout: ImageLayout) -> Self { self.stencil_layout = stencil_layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub primitive_topology_list_restart: Bool32, pub primitive_topology_patch_list_restart: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), primitive_topology_list_restart: Bool32::default(), primitive_topology_patch_list_restart: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'_> {} impl<'a> 
PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT<'a> { #[inline] pub fn primitive_topology_list_restart( mut self, primitive_topology_list_restart: bool, ) -> Self { self.primitive_topology_list_restart = primitive_topology_list_restart.into(); self } #[inline] pub fn primitive_topology_patch_list_restart( mut self, primitive_topology_patch_list_restart: bool, ) -> Self { self.primitive_topology_patch_list_restart = primitive_topology_patch_list_restart.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AttachmentDescriptionStencilLayout<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub stencil_initial_layout: ImageLayout, pub stencil_final_layout: ImageLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AttachmentDescriptionStencilLayout<'_> {} unsafe impl Sync for AttachmentDescriptionStencilLayout<'_> {} impl ::core::default::Default for AttachmentDescriptionStencilLayout<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), stencil_initial_layout: ImageLayout::default(), stencil_final_layout: ImageLayout::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AttachmentDescriptionStencilLayout<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT; } unsafe impl ExtendsAttachmentDescription2 for AttachmentDescriptionStencilLayout<'_> {} impl<'a> AttachmentDescriptionStencilLayout<'a> { #[inline] pub fn stencil_initial_layout(mut self, stencil_initial_layout: ImageLayout) -> Self { self.stencil_initial_layout = stencil_initial_layout; self } #[inline] pub fn stencil_final_layout(mut self, stencil_final_layout: ImageLayout) -> Self { self.stencil_final_layout = stencil_final_layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_executable_info: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_executable_info: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'_> {} impl<'a> PhysicalDevicePipelineExecutablePropertiesFeaturesKHR<'a> { #[inline] pub fn pipeline_executable_info(mut self, pipeline_executable_info: bool) -> Self { self.pipeline_executable_info = pipeline_executable_info.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub pipeline: Pipeline, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineInfoKHR<'_> {} unsafe impl Sync 
for PipelineInfoKHR<'_> {} impl ::core::default::Default for PipelineInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), pipeline: Pipeline::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_INFO_KHR; } impl<'a> PipelineInfoKHR<'a> { #[inline] pub fn pipeline(mut self, pipeline: Pipeline) -> Self { self.pipeline = pipeline; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineExecutablePropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub stages: ShaderStageFlags, pub name: [c_char; MAX_DESCRIPTION_SIZE], pub description: [c_char; MAX_DESCRIPTION_SIZE], pub subgroup_size: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineExecutablePropertiesKHR<'_> {} unsafe impl Sync for PipelineExecutablePropertiesKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for PipelineExecutablePropertiesKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PipelineExecutablePropertiesKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("stages", &self.stages) .field("name", &self.name_as_c_str()) .field("description", &self.description_as_c_str()) .field("subgroup_size", &self.subgroup_size) .finish() } } impl ::core::default::Default for PipelineExecutablePropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), stages: ShaderStageFlags::default(), name: unsafe { ::core::mem::zeroed() }, description: unsafe { ::core::mem::zeroed() }, subgroup_size: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineExecutablePropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_EXECUTABLE_PROPERTIES_KHR; } impl<'a> PipelineExecutablePropertiesKHR<'a> { #[inline] pub fn stages(mut self, stages: ShaderStageFlags) -> Self { self.stages = stages; self } #[inline] pub fn name(mut self, name: &CStr) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.name, name).map(|()| self) } #[inline] pub fn name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.name) } #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } #[inline] pub fn subgroup_size(mut self, subgroup_size: u32) -> Self { self.subgroup_size = subgroup_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineExecutableInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub pipeline: Pipeline, pub executable_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineExecutableInfoKHR<'_> {} unsafe impl Sync for PipelineExecutableInfoKHR<'_> {} impl ::core::default::Default for PipelineExecutableInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), pipeline: Pipeline::default(), executable_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineExecutableInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::PIPELINE_EXECUTABLE_INFO_KHR;
}
impl<'a> PipelineExecutableInfoKHR<'a> {
    #[inline]
    pub fn pipeline(mut self, pipeline: Pipeline) -> Self {
        self.pipeline = pipeline;
        self
    }
    #[inline]
    pub fn executable_index(mut self, executable_index: u32) -> Self {
        self.executable_index = executable_index;
        self
    }
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = ""]
pub union PipelineExecutableStatisticValueKHR {
    pub b32: Bool32,
    pub i64: i64,
    pub u64: u64,
    pub f64: f64,
}
impl ::core::default::Default for PipelineExecutableStatisticValueKHR {
    #[inline]
    fn default() -> Self {
        unsafe { ::core::mem::zeroed() }
    }
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = ""]
#[must_use]
pub struct PipelineExecutableStatisticKHR<'a> {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub name: [c_char; MAX_DESCRIPTION_SIZE],
    pub description: [c_char; MAX_DESCRIPTION_SIZE],
    pub format: PipelineExecutableStatisticFormatKHR,
    pub value: PipelineExecutableStatisticValueKHR,
    pub _marker: PhantomData<&'a ()>,
}
unsafe impl Send for PipelineExecutableStatisticKHR<'_> {}
unsafe impl Sync for PipelineExecutableStatisticKHR<'_> {}
#[cfg(feature = "debug")]
impl fmt::Debug for PipelineExecutableStatisticKHR<'_> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_struct("PipelineExecutableStatisticKHR")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("name", &self.name_as_c_str())
            .field("description", &self.description_as_c_str())
            .field("format", &self.format)
            .field("value", &"union")
            .finish()
    }
}
impl ::core::default::Default for PipelineExecutableStatisticKHR<'_> {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::core::ptr::null_mut(),
            name: unsafe { ::core::mem::zeroed() },
            description: unsafe { ::core::mem::zeroed() },
            format: PipelineExecutableStatisticFormatKHR::default(),
            value: PipelineExecutableStatisticValueKHR::default(),
            _marker: PhantomData,
        }
    }
}
unsafe impl<'a> TaggedStructure for PipelineExecutableStatisticKHR<'a> {
    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_EXECUTABLE_STATISTIC_KHR;
}
impl<'a> PipelineExecutableStatisticKHR<'a> {
    #[inline]
    pub fn name(mut self, name: &CStr) -> core::result::Result<Self, CStrTooLargeForStaticArray> {
        write_c_str_slice_with_nul(&mut self.name, name).map(|()| self)
    }
    #[inline]
    pub fn name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> {
        wrap_c_str_slice_until_nul(&self.name)
    }
    #[inline]
    pub fn description(
        mut self,
        description: &CStr,
    ) -> core::result::Result<Self, CStrTooLargeForStaticArray> {
        write_c_str_slice_with_nul(&mut self.description, description).map(|()| self)
    }
    #[inline]
    pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> {
        wrap_c_str_slice_until_nul(&self.description)
    }
    #[inline]
    pub fn format(mut self, format: PipelineExecutableStatisticFormatKHR) -> Self {
        self.format = format;
        self
    }
    #[inline]
    pub fn value(mut self, value: PipelineExecutableStatisticValueKHR) -> Self {
        self.value = value;
        self
    }
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = ""]
#[must_use]
pub struct PipelineExecutableInternalRepresentationKHR<'a> {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub name: [c_char; MAX_DESCRIPTION_SIZE],
    pub description: [c_char; MAX_DESCRIPTION_SIZE],
    pub is_text: Bool32,
    pub data_size: usize,
    pub p_data: *mut c_void,
    pub _marker: PhantomData<&'a ()>,
}
unsafe impl Send for PipelineExecutableInternalRepresentationKHR<'_> {}
unsafe impl Sync for PipelineExecutableInternalRepresentationKHR<'_> {}
#[cfg(feature = "debug")]
impl fmt::Debug for
PipelineExecutableInternalRepresentationKHR<'_> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_struct("PipelineExecutableInternalRepresentationKHR")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("name", &self.name_as_c_str())
            .field("description", &self.description_as_c_str())
            .field("is_text", &self.is_text)
            .field("data_size", &self.data_size)
            .field("p_data", &self.p_data)
            .finish()
    }
}
impl ::core::default::Default for PipelineExecutableInternalRepresentationKHR<'_> {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::core::ptr::null_mut(),
            name: unsafe { ::core::mem::zeroed() },
            description: unsafe { ::core::mem::zeroed() },
            is_text: Bool32::default(),
            data_size: usize::default(),
            p_data: ::core::ptr::null_mut(),
            _marker: PhantomData,
        }
    }
}
unsafe impl<'a> TaggedStructure for PipelineExecutableInternalRepresentationKHR<'a> {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR;
}
impl<'a> PipelineExecutableInternalRepresentationKHR<'a> {
    #[inline]
    pub fn name(mut self, name: &CStr) -> core::result::Result<Self, CStrTooLargeForStaticArray> {
        write_c_str_slice_with_nul(&mut self.name, name).map(|()| self)
    }
    #[inline]
    pub fn name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> {
        wrap_c_str_slice_until_nul(&self.name)
    }
    #[inline]
    pub fn description(
        mut self,
        description: &CStr,
    ) -> core::result::Result<Self, CStrTooLargeForStaticArray> {
        write_c_str_slice_with_nul(&mut self.description, description).map(|()| self)
    }
    #[inline]
    pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> {
        wrap_c_str_slice_until_nul(&self.description)
    }
    #[inline]
    pub fn is_text(mut self, is_text: bool) -> Self {
        self.is_text = is_text.into();
        self
    }
    #[inline]
    pub fn data(mut self, data: &'a mut [u8]) -> Self {
        self.data_size = data.len();
        self.p_data = data.as_mut_ptr().cast();
        self
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = ""]
#[must_use]
pub struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'a> {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub shader_demote_to_helper_invocation: Bool32,
    pub _marker: PhantomData<&'a ()>,
}
unsafe impl Send for PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'_> {}
unsafe impl Sync for PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'_> {}
impl ::core::default::Default for PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'_> {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::core::ptr::null_mut(),
            shader_demote_to_helper_invocation: Bool32::default(),
            _marker: PhantomData,
        }
    }
}
unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'a> {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES;
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'_> {}
impl<'a> PhysicalDeviceShaderDemoteToHelperInvocationFeatures<'a> {
    #[inline]
    pub fn shader_demote_to_helper_invocation(
        mut self,
        shader_demote_to_helper_invocation: bool,
    ) -> Self {
        self.shader_demote_to_helper_invocation = shader_demote_to_helper_invocation.into();
        self
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = ""]
#[must_use]
pub struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'a> {
    pub s_type: StructureType,
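// Illustrative sketch (not part of the generated bindings): enabling
// VK_KHR_pipeline_executable_properties at device creation and reading the
// fixed-size C-string fields of `PipelineExecutablePropertiesKHR` through the
// fallible accessors defined above. Assumes `queue_infos:
// &[vk::DeviceQueueCreateInfo]` already exists and that the extension-specific
// query call filling `props` is performed elsewhere.
//
//     let mut exe_features =
//         vk::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR::default()
//             .pipeline_executable_info(true);
//     let device_info = vk::DeviceCreateInfo::default()
//         .queue_create_infos(queue_infos)
//         .push_next(&mut exe_features);
//     let props = vk::PipelineExecutablePropertiesKHR::default();
//     if let Ok(name) = props.name_as_c_str() {
//         println!("executable: {name:?}");
//     }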
pub p_next: *mut c_void, pub texel_buffer_alignment: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), texel_buffer_alignment: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'_> {} impl<'a> PhysicalDeviceTexelBufferAlignmentFeaturesEXT<'a> { #[inline] pub fn texel_buffer_alignment(mut self, texel_buffer_alignment: bool) -> Self { self.texel_buffer_alignment = texel_buffer_alignment.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTexelBufferAlignmentProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub storage_texel_buffer_offset_alignment_bytes: DeviceSize, pub storage_texel_buffer_offset_single_texel_alignment: Bool32, pub uniform_texel_buffer_offset_alignment_bytes: DeviceSize, pub uniform_texel_buffer_offset_single_texel_alignment: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTexelBufferAlignmentProperties<'_> {} unsafe impl Sync for PhysicalDeviceTexelBufferAlignmentProperties<'_> {} impl ::core::default::Default for PhysicalDeviceTexelBufferAlignmentProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), storage_texel_buffer_offset_alignment_bytes: DeviceSize::default(), storage_texel_buffer_offset_single_texel_alignment: Bool32::default(), uniform_texel_buffer_offset_alignment_bytes: DeviceSize::default(), uniform_texel_buffer_offset_single_texel_alignment: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTexelBufferAlignmentProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTexelBufferAlignmentProperties<'_> {} impl<'a> PhysicalDeviceTexelBufferAlignmentProperties<'a> { #[inline] pub fn storage_texel_buffer_offset_alignment_bytes( mut self, storage_texel_buffer_offset_alignment_bytes: DeviceSize, ) -> Self { self.storage_texel_buffer_offset_alignment_bytes = storage_texel_buffer_offset_alignment_bytes; self } #[inline] pub fn storage_texel_buffer_offset_single_texel_alignment( mut self, storage_texel_buffer_offset_single_texel_alignment: bool, ) -> Self { self.storage_texel_buffer_offset_single_texel_alignment = storage_texel_buffer_offset_single_texel_alignment.into(); self } #[inline] pub fn uniform_texel_buffer_offset_alignment_bytes( mut self, uniform_texel_buffer_offset_alignment_bytes: DeviceSize, ) -> Self { self.uniform_texel_buffer_offset_alignment_bytes = uniform_texel_buffer_offset_alignment_bytes; self } #[inline] pub fn uniform_texel_buffer_offset_single_texel_alignment( mut self, uniform_texel_buffer_offset_single_texel_alignment: bool, ) -> Self { 
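// Illustrative sketch: querying the texel-buffer alignment limits defined above
// through the `PhysicalDeviceProperties2` pNext chain. Assumes `instance:
// ash::Instance` and `pdev: vk::PhysicalDevice` are already available.
//
//     let mut texel_align = vk::PhysicalDeviceTexelBufferAlignmentProperties::default();
//     let mut props2 = vk::PhysicalDeviceProperties2::default().push_next(&mut texel_align);
//     unsafe { instance.get_physical_device_properties2(pdev, &mut props2) };
//     let min_storage_align = texel_align.storage_texel_buffer_offset_alignment_bytes;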
self.uniform_texel_buffer_offset_single_texel_alignment = uniform_texel_buffer_offset_single_texel_alignment.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSubgroupSizeControlFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub subgroup_size_control: Bool32, pub compute_full_subgroups: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSubgroupSizeControlFeatures<'_> {} unsafe impl Sync for PhysicalDeviceSubgroupSizeControlFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceSubgroupSizeControlFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), subgroup_size_control: Bool32::default(), compute_full_subgroups: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSubgroupSizeControlFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSubgroupSizeControlFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubgroupSizeControlFeatures<'_> {} impl<'a> PhysicalDeviceSubgroupSizeControlFeatures<'a> { #[inline] pub fn subgroup_size_control(mut self, subgroup_size_control: bool) -> Self { self.subgroup_size_control = subgroup_size_control.into(); self } #[inline] pub fn compute_full_subgroups(mut self, compute_full_subgroups: bool) -> Self { self.compute_full_subgroups = compute_full_subgroups.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSubgroupSizeControlProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_subgroup_size: u32, pub max_subgroup_size: u32, pub max_compute_workgroup_subgroups: u32, pub required_subgroup_size_stages: ShaderStageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSubgroupSizeControlProperties<'_> {} unsafe impl Sync for PhysicalDeviceSubgroupSizeControlProperties<'_> {} impl ::core::default::Default for PhysicalDeviceSubgroupSizeControlProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_subgroup_size: u32::default(), max_subgroup_size: u32::default(), max_compute_workgroup_subgroups: u32::default(), required_subgroup_size_stages: ShaderStageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSubgroupSizeControlProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupSizeControlProperties<'_> {} impl<'a> PhysicalDeviceSubgroupSizeControlProperties<'a> { #[inline] pub fn min_subgroup_size(mut self, min_subgroup_size: u32) -> Self { self.min_subgroup_size = min_subgroup_size; self } #[inline] pub fn max_subgroup_size(mut self, max_subgroup_size: u32) -> Self { self.max_subgroup_size = max_subgroup_size; self } #[inline] pub fn max_compute_workgroup_subgroups(mut self, max_compute_workgroup_subgroups: u32) -> Self { self.max_compute_workgroup_subgroups = max_compute_workgroup_subgroups; self } #[inline] pub fn required_subgroup_size_stages( mut self, required_subgroup_size_stages: ShaderStageFlags, ) -> Self { self.required_subgroup_size_stages = required_subgroup_size_stages; 
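// Illustrative sketch: reading the subgroup-size limits above, then pinning a
// shader stage to a specific subgroup size with the
// `PipelineShaderStageRequiredSubgroupSizeCreateInfo` struct defined just
// below. Assumes `instance` and `pdev` exist as in the previous sketch.
//
//     let mut subgroup_props = vk::PhysicalDeviceSubgroupSizeControlProperties::default();
//     let mut props2 = vk::PhysicalDeviceProperties2::default().push_next(&mut subgroup_props);
//     unsafe { instance.get_physical_device_properties2(pdev, &mut props2) };
//     let mut required = vk::PipelineShaderStageRequiredSubgroupSizeCreateInfo::default()
//         .required_subgroup_size(subgroup_props.min_subgroup_size);
//     let stage_info = vk::PipelineShaderStageCreateInfo::default().push_next(&mut required);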
self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineShaderStageRequiredSubgroupSizeCreateInfo<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub required_subgroup_size: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineShaderStageRequiredSubgroupSizeCreateInfo<'_> {} unsafe impl Sync for PipelineShaderStageRequiredSubgroupSizeCreateInfo<'_> {} impl ::core::default::Default for PipelineShaderStageRequiredSubgroupSizeCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), required_subgroup_size: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineShaderStageRequiredSubgroupSizeCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO; } unsafe impl ExtendsPipelineShaderStageCreateInfo for PipelineShaderStageRequiredSubgroupSizeCreateInfo<'_> { } unsafe impl ExtendsShaderCreateInfoEXT for PipelineShaderStageRequiredSubgroupSizeCreateInfo<'_> {} impl<'a> PipelineShaderStageRequiredSubgroupSizeCreateInfo<'a> { #[inline] pub fn required_subgroup_size(mut self, required_subgroup_size: u32) -> Self { self.required_subgroup_size = required_subgroup_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassShadingPipelineCreateInfoHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub render_pass: RenderPass, pub subpass: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassShadingPipelineCreateInfoHUAWEI<'_> {} unsafe impl Sync for SubpassShadingPipelineCreateInfoHUAWEI<'_> {} impl ::core::default::Default for SubpassShadingPipelineCreateInfoHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), render_pass: RenderPass::default(), subpass: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassShadingPipelineCreateInfoHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI; } unsafe impl ExtendsComputePipelineCreateInfo for SubpassShadingPipelineCreateInfoHUAWEI<'_> {} impl<'a> SubpassShadingPipelineCreateInfoHUAWEI<'a> { #[inline] pub fn render_pass(mut self, render_pass: RenderPass) -> Self { self.render_pass = render_pass; self } #[inline] pub fn subpass(mut self, subpass: u32) -> Self { self.subpass = subpass; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSubpassShadingPropertiesHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_subpass_shading_workgroup_size_aspect_ratio: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSubpassShadingPropertiesHUAWEI<'_> {} unsafe impl Sync for PhysicalDeviceSubpassShadingPropertiesHUAWEI<'_> {} impl ::core::default::Default for PhysicalDeviceSubpassShadingPropertiesHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_subpass_shading_workgroup_size_aspect_ratio: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSubpassShadingPropertiesHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI; } unsafe impl 
ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubpassShadingPropertiesHUAWEI<'_> {} impl<'a> PhysicalDeviceSubpassShadingPropertiesHUAWEI<'a> { #[inline] pub fn max_subpass_shading_workgroup_size_aspect_ratio( mut self, max_subpass_shading_workgroup_size_aspect_ratio: u32, ) -> Self { self.max_subpass_shading_workgroup_size_aspect_ratio = max_subpass_shading_workgroup_size_aspect_ratio; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_work_group_count: [u32; 3], pub max_work_group_size: [u32; 3], pub max_output_cluster_count: u32, pub indirect_buffer_offset_alignment: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'_> {} unsafe impl Sync for PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'_> {} impl ::core::default::Default for PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_work_group_count: unsafe { ::core::mem::zeroed() }, max_work_group_size: unsafe { ::core::mem::zeroed() }, max_output_cluster_count: u32::default(), indirect_buffer_offset_alignment: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'_> { } impl<'a> PhysicalDeviceClusterCullingShaderPropertiesHUAWEI<'a> { #[inline] pub fn max_work_group_count(mut self, max_work_group_count: [u32; 3]) -> Self { self.max_work_group_count = max_work_group_count; self } #[inline] pub fn max_work_group_size(mut self, max_work_group_size: [u32; 3]) -> Self { self.max_work_group_size = max_work_group_size; self } #[inline] pub fn max_output_cluster_count(mut self, max_output_cluster_count: u32) -> Self { self.max_output_cluster_count = max_output_cluster_count; self } #[inline] pub fn indirect_buffer_offset_alignment( mut self, indirect_buffer_offset_alignment: DeviceSize, ) -> Self { self.indirect_buffer_offset_alignment = indirect_buffer_offset_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryOpaqueCaptureAddressAllocateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub opaque_capture_address: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryOpaqueCaptureAddressAllocateInfo<'_> {} unsafe impl Sync for MemoryOpaqueCaptureAddressAllocateInfo<'_> {} impl ::core::default::Default for MemoryOpaqueCaptureAddressAllocateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), opaque_capture_address: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryOpaqueCaptureAddressAllocateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO; } unsafe impl ExtendsMemoryAllocateInfo for MemoryOpaqueCaptureAddressAllocateInfo<'_> {} impl<'a> MemoryOpaqueCaptureAddressAllocateInfo<'a> { #[inline] pub fn opaque_capture_address(mut self, opaque_capture_address: u64) -> Self { self.opaque_capture_address = 
opaque_capture_address; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceMemoryOpaqueCaptureAddressInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceMemoryOpaqueCaptureAddressInfo<'_> {} unsafe impl Sync for DeviceMemoryOpaqueCaptureAddressInfo<'_> {} impl ::core::default::Default for DeviceMemoryOpaqueCaptureAddressInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceMemoryOpaqueCaptureAddressInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO; } impl<'a> DeviceMemoryOpaqueCaptureAddressInfo<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceLineRasterizationFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub rectangular_lines: Bool32, pub bresenham_lines: Bool32, pub smooth_lines: Bool32, pub stippled_rectangular_lines: Bool32, pub stippled_bresenham_lines: Bool32, pub stippled_smooth_lines: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceLineRasterizationFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceLineRasterizationFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceLineRasterizationFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), rectangular_lines: Bool32::default(), bresenham_lines: Bool32::default(), smooth_lines: Bool32::default(), stippled_rectangular_lines: Bool32::default(), stippled_bresenham_lines: Bool32::default(), stippled_smooth_lines: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceLineRasterizationFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLineRasterizationFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLineRasterizationFeaturesKHR<'_> {} impl<'a> PhysicalDeviceLineRasterizationFeaturesKHR<'a> { #[inline] pub fn rectangular_lines(mut self, rectangular_lines: bool) -> Self { self.rectangular_lines = rectangular_lines.into(); self } #[inline] pub fn bresenham_lines(mut self, bresenham_lines: bool) -> Self { self.bresenham_lines = bresenham_lines.into(); self } #[inline] pub fn smooth_lines(mut self, smooth_lines: bool) -> Self { self.smooth_lines = smooth_lines.into(); self } #[inline] pub fn stippled_rectangular_lines(mut self, stippled_rectangular_lines: bool) -> Self { self.stippled_rectangular_lines = stippled_rectangular_lines.into(); self } #[inline] pub fn stippled_bresenham_lines(mut self, stippled_bresenham_lines: bool) -> Self { self.stippled_bresenham_lines = stippled_bresenham_lines.into(); self } #[inline] pub fn stippled_smooth_lines(mut self, stippled_smooth_lines: bool) -> Self { self.stippled_smooth_lines = stippled_smooth_lines.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceLineRasterizationPropertiesKHR<'a> { pub s_type: 
StructureType, pub p_next: *mut c_void, pub line_sub_pixel_precision_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceLineRasterizationPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceLineRasterizationPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceLineRasterizationPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), line_sub_pixel_precision_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceLineRasterizationPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceLineRasterizationPropertiesKHR<'_> {} impl<'a> PhysicalDeviceLineRasterizationPropertiesKHR<'a> { #[inline] pub fn line_sub_pixel_precision_bits(mut self, line_sub_pixel_precision_bits: u32) -> Self { self.line_sub_pixel_precision_bits = line_sub_pixel_precision_bits; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRasterizationLineStateCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub line_rasterization_mode: LineRasterizationModeKHR, pub stippled_line_enable: Bool32, pub line_stipple_factor: u32, pub line_stipple_pattern: u16, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationLineStateCreateInfoKHR<'_> {} unsafe impl Sync for PipelineRasterizationLineStateCreateInfoKHR<'_> {} impl ::core::default::Default for PipelineRasterizationLineStateCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), line_rasterization_mode: LineRasterizationModeKHR::default(), stippled_line_enable: Bool32::default(), line_stipple_factor: u32::default(), line_stipple_pattern: u16::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationLineStateCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR; } unsafe impl ExtendsPipelineRasterizationStateCreateInfo for PipelineRasterizationLineStateCreateInfoKHR<'_> { } impl<'a> PipelineRasterizationLineStateCreateInfoKHR<'a> { #[inline] pub fn line_rasterization_mode( mut self, line_rasterization_mode: LineRasterizationModeKHR, ) -> Self { self.line_rasterization_mode = line_rasterization_mode; self } #[inline] pub fn stippled_line_enable(mut self, stippled_line_enable: bool) -> Self { self.stippled_line_enable = stippled_line_enable.into(); self } #[inline] pub fn line_stipple_factor(mut self, line_stipple_factor: u32) -> Self { self.line_stipple_factor = line_stipple_factor; self } #[inline] pub fn line_stipple_pattern(mut self, line_stipple_pattern: u16) -> Self { self.line_stipple_pattern = line_stipple_pattern; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelineCreationCacheControlFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_creation_cache_control: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelineCreationCacheControlFeatures<'_> {} unsafe impl Sync for PhysicalDevicePipelineCreationCacheControlFeatures<'_> {} impl ::core::default::Default for PhysicalDevicePipelineCreationCacheControlFeatures<'_> { #[inline] fn default() -> Self 
{ Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_creation_cache_control: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelineCreationCacheControlFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineCreationCacheControlFeatures<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineCreationCacheControlFeatures<'_> {} impl<'a> PhysicalDevicePipelineCreationCacheControlFeatures<'a> { #[inline] pub fn pipeline_creation_cache_control( mut self, pipeline_creation_cache_control: bool, ) -> Self { self.pipeline_creation_cache_control = pipeline_creation_cache_control.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkan11Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub storage_buffer16_bit_access: Bool32, pub uniform_and_storage_buffer16_bit_access: Bool32, pub storage_push_constant16: Bool32, pub storage_input_output16: Bool32, pub multiview: Bool32, pub multiview_geometry_shader: Bool32, pub multiview_tessellation_shader: Bool32, pub variable_pointers_storage_buffer: Bool32, pub variable_pointers: Bool32, pub protected_memory: Bool32, pub sampler_ycbcr_conversion: Bool32, pub shader_draw_parameters: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkan11Features<'_> {} unsafe impl Sync for PhysicalDeviceVulkan11Features<'_> {} impl ::core::default::Default for PhysicalDeviceVulkan11Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), storage_buffer16_bit_access: Bool32::default(), uniform_and_storage_buffer16_bit_access: Bool32::default(), storage_push_constant16: Bool32::default(), storage_input_output16: Bool32::default(), multiview: Bool32::default(), multiview_geometry_shader: Bool32::default(), multiview_tessellation_shader: Bool32::default(), variable_pointers_storage_buffer: Bool32::default(), variable_pointers: Bool32::default(), protected_memory: Bool32::default(), sampler_ycbcr_conversion: Bool32::default(), shader_draw_parameters: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkan11Features<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_1_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan11Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan11Features<'_> {} impl<'a> PhysicalDeviceVulkan11Features<'a> { #[inline] pub fn storage_buffer16_bit_access(mut self, storage_buffer16_bit_access: bool) -> Self { self.storage_buffer16_bit_access = storage_buffer16_bit_access.into(); self } #[inline] pub fn uniform_and_storage_buffer16_bit_access( mut self, uniform_and_storage_buffer16_bit_access: bool, ) -> Self { self.uniform_and_storage_buffer16_bit_access = uniform_and_storage_buffer16_bit_access.into(); self } #[inline] pub fn storage_push_constant16(mut self, storage_push_constant16: bool) -> Self { self.storage_push_constant16 = storage_push_constant16.into(); self } #[inline] pub fn storage_input_output16(mut self, storage_input_output16: bool) -> Self { self.storage_input_output16 = storage_input_output16.into(); self } #[inline] pub fn multiview(mut self, multiview: bool) -> Self { 
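// Illustrative sketch: enabling a couple of the Vulkan 1.1 core features above
// at device creation time. `PhysicalDeviceVulkan11Features` implements
// `ExtendsDeviceCreateInfo`, so it can be pushed straight onto the
// `DeviceCreateInfo` pNext chain; `queue_infos` is assumed to exist.
//
//     let mut vk11 = vk::PhysicalDeviceVulkan11Features::default()
//         .multiview(true)
//         .shader_draw_parameters(true);
//     let device_info = vk::DeviceCreateInfo::default()
//         .queue_create_infos(queue_infos)
//         .push_next(&mut vk11);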
self.multiview = multiview.into(); self } #[inline] pub fn multiview_geometry_shader(mut self, multiview_geometry_shader: bool) -> Self { self.multiview_geometry_shader = multiview_geometry_shader.into(); self } #[inline] pub fn multiview_tessellation_shader(mut self, multiview_tessellation_shader: bool) -> Self { self.multiview_tessellation_shader = multiview_tessellation_shader.into(); self } #[inline] pub fn variable_pointers_storage_buffer( mut self, variable_pointers_storage_buffer: bool, ) -> Self { self.variable_pointers_storage_buffer = variable_pointers_storage_buffer.into(); self } #[inline] pub fn variable_pointers(mut self, variable_pointers: bool) -> Self { self.variable_pointers = variable_pointers.into(); self } #[inline] pub fn protected_memory(mut self, protected_memory: bool) -> Self { self.protected_memory = protected_memory.into(); self } #[inline] pub fn sampler_ycbcr_conversion(mut self, sampler_ycbcr_conversion: bool) -> Self { self.sampler_ycbcr_conversion = sampler_ycbcr_conversion.into(); self } #[inline] pub fn shader_draw_parameters(mut self, shader_draw_parameters: bool) -> Self { self.shader_draw_parameters = shader_draw_parameters.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkan11Properties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_uuid: [u8; UUID_SIZE], pub driver_uuid: [u8; UUID_SIZE], pub device_luid: [u8; LUID_SIZE], pub device_node_mask: u32, pub device_luid_valid: Bool32, pub subgroup_size: u32, pub subgroup_supported_stages: ShaderStageFlags, pub subgroup_supported_operations: SubgroupFeatureFlags, pub subgroup_quad_operations_in_all_stages: Bool32, pub point_clipping_behavior: PointClippingBehavior, pub max_multiview_view_count: u32, pub max_multiview_instance_index: u32, pub protected_no_fault: Bool32, pub max_per_set_descriptors: u32, pub max_memory_allocation_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkan11Properties<'_> {} unsafe impl Sync for PhysicalDeviceVulkan11Properties<'_> {} impl ::core::default::Default for PhysicalDeviceVulkan11Properties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_uuid: unsafe { ::core::mem::zeroed() }, driver_uuid: unsafe { ::core::mem::zeroed() }, device_luid: unsafe { ::core::mem::zeroed() }, device_node_mask: u32::default(), device_luid_valid: Bool32::default(), subgroup_size: u32::default(), subgroup_supported_stages: ShaderStageFlags::default(), subgroup_supported_operations: SubgroupFeatureFlags::default(), subgroup_quad_operations_in_all_stages: Bool32::default(), point_clipping_behavior: PointClippingBehavior::default(), max_multiview_view_count: u32::default(), max_multiview_instance_index: u32::default(), protected_no_fault: Bool32::default(), max_per_set_descriptors: u32::default(), max_memory_allocation_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkan11Properties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan11Properties<'_> {} impl<'a> PhysicalDeviceVulkan11Properties<'a> { #[inline] pub fn device_uuid(mut self, device_uuid: [u8; UUID_SIZE]) -> Self { self.device_uuid = device_uuid; self } #[inline] pub fn driver_uuid(mut self, driver_uuid: [u8; UUID_SIZE]) -> Self { 
self.driver_uuid = driver_uuid; self } #[inline] pub fn device_luid(mut self, device_luid: [u8; LUID_SIZE]) -> Self { self.device_luid = device_luid; self } #[inline] pub fn device_node_mask(mut self, device_node_mask: u32) -> Self { self.device_node_mask = device_node_mask; self } #[inline] pub fn device_luid_valid(mut self, device_luid_valid: bool) -> Self { self.device_luid_valid = device_luid_valid.into(); self } #[inline] pub fn subgroup_size(mut self, subgroup_size: u32) -> Self { self.subgroup_size = subgroup_size; self } #[inline] pub fn subgroup_supported_stages( mut self, subgroup_supported_stages: ShaderStageFlags, ) -> Self { self.subgroup_supported_stages = subgroup_supported_stages; self } #[inline] pub fn subgroup_supported_operations( mut self, subgroup_supported_operations: SubgroupFeatureFlags, ) -> Self { self.subgroup_supported_operations = subgroup_supported_operations; self } #[inline] pub fn subgroup_quad_operations_in_all_stages( mut self, subgroup_quad_operations_in_all_stages: bool, ) -> Self { self.subgroup_quad_operations_in_all_stages = subgroup_quad_operations_in_all_stages.into(); self } #[inline] pub fn point_clipping_behavior( mut self, point_clipping_behavior: PointClippingBehavior, ) -> Self { self.point_clipping_behavior = point_clipping_behavior; self } #[inline] pub fn max_multiview_view_count(mut self, max_multiview_view_count: u32) -> Self { self.max_multiview_view_count = max_multiview_view_count; self } #[inline] pub fn max_multiview_instance_index(mut self, max_multiview_instance_index: u32) -> Self { self.max_multiview_instance_index = max_multiview_instance_index; self } #[inline] pub fn protected_no_fault(mut self, protected_no_fault: bool) -> Self { self.protected_no_fault = protected_no_fault.into(); self } #[inline] pub fn max_per_set_descriptors(mut self, max_per_set_descriptors: u32) -> Self { self.max_per_set_descriptors = max_per_set_descriptors; self } #[inline] pub fn max_memory_allocation_size(mut self, max_memory_allocation_size: DeviceSize) -> Self { self.max_memory_allocation_size = max_memory_allocation_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkan12Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub sampler_mirror_clamp_to_edge: Bool32, pub draw_indirect_count: Bool32, pub storage_buffer8_bit_access: Bool32, pub uniform_and_storage_buffer8_bit_access: Bool32, pub storage_push_constant8: Bool32, pub shader_buffer_int64_atomics: Bool32, pub shader_shared_int64_atomics: Bool32, pub shader_float16: Bool32, pub shader_int8: Bool32, pub descriptor_indexing: Bool32, pub shader_input_attachment_array_dynamic_indexing: Bool32, pub shader_uniform_texel_buffer_array_dynamic_indexing: Bool32, pub shader_storage_texel_buffer_array_dynamic_indexing: Bool32, pub shader_uniform_buffer_array_non_uniform_indexing: Bool32, pub shader_sampled_image_array_non_uniform_indexing: Bool32, pub shader_storage_buffer_array_non_uniform_indexing: Bool32, pub shader_storage_image_array_non_uniform_indexing: Bool32, pub shader_input_attachment_array_non_uniform_indexing: Bool32, pub shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32, pub shader_storage_texel_buffer_array_non_uniform_indexing: Bool32, pub descriptor_binding_uniform_buffer_update_after_bind: Bool32, pub descriptor_binding_sampled_image_update_after_bind: Bool32, pub descriptor_binding_storage_image_update_after_bind: Bool32, pub 
descriptor_binding_storage_buffer_update_after_bind: Bool32, pub descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32, pub descriptor_binding_storage_texel_buffer_update_after_bind: Bool32, pub descriptor_binding_update_unused_while_pending: Bool32, pub descriptor_binding_partially_bound: Bool32, pub descriptor_binding_variable_descriptor_count: Bool32, pub runtime_descriptor_array: Bool32, pub sampler_filter_minmax: Bool32, pub scalar_block_layout: Bool32, pub imageless_framebuffer: Bool32, pub uniform_buffer_standard_layout: Bool32, pub shader_subgroup_extended_types: Bool32, pub separate_depth_stencil_layouts: Bool32, pub host_query_reset: Bool32, pub timeline_semaphore: Bool32, pub buffer_device_address: Bool32, pub buffer_device_address_capture_replay: Bool32, pub buffer_device_address_multi_device: Bool32, pub vulkan_memory_model: Bool32, pub vulkan_memory_model_device_scope: Bool32, pub vulkan_memory_model_availability_visibility_chains: Bool32, pub shader_output_viewport_index: Bool32, pub shader_output_layer: Bool32, pub subgroup_broadcast_dynamic_id: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkan12Features<'_> {} unsafe impl Sync for PhysicalDeviceVulkan12Features<'_> {} impl ::core::default::Default for PhysicalDeviceVulkan12Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), sampler_mirror_clamp_to_edge: Bool32::default(), draw_indirect_count: Bool32::default(), storage_buffer8_bit_access: Bool32::default(), uniform_and_storage_buffer8_bit_access: Bool32::default(), storage_push_constant8: Bool32::default(), shader_buffer_int64_atomics: Bool32::default(), shader_shared_int64_atomics: Bool32::default(), shader_float16: Bool32::default(), shader_int8: Bool32::default(), descriptor_indexing: Bool32::default(), shader_input_attachment_array_dynamic_indexing: Bool32::default(), shader_uniform_texel_buffer_array_dynamic_indexing: Bool32::default(), shader_storage_texel_buffer_array_dynamic_indexing: Bool32::default(), shader_uniform_buffer_array_non_uniform_indexing: Bool32::default(), shader_sampled_image_array_non_uniform_indexing: Bool32::default(), shader_storage_buffer_array_non_uniform_indexing: Bool32::default(), shader_storage_image_array_non_uniform_indexing: Bool32::default(), shader_input_attachment_array_non_uniform_indexing: Bool32::default(), shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32::default(), shader_storage_texel_buffer_array_non_uniform_indexing: Bool32::default(), descriptor_binding_uniform_buffer_update_after_bind: Bool32::default(), descriptor_binding_sampled_image_update_after_bind: Bool32::default(), descriptor_binding_storage_image_update_after_bind: Bool32::default(), descriptor_binding_storage_buffer_update_after_bind: Bool32::default(), descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32::default(), descriptor_binding_storage_texel_buffer_update_after_bind: Bool32::default(), descriptor_binding_update_unused_while_pending: Bool32::default(), descriptor_binding_partially_bound: Bool32::default(), descriptor_binding_variable_descriptor_count: Bool32::default(), runtime_descriptor_array: Bool32::default(), sampler_filter_minmax: Bool32::default(), scalar_block_layout: Bool32::default(), imageless_framebuffer: Bool32::default(), uniform_buffer_standard_layout: Bool32::default(), shader_subgroup_extended_types: Bool32::default(), separate_depth_stencil_layouts: Bool32::default(), host_query_reset: 
Bool32::default(), timeline_semaphore: Bool32::default(), buffer_device_address: Bool32::default(), buffer_device_address_capture_replay: Bool32::default(), buffer_device_address_multi_device: Bool32::default(), vulkan_memory_model: Bool32::default(), vulkan_memory_model_device_scope: Bool32::default(), vulkan_memory_model_availability_visibility_chains: Bool32::default(), shader_output_viewport_index: Bool32::default(), shader_output_layer: Bool32::default(), subgroup_broadcast_dynamic_id: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkan12Features<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_2_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan12Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan12Features<'_> {} impl<'a> PhysicalDeviceVulkan12Features<'a> { #[inline] pub fn sampler_mirror_clamp_to_edge(mut self, sampler_mirror_clamp_to_edge: bool) -> Self { self.sampler_mirror_clamp_to_edge = sampler_mirror_clamp_to_edge.into(); self } #[inline] pub fn draw_indirect_count(mut self, draw_indirect_count: bool) -> Self { self.draw_indirect_count = draw_indirect_count.into(); self } #[inline] pub fn storage_buffer8_bit_access(mut self, storage_buffer8_bit_access: bool) -> Self { self.storage_buffer8_bit_access = storage_buffer8_bit_access.into(); self } #[inline] pub fn uniform_and_storage_buffer8_bit_access( mut self, uniform_and_storage_buffer8_bit_access: bool, ) -> Self { self.uniform_and_storage_buffer8_bit_access = uniform_and_storage_buffer8_bit_access.into(); self } #[inline] pub fn storage_push_constant8(mut self, storage_push_constant8: bool) -> Self { self.storage_push_constant8 = storage_push_constant8.into(); self } #[inline] pub fn shader_buffer_int64_atomics(mut self, shader_buffer_int64_atomics: bool) -> Self { self.shader_buffer_int64_atomics = shader_buffer_int64_atomics.into(); self } #[inline] pub fn shader_shared_int64_atomics(mut self, shader_shared_int64_atomics: bool) -> Self { self.shader_shared_int64_atomics = shader_shared_int64_atomics.into(); self } #[inline] pub fn shader_float16(mut self, shader_float16: bool) -> Self { self.shader_float16 = shader_float16.into(); self } #[inline] pub fn shader_int8(mut self, shader_int8: bool) -> Self { self.shader_int8 = shader_int8.into(); self } #[inline] pub fn descriptor_indexing(mut self, descriptor_indexing: bool) -> Self { self.descriptor_indexing = descriptor_indexing.into(); self } #[inline] pub fn shader_input_attachment_array_dynamic_indexing( mut self, shader_input_attachment_array_dynamic_indexing: bool, ) -> Self { self.shader_input_attachment_array_dynamic_indexing = shader_input_attachment_array_dynamic_indexing.into(); self } #[inline] pub fn shader_uniform_texel_buffer_array_dynamic_indexing( mut self, shader_uniform_texel_buffer_array_dynamic_indexing: bool, ) -> Self { self.shader_uniform_texel_buffer_array_dynamic_indexing = shader_uniform_texel_buffer_array_dynamic_indexing.into(); self } #[inline] pub fn shader_storage_texel_buffer_array_dynamic_indexing( mut self, shader_storage_texel_buffer_array_dynamic_indexing: bool, ) -> Self { self.shader_storage_texel_buffer_array_dynamic_indexing = shader_storage_texel_buffer_array_dynamic_indexing.into(); self } #[inline] pub fn shader_uniform_buffer_array_non_uniform_indexing( mut self, shader_uniform_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_uniform_buffer_array_non_uniform_indexing = 
shader_uniform_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_sampled_image_array_non_uniform_indexing( mut self, shader_sampled_image_array_non_uniform_indexing: bool, ) -> Self { self.shader_sampled_image_array_non_uniform_indexing = shader_sampled_image_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_storage_buffer_array_non_uniform_indexing( mut self, shader_storage_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_storage_buffer_array_non_uniform_indexing = shader_storage_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_storage_image_array_non_uniform_indexing( mut self, shader_storage_image_array_non_uniform_indexing: bool, ) -> Self { self.shader_storage_image_array_non_uniform_indexing = shader_storage_image_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_input_attachment_array_non_uniform_indexing( mut self, shader_input_attachment_array_non_uniform_indexing: bool, ) -> Self { self.shader_input_attachment_array_non_uniform_indexing = shader_input_attachment_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_uniform_texel_buffer_array_non_uniform_indexing( mut self, shader_uniform_texel_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_uniform_texel_buffer_array_non_uniform_indexing = shader_uniform_texel_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn shader_storage_texel_buffer_array_non_uniform_indexing( mut self, shader_storage_texel_buffer_array_non_uniform_indexing: bool, ) -> Self { self.shader_storage_texel_buffer_array_non_uniform_indexing = shader_storage_texel_buffer_array_non_uniform_indexing.into(); self } #[inline] pub fn descriptor_binding_uniform_buffer_update_after_bind( mut self, descriptor_binding_uniform_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_uniform_buffer_update_after_bind = descriptor_binding_uniform_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_sampled_image_update_after_bind( mut self, descriptor_binding_sampled_image_update_after_bind: bool, ) -> Self { self.descriptor_binding_sampled_image_update_after_bind = descriptor_binding_sampled_image_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_storage_image_update_after_bind( mut self, descriptor_binding_storage_image_update_after_bind: bool, ) -> Self { self.descriptor_binding_storage_image_update_after_bind = descriptor_binding_storage_image_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_storage_buffer_update_after_bind( mut self, descriptor_binding_storage_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_storage_buffer_update_after_bind = descriptor_binding_storage_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_uniform_texel_buffer_update_after_bind( mut self, descriptor_binding_uniform_texel_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_uniform_texel_buffer_update_after_bind = descriptor_binding_uniform_texel_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_storage_texel_buffer_update_after_bind( mut self, descriptor_binding_storage_texel_buffer_update_after_bind: bool, ) -> Self { self.descriptor_binding_storage_texel_buffer_update_after_bind = descriptor_binding_storage_texel_buffer_update_after_bind.into(); self } #[inline] pub fn descriptor_binding_update_unused_while_pending( mut self, descriptor_binding_update_unused_while_pending: bool, ) -> Self { 
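// Illustrative sketch: querying which of the Vulkan 1.2 features above are
// supported before enabling them. Assumes `instance` and `pdev` exist; the
// fields are plain `Bool32` values, so they are compared against `vk::FALSE`.
//
//     let mut vk12 = vk::PhysicalDeviceVulkan12Features::default();
//     let mut features2 = vk::PhysicalDeviceFeatures2::default().push_next(&mut vk12);
//     unsafe { instance.get_physical_device_features2(pdev, &mut features2) };
//     let has_timeline = vk12.timeline_semaphore != vk::FALSE;
//     let has_bda = vk12.buffer_device_address != vk::FALSE;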
self.descriptor_binding_update_unused_while_pending = descriptor_binding_update_unused_while_pending.into(); self } #[inline] pub fn descriptor_binding_partially_bound( mut self, descriptor_binding_partially_bound: bool, ) -> Self { self.descriptor_binding_partially_bound = descriptor_binding_partially_bound.into(); self } #[inline] pub fn descriptor_binding_variable_descriptor_count( mut self, descriptor_binding_variable_descriptor_count: bool, ) -> Self { self.descriptor_binding_variable_descriptor_count = descriptor_binding_variable_descriptor_count.into(); self } #[inline] pub fn runtime_descriptor_array(mut self, runtime_descriptor_array: bool) -> Self { self.runtime_descriptor_array = runtime_descriptor_array.into(); self } #[inline] pub fn sampler_filter_minmax(mut self, sampler_filter_minmax: bool) -> Self { self.sampler_filter_minmax = sampler_filter_minmax.into(); self } #[inline] pub fn scalar_block_layout(mut self, scalar_block_layout: bool) -> Self { self.scalar_block_layout = scalar_block_layout.into(); self } #[inline] pub fn imageless_framebuffer(mut self, imageless_framebuffer: bool) -> Self { self.imageless_framebuffer = imageless_framebuffer.into(); self } #[inline] pub fn uniform_buffer_standard_layout(mut self, uniform_buffer_standard_layout: bool) -> Self { self.uniform_buffer_standard_layout = uniform_buffer_standard_layout.into(); self } #[inline] pub fn shader_subgroup_extended_types(mut self, shader_subgroup_extended_types: bool) -> Self { self.shader_subgroup_extended_types = shader_subgroup_extended_types.into(); self } #[inline] pub fn separate_depth_stencil_layouts(mut self, separate_depth_stencil_layouts: bool) -> Self { self.separate_depth_stencil_layouts = separate_depth_stencil_layouts.into(); self } #[inline] pub fn host_query_reset(mut self, host_query_reset: bool) -> Self { self.host_query_reset = host_query_reset.into(); self } #[inline] pub fn timeline_semaphore(mut self, timeline_semaphore: bool) -> Self { self.timeline_semaphore = timeline_semaphore.into(); self } #[inline] pub fn buffer_device_address(mut self, buffer_device_address: bool) -> Self { self.buffer_device_address = buffer_device_address.into(); self } #[inline] pub fn buffer_device_address_capture_replay( mut self, buffer_device_address_capture_replay: bool, ) -> Self { self.buffer_device_address_capture_replay = buffer_device_address_capture_replay.into(); self } #[inline] pub fn buffer_device_address_multi_device( mut self, buffer_device_address_multi_device: bool, ) -> Self { self.buffer_device_address_multi_device = buffer_device_address_multi_device.into(); self } #[inline] pub fn vulkan_memory_model(mut self, vulkan_memory_model: bool) -> Self { self.vulkan_memory_model = vulkan_memory_model.into(); self } #[inline] pub fn vulkan_memory_model_device_scope( mut self, vulkan_memory_model_device_scope: bool, ) -> Self { self.vulkan_memory_model_device_scope = vulkan_memory_model_device_scope.into(); self } #[inline] pub fn vulkan_memory_model_availability_visibility_chains( mut self, vulkan_memory_model_availability_visibility_chains: bool, ) -> Self { self.vulkan_memory_model_availability_visibility_chains = vulkan_memory_model_availability_visibility_chains.into(); self } #[inline] pub fn shader_output_viewport_index(mut self, shader_output_viewport_index: bool) -> Self { self.shader_output_viewport_index = shader_output_viewport_index.into(); self } #[inline] pub fn shader_output_layer(mut self, shader_output_layer: bool) -> Self { self.shader_output_layer = 
shader_output_layer.into(); self } #[inline] pub fn subgroup_broadcast_dynamic_id(mut self, subgroup_broadcast_dynamic_id: bool) -> Self { self.subgroup_broadcast_dynamic_id = subgroup_broadcast_dynamic_id.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkan12Properties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub driver_id: DriverId, pub driver_name: [c_char; MAX_DRIVER_NAME_SIZE], pub driver_info: [c_char; MAX_DRIVER_INFO_SIZE], pub conformance_version: ConformanceVersion, pub denorm_behavior_independence: ShaderFloatControlsIndependence, pub rounding_mode_independence: ShaderFloatControlsIndependence, pub shader_signed_zero_inf_nan_preserve_float16: Bool32, pub shader_signed_zero_inf_nan_preserve_float32: Bool32, pub shader_signed_zero_inf_nan_preserve_float64: Bool32, pub shader_denorm_preserve_float16: Bool32, pub shader_denorm_preserve_float32: Bool32, pub shader_denorm_preserve_float64: Bool32, pub shader_denorm_flush_to_zero_float16: Bool32, pub shader_denorm_flush_to_zero_float32: Bool32, pub shader_denorm_flush_to_zero_float64: Bool32, pub shader_rounding_mode_rte_float16: Bool32, pub shader_rounding_mode_rte_float32: Bool32, pub shader_rounding_mode_rte_float64: Bool32, pub shader_rounding_mode_rtz_float16: Bool32, pub shader_rounding_mode_rtz_float32: Bool32, pub shader_rounding_mode_rtz_float64: Bool32, pub max_update_after_bind_descriptors_in_all_pools: u32, pub shader_uniform_buffer_array_non_uniform_indexing_native: Bool32, pub shader_sampled_image_array_non_uniform_indexing_native: Bool32, pub shader_storage_buffer_array_non_uniform_indexing_native: Bool32, pub shader_storage_image_array_non_uniform_indexing_native: Bool32, pub shader_input_attachment_array_non_uniform_indexing_native: Bool32, pub robust_buffer_access_update_after_bind: Bool32, pub quad_divergent_implicit_lod: Bool32, pub max_per_stage_descriptor_update_after_bind_samplers: u32, pub max_per_stage_descriptor_update_after_bind_uniform_buffers: u32, pub max_per_stage_descriptor_update_after_bind_storage_buffers: u32, pub max_per_stage_descriptor_update_after_bind_sampled_images: u32, pub max_per_stage_descriptor_update_after_bind_storage_images: u32, pub max_per_stage_descriptor_update_after_bind_input_attachments: u32, pub max_per_stage_update_after_bind_resources: u32, pub max_descriptor_set_update_after_bind_samplers: u32, pub max_descriptor_set_update_after_bind_uniform_buffers: u32, pub max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32, pub max_descriptor_set_update_after_bind_storage_buffers: u32, pub max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32, pub max_descriptor_set_update_after_bind_sampled_images: u32, pub max_descriptor_set_update_after_bind_storage_images: u32, pub max_descriptor_set_update_after_bind_input_attachments: u32, pub supported_depth_resolve_modes: ResolveModeFlags, pub supported_stencil_resolve_modes: ResolveModeFlags, pub independent_resolve_none: Bool32, pub independent_resolve: Bool32, pub filter_minmax_single_component_formats: Bool32, pub filter_minmax_image_component_mapping: Bool32, pub max_timeline_semaphore_value_difference: u64, pub framebuffer_integer_color_sample_counts: SampleCountFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkan12Properties<'_> {} unsafe impl Sync for PhysicalDeviceVulkan12Properties<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for PhysicalDeviceVulkan12Properties<'_> { fn fmt(&self, fmt: &mut 
fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PhysicalDeviceVulkan12Properties") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("driver_id", &self.driver_id) .field("driver_name", &self.driver_name_as_c_str()) .field("driver_info", &self.driver_info_as_c_str()) .field("conformance_version", &self.conformance_version) .field( "denorm_behavior_independence", &self.denorm_behavior_independence, ) .field( "rounding_mode_independence", &self.rounding_mode_independence, ) .field( "shader_signed_zero_inf_nan_preserve_float16", &self.shader_signed_zero_inf_nan_preserve_float16, ) .field( "shader_signed_zero_inf_nan_preserve_float32", &self.shader_signed_zero_inf_nan_preserve_float32, ) .field( "shader_signed_zero_inf_nan_preserve_float64", &self.shader_signed_zero_inf_nan_preserve_float64, ) .field( "shader_denorm_preserve_float16", &self.shader_denorm_preserve_float16, ) .field( "shader_denorm_preserve_float32", &self.shader_denorm_preserve_float32, ) .field( "shader_denorm_preserve_float64", &self.shader_denorm_preserve_float64, ) .field( "shader_denorm_flush_to_zero_float16", &self.shader_denorm_flush_to_zero_float16, ) .field( "shader_denorm_flush_to_zero_float32", &self.shader_denorm_flush_to_zero_float32, ) .field( "shader_denorm_flush_to_zero_float64", &self.shader_denorm_flush_to_zero_float64, ) .field( "shader_rounding_mode_rte_float16", &self.shader_rounding_mode_rte_float16, ) .field( "shader_rounding_mode_rte_float32", &self.shader_rounding_mode_rte_float32, ) .field( "shader_rounding_mode_rte_float64", &self.shader_rounding_mode_rte_float64, ) .field( "shader_rounding_mode_rtz_float16", &self.shader_rounding_mode_rtz_float16, ) .field( "shader_rounding_mode_rtz_float32", &self.shader_rounding_mode_rtz_float32, ) .field( "shader_rounding_mode_rtz_float64", &self.shader_rounding_mode_rtz_float64, ) .field( "max_update_after_bind_descriptors_in_all_pools", &self.max_update_after_bind_descriptors_in_all_pools, ) .field( "shader_uniform_buffer_array_non_uniform_indexing_native", &self.shader_uniform_buffer_array_non_uniform_indexing_native, ) .field( "shader_sampled_image_array_non_uniform_indexing_native", &self.shader_sampled_image_array_non_uniform_indexing_native, ) .field( "shader_storage_buffer_array_non_uniform_indexing_native", &self.shader_storage_buffer_array_non_uniform_indexing_native, ) .field( "shader_storage_image_array_non_uniform_indexing_native", &self.shader_storage_image_array_non_uniform_indexing_native, ) .field( "shader_input_attachment_array_non_uniform_indexing_native", &self.shader_input_attachment_array_non_uniform_indexing_native, ) .field( "robust_buffer_access_update_after_bind", &self.robust_buffer_access_update_after_bind, ) .field( "quad_divergent_implicit_lod", &self.quad_divergent_implicit_lod, ) .field( "max_per_stage_descriptor_update_after_bind_samplers", &self.max_per_stage_descriptor_update_after_bind_samplers, ) .field( "max_per_stage_descriptor_update_after_bind_uniform_buffers", &self.max_per_stage_descriptor_update_after_bind_uniform_buffers, ) .field( "max_per_stage_descriptor_update_after_bind_storage_buffers", &self.max_per_stage_descriptor_update_after_bind_storage_buffers, ) .field( "max_per_stage_descriptor_update_after_bind_sampled_images", &self.max_per_stage_descriptor_update_after_bind_sampled_images, ) .field( "max_per_stage_descriptor_update_after_bind_storage_images", &self.max_per_stage_descriptor_update_after_bind_storage_images, ) .field( 
"max_per_stage_descriptor_update_after_bind_input_attachments", &self.max_per_stage_descriptor_update_after_bind_input_attachments, ) .field( "max_per_stage_update_after_bind_resources", &self.max_per_stage_update_after_bind_resources, ) .field( "max_descriptor_set_update_after_bind_samplers", &self.max_descriptor_set_update_after_bind_samplers, ) .field( "max_descriptor_set_update_after_bind_uniform_buffers", &self.max_descriptor_set_update_after_bind_uniform_buffers, ) .field( "max_descriptor_set_update_after_bind_uniform_buffers_dynamic", &self.max_descriptor_set_update_after_bind_uniform_buffers_dynamic, ) .field( "max_descriptor_set_update_after_bind_storage_buffers", &self.max_descriptor_set_update_after_bind_storage_buffers, ) .field( "max_descriptor_set_update_after_bind_storage_buffers_dynamic", &self.max_descriptor_set_update_after_bind_storage_buffers_dynamic, ) .field( "max_descriptor_set_update_after_bind_sampled_images", &self.max_descriptor_set_update_after_bind_sampled_images, ) .field( "max_descriptor_set_update_after_bind_storage_images", &self.max_descriptor_set_update_after_bind_storage_images, ) .field( "max_descriptor_set_update_after_bind_input_attachments", &self.max_descriptor_set_update_after_bind_input_attachments, ) .field( "supported_depth_resolve_modes", &self.supported_depth_resolve_modes, ) .field( "supported_stencil_resolve_modes", &self.supported_stencil_resolve_modes, ) .field("independent_resolve_none", &self.independent_resolve_none) .field("independent_resolve", &self.independent_resolve) .field( "filter_minmax_single_component_formats", &self.filter_minmax_single_component_formats, ) .field( "filter_minmax_image_component_mapping", &self.filter_minmax_image_component_mapping, ) .field( "max_timeline_semaphore_value_difference", &self.max_timeline_semaphore_value_difference, ) .field( "framebuffer_integer_color_sample_counts", &self.framebuffer_integer_color_sample_counts, ) .finish() } } impl ::core::default::Default for PhysicalDeviceVulkan12Properties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), driver_id: DriverId::default(), driver_name: unsafe { ::core::mem::zeroed() }, driver_info: unsafe { ::core::mem::zeroed() }, conformance_version: ConformanceVersion::default(), denorm_behavior_independence: ShaderFloatControlsIndependence::default(), rounding_mode_independence: ShaderFloatControlsIndependence::default(), shader_signed_zero_inf_nan_preserve_float16: Bool32::default(), shader_signed_zero_inf_nan_preserve_float32: Bool32::default(), shader_signed_zero_inf_nan_preserve_float64: Bool32::default(), shader_denorm_preserve_float16: Bool32::default(), shader_denorm_preserve_float32: Bool32::default(), shader_denorm_preserve_float64: Bool32::default(), shader_denorm_flush_to_zero_float16: Bool32::default(), shader_denorm_flush_to_zero_float32: Bool32::default(), shader_denorm_flush_to_zero_float64: Bool32::default(), shader_rounding_mode_rte_float16: Bool32::default(), shader_rounding_mode_rte_float32: Bool32::default(), shader_rounding_mode_rte_float64: Bool32::default(), shader_rounding_mode_rtz_float16: Bool32::default(), shader_rounding_mode_rtz_float32: Bool32::default(), shader_rounding_mode_rtz_float64: Bool32::default(), max_update_after_bind_descriptors_in_all_pools: u32::default(), shader_uniform_buffer_array_non_uniform_indexing_native: Bool32::default(), shader_sampled_image_array_non_uniform_indexing_native: Bool32::default(), 
shader_storage_buffer_array_non_uniform_indexing_native: Bool32::default(), shader_storage_image_array_non_uniform_indexing_native: Bool32::default(), shader_input_attachment_array_non_uniform_indexing_native: Bool32::default(), robust_buffer_access_update_after_bind: Bool32::default(), quad_divergent_implicit_lod: Bool32::default(), max_per_stage_descriptor_update_after_bind_samplers: u32::default(), max_per_stage_descriptor_update_after_bind_uniform_buffers: u32::default(), max_per_stage_descriptor_update_after_bind_storage_buffers: u32::default(), max_per_stage_descriptor_update_after_bind_sampled_images: u32::default(), max_per_stage_descriptor_update_after_bind_storage_images: u32::default(), max_per_stage_descriptor_update_after_bind_input_attachments: u32::default(), max_per_stage_update_after_bind_resources: u32::default(), max_descriptor_set_update_after_bind_samplers: u32::default(), max_descriptor_set_update_after_bind_uniform_buffers: u32::default(), max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32::default(), max_descriptor_set_update_after_bind_storage_buffers: u32::default(), max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32::default(), max_descriptor_set_update_after_bind_sampled_images: u32::default(), max_descriptor_set_update_after_bind_storage_images: u32::default(), max_descriptor_set_update_after_bind_input_attachments: u32::default(), supported_depth_resolve_modes: ResolveModeFlags::default(), supported_stencil_resolve_modes: ResolveModeFlags::default(), independent_resolve_none: Bool32::default(), independent_resolve: Bool32::default(), filter_minmax_single_component_formats: Bool32::default(), filter_minmax_image_component_mapping: Bool32::default(), max_timeline_semaphore_value_difference: u64::default(), framebuffer_integer_color_sample_counts: SampleCountFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkan12Properties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan12Properties<'_> {} impl<'a> PhysicalDeviceVulkan12Properties<'a> { #[inline] pub fn driver_id(mut self, driver_id: DriverId) -> Self { self.driver_id = driver_id; self } #[inline] pub fn driver_name( mut self, driver_name: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.driver_name, driver_name).map(|()| self) } #[inline] pub fn driver_name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.driver_name) } #[inline] pub fn driver_info( mut self, driver_info: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.driver_info, driver_info).map(|()| self) } #[inline] pub fn driver_info_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.driver_info) } #[inline] pub fn conformance_version(mut self, conformance_version: ConformanceVersion) -> Self { self.conformance_version = conformance_version; self } #[inline] pub fn denorm_behavior_independence( mut self, denorm_behavior_independence: ShaderFloatControlsIndependence, ) -> Self { self.denorm_behavior_independence = denorm_behavior_independence; self } #[inline] pub fn rounding_mode_independence( mut self, rounding_mode_independence: ShaderFloatControlsIndependence, ) -> Self { self.rounding_mode_independence = rounding_mode_independence; self } #[inline] pub fn shader_signed_zero_inf_nan_preserve_float16( 
mut self, shader_signed_zero_inf_nan_preserve_float16: bool, ) -> Self { self.shader_signed_zero_inf_nan_preserve_float16 = shader_signed_zero_inf_nan_preserve_float16.into(); self } #[inline] pub fn shader_signed_zero_inf_nan_preserve_float32( mut self, shader_signed_zero_inf_nan_preserve_float32: bool, ) -> Self { self.shader_signed_zero_inf_nan_preserve_float32 = shader_signed_zero_inf_nan_preserve_float32.into(); self } #[inline] pub fn shader_signed_zero_inf_nan_preserve_float64( mut self, shader_signed_zero_inf_nan_preserve_float64: bool, ) -> Self { self.shader_signed_zero_inf_nan_preserve_float64 = shader_signed_zero_inf_nan_preserve_float64.into(); self } #[inline] pub fn shader_denorm_preserve_float16(mut self, shader_denorm_preserve_float16: bool) -> Self { self.shader_denorm_preserve_float16 = shader_denorm_preserve_float16.into(); self } #[inline] pub fn shader_denorm_preserve_float32(mut self, shader_denorm_preserve_float32: bool) -> Self { self.shader_denorm_preserve_float32 = shader_denorm_preserve_float32.into(); self } #[inline] pub fn shader_denorm_preserve_float64(mut self, shader_denorm_preserve_float64: bool) -> Self { self.shader_denorm_preserve_float64 = shader_denorm_preserve_float64.into(); self } #[inline] pub fn shader_denorm_flush_to_zero_float16( mut self, shader_denorm_flush_to_zero_float16: bool, ) -> Self { self.shader_denorm_flush_to_zero_float16 = shader_denorm_flush_to_zero_float16.into(); self } #[inline] pub fn shader_denorm_flush_to_zero_float32( mut self, shader_denorm_flush_to_zero_float32: bool, ) -> Self { self.shader_denorm_flush_to_zero_float32 = shader_denorm_flush_to_zero_float32.into(); self } #[inline] pub fn shader_denorm_flush_to_zero_float64( mut self, shader_denorm_flush_to_zero_float64: bool, ) -> Self { self.shader_denorm_flush_to_zero_float64 = shader_denorm_flush_to_zero_float64.into(); self } #[inline] pub fn shader_rounding_mode_rte_float16( mut self, shader_rounding_mode_rte_float16: bool, ) -> Self { self.shader_rounding_mode_rte_float16 = shader_rounding_mode_rte_float16.into(); self } #[inline] pub fn shader_rounding_mode_rte_float32( mut self, shader_rounding_mode_rte_float32: bool, ) -> Self { self.shader_rounding_mode_rte_float32 = shader_rounding_mode_rte_float32.into(); self } #[inline] pub fn shader_rounding_mode_rte_float64( mut self, shader_rounding_mode_rte_float64: bool, ) -> Self { self.shader_rounding_mode_rte_float64 = shader_rounding_mode_rte_float64.into(); self } #[inline] pub fn shader_rounding_mode_rtz_float16( mut self, shader_rounding_mode_rtz_float16: bool, ) -> Self { self.shader_rounding_mode_rtz_float16 = shader_rounding_mode_rtz_float16.into(); self } #[inline] pub fn shader_rounding_mode_rtz_float32( mut self, shader_rounding_mode_rtz_float32: bool, ) -> Self { self.shader_rounding_mode_rtz_float32 = shader_rounding_mode_rtz_float32.into(); self } #[inline] pub fn shader_rounding_mode_rtz_float64( mut self, shader_rounding_mode_rtz_float64: bool, ) -> Self { self.shader_rounding_mode_rtz_float64 = shader_rounding_mode_rtz_float64.into(); self } #[inline] pub fn max_update_after_bind_descriptors_in_all_pools( mut self, max_update_after_bind_descriptors_in_all_pools: u32, ) -> Self { self.max_update_after_bind_descriptors_in_all_pools = max_update_after_bind_descriptors_in_all_pools; self } #[inline] pub fn shader_uniform_buffer_array_non_uniform_indexing_native( mut self, shader_uniform_buffer_array_non_uniform_indexing_native: bool, ) -> Self { 
self.shader_uniform_buffer_array_non_uniform_indexing_native = shader_uniform_buffer_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_sampled_image_array_non_uniform_indexing_native( mut self, shader_sampled_image_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_sampled_image_array_non_uniform_indexing_native = shader_sampled_image_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_storage_buffer_array_non_uniform_indexing_native( mut self, shader_storage_buffer_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_storage_buffer_array_non_uniform_indexing_native = shader_storage_buffer_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_storage_image_array_non_uniform_indexing_native( mut self, shader_storage_image_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_storage_image_array_non_uniform_indexing_native = shader_storage_image_array_non_uniform_indexing_native.into(); self } #[inline] pub fn shader_input_attachment_array_non_uniform_indexing_native( mut self, shader_input_attachment_array_non_uniform_indexing_native: bool, ) -> Self { self.shader_input_attachment_array_non_uniform_indexing_native = shader_input_attachment_array_non_uniform_indexing_native.into(); self } #[inline] pub fn robust_buffer_access_update_after_bind( mut self, robust_buffer_access_update_after_bind: bool, ) -> Self { self.robust_buffer_access_update_after_bind = robust_buffer_access_update_after_bind.into(); self } #[inline] pub fn quad_divergent_implicit_lod(mut self, quad_divergent_implicit_lod: bool) -> Self { self.quad_divergent_implicit_lod = quad_divergent_implicit_lod.into(); self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_samplers( mut self, max_per_stage_descriptor_update_after_bind_samplers: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_samplers = max_per_stage_descriptor_update_after_bind_samplers; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_uniform_buffers( mut self, max_per_stage_descriptor_update_after_bind_uniform_buffers: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_uniform_buffers = max_per_stage_descriptor_update_after_bind_uniform_buffers; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_storage_buffers( mut self, max_per_stage_descriptor_update_after_bind_storage_buffers: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_storage_buffers = max_per_stage_descriptor_update_after_bind_storage_buffers; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_sampled_images( mut self, max_per_stage_descriptor_update_after_bind_sampled_images: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_sampled_images = max_per_stage_descriptor_update_after_bind_sampled_images; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_storage_images( mut self, max_per_stage_descriptor_update_after_bind_storage_images: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_storage_images = max_per_stage_descriptor_update_after_bind_storage_images; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_input_attachments( mut self, max_per_stage_descriptor_update_after_bind_input_attachments: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_input_attachments = max_per_stage_descriptor_update_after_bind_input_attachments; self } #[inline] pub fn max_per_stage_update_after_bind_resources( mut self, 
max_per_stage_update_after_bind_resources: u32, ) -> Self { self.max_per_stage_update_after_bind_resources = max_per_stage_update_after_bind_resources; self } #[inline] pub fn max_descriptor_set_update_after_bind_samplers( mut self, max_descriptor_set_update_after_bind_samplers: u32, ) -> Self { self.max_descriptor_set_update_after_bind_samplers = max_descriptor_set_update_after_bind_samplers; self } #[inline] pub fn max_descriptor_set_update_after_bind_uniform_buffers( mut self, max_descriptor_set_update_after_bind_uniform_buffers: u32, ) -> Self { self.max_descriptor_set_update_after_bind_uniform_buffers = max_descriptor_set_update_after_bind_uniform_buffers; self } #[inline] pub fn max_descriptor_set_update_after_bind_uniform_buffers_dynamic( mut self, max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32, ) -> Self { self.max_descriptor_set_update_after_bind_uniform_buffers_dynamic = max_descriptor_set_update_after_bind_uniform_buffers_dynamic; self } #[inline] pub fn max_descriptor_set_update_after_bind_storage_buffers( mut self, max_descriptor_set_update_after_bind_storage_buffers: u32, ) -> Self { self.max_descriptor_set_update_after_bind_storage_buffers = max_descriptor_set_update_after_bind_storage_buffers; self } #[inline] pub fn max_descriptor_set_update_after_bind_storage_buffers_dynamic( mut self, max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32, ) -> Self { self.max_descriptor_set_update_after_bind_storage_buffers_dynamic = max_descriptor_set_update_after_bind_storage_buffers_dynamic; self } #[inline] pub fn max_descriptor_set_update_after_bind_sampled_images( mut self, max_descriptor_set_update_after_bind_sampled_images: u32, ) -> Self { self.max_descriptor_set_update_after_bind_sampled_images = max_descriptor_set_update_after_bind_sampled_images; self } #[inline] pub fn max_descriptor_set_update_after_bind_storage_images( mut self, max_descriptor_set_update_after_bind_storage_images: u32, ) -> Self { self.max_descriptor_set_update_after_bind_storage_images = max_descriptor_set_update_after_bind_storage_images; self } #[inline] pub fn max_descriptor_set_update_after_bind_input_attachments( mut self, max_descriptor_set_update_after_bind_input_attachments: u32, ) -> Self { self.max_descriptor_set_update_after_bind_input_attachments = max_descriptor_set_update_after_bind_input_attachments; self } #[inline] pub fn supported_depth_resolve_modes( mut self, supported_depth_resolve_modes: ResolveModeFlags, ) -> Self { self.supported_depth_resolve_modes = supported_depth_resolve_modes; self } #[inline] pub fn supported_stencil_resolve_modes( mut self, supported_stencil_resolve_modes: ResolveModeFlags, ) -> Self { self.supported_stencil_resolve_modes = supported_stencil_resolve_modes; self } #[inline] pub fn independent_resolve_none(mut self, independent_resolve_none: bool) -> Self { self.independent_resolve_none = independent_resolve_none.into(); self } #[inline] pub fn independent_resolve(mut self, independent_resolve: bool) -> Self { self.independent_resolve = independent_resolve.into(); self } #[inline] pub fn filter_minmax_single_component_formats( mut self, filter_minmax_single_component_formats: bool, ) -> Self { self.filter_minmax_single_component_formats = filter_minmax_single_component_formats.into(); self } #[inline] pub fn filter_minmax_image_component_mapping( mut self, filter_minmax_image_component_mapping: bool, ) -> Self { self.filter_minmax_image_component_mapping = filter_minmax_image_component_mapping.into(); self } #[inline] pub fn 
max_timeline_semaphore_value_difference( mut self, max_timeline_semaphore_value_difference: u64, ) -> Self { self.max_timeline_semaphore_value_difference = max_timeline_semaphore_value_difference; self } #[inline] pub fn framebuffer_integer_color_sample_counts( mut self, framebuffer_integer_color_sample_counts: SampleCountFlags, ) -> Self { self.framebuffer_integer_color_sample_counts = framebuffer_integer_color_sample_counts; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkan13Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub robust_image_access: Bool32, pub inline_uniform_block: Bool32, pub descriptor_binding_inline_uniform_block_update_after_bind: Bool32, pub pipeline_creation_cache_control: Bool32, pub private_data: Bool32, pub shader_demote_to_helper_invocation: Bool32, pub shader_terminate_invocation: Bool32, pub subgroup_size_control: Bool32, pub compute_full_subgroups: Bool32, pub synchronization2: Bool32, pub texture_compression_astc_hdr: Bool32, pub shader_zero_initialize_workgroup_memory: Bool32, pub dynamic_rendering: Bool32, pub shader_integer_dot_product: Bool32, pub maintenance4: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkan13Features<'_> {} unsafe impl Sync for PhysicalDeviceVulkan13Features<'_> {} impl ::core::default::Default for PhysicalDeviceVulkan13Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), robust_image_access: Bool32::default(), inline_uniform_block: Bool32::default(), descriptor_binding_inline_uniform_block_update_after_bind: Bool32::default(), pipeline_creation_cache_control: Bool32::default(), private_data: Bool32::default(), shader_demote_to_helper_invocation: Bool32::default(), shader_terminate_invocation: Bool32::default(), subgroup_size_control: Bool32::default(), compute_full_subgroups: Bool32::default(), synchronization2: Bool32::default(), texture_compression_astc_hdr: Bool32::default(), shader_zero_initialize_workgroup_memory: Bool32::default(), dynamic_rendering: Bool32::default(), shader_integer_dot_product: Bool32::default(), maintenance4: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkan13Features<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_3_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan13Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan13Features<'_> {} impl<'a> PhysicalDeviceVulkan13Features<'a> { #[inline] pub fn robust_image_access(mut self, robust_image_access: bool) -> Self { self.robust_image_access = robust_image_access.into(); self } #[inline] pub fn inline_uniform_block(mut self, inline_uniform_block: bool) -> Self { self.inline_uniform_block = inline_uniform_block.into(); self } #[inline] pub fn descriptor_binding_inline_uniform_block_update_after_bind( mut self, descriptor_binding_inline_uniform_block_update_after_bind: bool, ) -> Self { self.descriptor_binding_inline_uniform_block_update_after_bind = descriptor_binding_inline_uniform_block_update_after_bind.into(); self } #[inline] pub fn pipeline_creation_cache_control( mut self, pipeline_creation_cache_control: bool, ) -> Self { self.pipeline_creation_cache_control = pipeline_creation_cache_control.into(); self } #[inline] pub fn private_data(mut self, private_data: bool) -> Self { self.private_data = 
private_data.into(); self } #[inline] pub fn shader_demote_to_helper_invocation( mut self, shader_demote_to_helper_invocation: bool, ) -> Self { self.shader_demote_to_helper_invocation = shader_demote_to_helper_invocation.into(); self } #[inline] pub fn shader_terminate_invocation(mut self, shader_terminate_invocation: bool) -> Self { self.shader_terminate_invocation = shader_terminate_invocation.into(); self } #[inline] pub fn subgroup_size_control(mut self, subgroup_size_control: bool) -> Self { self.subgroup_size_control = subgroup_size_control.into(); self } #[inline] pub fn compute_full_subgroups(mut self, compute_full_subgroups: bool) -> Self { self.compute_full_subgroups = compute_full_subgroups.into(); self } #[inline] pub fn synchronization2(mut self, synchronization2: bool) -> Self { self.synchronization2 = synchronization2.into(); self } #[inline] pub fn texture_compression_astc_hdr(mut self, texture_compression_astc_hdr: bool) -> Self { self.texture_compression_astc_hdr = texture_compression_astc_hdr.into(); self } #[inline] pub fn shader_zero_initialize_workgroup_memory( mut self, shader_zero_initialize_workgroup_memory: bool, ) -> Self { self.shader_zero_initialize_workgroup_memory = shader_zero_initialize_workgroup_memory.into(); self } #[inline] pub fn dynamic_rendering(mut self, dynamic_rendering: bool) -> Self { self.dynamic_rendering = dynamic_rendering.into(); self } #[inline] pub fn shader_integer_dot_product(mut self, shader_integer_dot_product: bool) -> Self { self.shader_integer_dot_product = shader_integer_dot_product.into(); self } #[inline] pub fn maintenance4(mut self, maintenance4: bool) -> Self { self.maintenance4 = maintenance4.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVulkan13Properties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_subgroup_size: u32, pub max_subgroup_size: u32, pub max_compute_workgroup_subgroups: u32, pub required_subgroup_size_stages: ShaderStageFlags, pub max_inline_uniform_block_size: u32, pub max_per_stage_descriptor_inline_uniform_blocks: u32, pub max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32, pub max_descriptor_set_inline_uniform_blocks: u32, pub max_descriptor_set_update_after_bind_inline_uniform_blocks: u32, pub max_inline_uniform_total_size: u32, pub integer_dot_product8_bit_unsigned_accelerated: Bool32, pub integer_dot_product8_bit_signed_accelerated: Bool32, pub integer_dot_product8_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product4x8_bit_packed_unsigned_accelerated: Bool32, pub integer_dot_product4x8_bit_packed_signed_accelerated: Bool32, pub integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: Bool32, pub integer_dot_product16_bit_unsigned_accelerated: Bool32, pub integer_dot_product16_bit_signed_accelerated: Bool32, pub integer_dot_product16_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product32_bit_unsigned_accelerated: Bool32, pub integer_dot_product32_bit_signed_accelerated: Bool32, pub integer_dot_product32_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product64_bit_unsigned_accelerated: Bool32, pub integer_dot_product64_bit_signed_accelerated: Bool32, pub integer_dot_product64_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating8_bit_signed_accelerated: Bool32, pub 
integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating16_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating32_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating64_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: Bool32, pub storage_texel_buffer_offset_alignment_bytes: DeviceSize, pub storage_texel_buffer_offset_single_texel_alignment: Bool32, pub uniform_texel_buffer_offset_alignment_bytes: DeviceSize, pub uniform_texel_buffer_offset_single_texel_alignment: Bool32, pub max_buffer_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVulkan13Properties<'_> {} unsafe impl Sync for PhysicalDeviceVulkan13Properties<'_> {} impl ::core::default::Default for PhysicalDeviceVulkan13Properties<'_> { #[inline] fn default() -> Self { Self { s_type : Self :: STRUCTURE_TYPE , p_next : :: core :: ptr :: null_mut () , min_subgroup_size : u32 :: default () , max_subgroup_size : u32 :: default () , max_compute_workgroup_subgroups : u32 :: default () , required_subgroup_size_stages : ShaderStageFlags :: default () , max_inline_uniform_block_size : u32 :: default () , max_per_stage_descriptor_inline_uniform_blocks : u32 :: default () , max_per_stage_descriptor_update_after_bind_inline_uniform_blocks : u32 :: default () , max_descriptor_set_inline_uniform_blocks : u32 :: default () , max_descriptor_set_update_after_bind_inline_uniform_blocks : u32 :: default () , max_inline_uniform_total_size : u32 :: default () , integer_dot_product8_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product8_bit_signed_accelerated : Bool32 :: default () , integer_dot_product8_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_unsigned_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_signed_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product16_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product16_bit_signed_accelerated : Bool32 :: default () , integer_dot_product16_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product32_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product32_bit_signed_accelerated : Bool32 :: default () , integer_dot_product32_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product64_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product64_bit_signed_accelerated : Bool32 :: default () , integer_dot_product64_bit_mixed_signedness_accelerated : Bool32 :: default () , 
integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated : Bool32 :: default () , storage_texel_buffer_offset_alignment_bytes : DeviceSize :: default () , storage_texel_buffer_offset_single_texel_alignment : Bool32 :: default () , uniform_texel_buffer_offset_alignment_bytes : DeviceSize :: default () , uniform_texel_buffer_offset_single_texel_alignment : Bool32 :: default () , max_buffer_size : DeviceSize :: default () , _marker : PhantomData , } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVulkan13Properties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan13Properties<'_> {} impl<'a> PhysicalDeviceVulkan13Properties<'a> { #[inline] pub fn min_subgroup_size(mut self, min_subgroup_size: u32) -> Self { self.min_subgroup_size = min_subgroup_size; self } #[inline] pub fn max_subgroup_size(mut self, max_subgroup_size: u32) -> Self { self.max_subgroup_size = max_subgroup_size; self } #[inline] pub fn max_compute_workgroup_subgroups(mut self, max_compute_workgroup_subgroups: u32) -> Self { self.max_compute_workgroup_subgroups = max_compute_workgroup_subgroups; self } #[inline] pub fn required_subgroup_size_stages( mut self, required_subgroup_size_stages: ShaderStageFlags, ) -> Self { self.required_subgroup_size_stages = required_subgroup_size_stages; self } #[inline] pub fn max_inline_uniform_block_size(mut self, max_inline_uniform_block_size: u32) -> Self { self.max_inline_uniform_block_size = max_inline_uniform_block_size; self } #[inline] pub fn max_per_stage_descriptor_inline_uniform_blocks( mut self, max_per_stage_descriptor_inline_uniform_blocks: u32, ) -> Self { self.max_per_stage_descriptor_inline_uniform_blocks = max_per_stage_descriptor_inline_uniform_blocks; self } #[inline] pub fn max_per_stage_descriptor_update_after_bind_inline_uniform_blocks( mut self, max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32, ) -> Self { self.max_per_stage_descriptor_update_after_bind_inline_uniform_blocks = max_per_stage_descriptor_update_after_bind_inline_uniform_blocks; 
self } #[inline] pub fn max_descriptor_set_inline_uniform_blocks( mut self, max_descriptor_set_inline_uniform_blocks: u32, ) -> Self { self.max_descriptor_set_inline_uniform_blocks = max_descriptor_set_inline_uniform_blocks; self } #[inline] pub fn max_descriptor_set_update_after_bind_inline_uniform_blocks( mut self, max_descriptor_set_update_after_bind_inline_uniform_blocks: u32, ) -> Self { self.max_descriptor_set_update_after_bind_inline_uniform_blocks = max_descriptor_set_update_after_bind_inline_uniform_blocks; self } #[inline] pub fn max_inline_uniform_total_size(mut self, max_inline_uniform_total_size: u32) -> Self { self.max_inline_uniform_total_size = max_inline_uniform_total_size; self } #[inline] pub fn integer_dot_product8_bit_unsigned_accelerated( mut self, integer_dot_product8_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product8_bit_unsigned_accelerated = integer_dot_product8_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product8_bit_signed_accelerated( mut self, integer_dot_product8_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product8_bit_signed_accelerated = integer_dot_product8_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product8_bit_mixed_signedness_accelerated( mut self, integer_dot_product8_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product8_bit_mixed_signedness_accelerated = integer_dot_product8_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product4x8_bit_packed_unsigned_accelerated( mut self, integer_dot_product4x8_bit_packed_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product4x8_bit_packed_unsigned_accelerated = integer_dot_product4x8_bit_packed_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product4x8_bit_packed_signed_accelerated( mut self, integer_dot_product4x8_bit_packed_signed_accelerated: bool, ) -> Self { self.integer_dot_product4x8_bit_packed_signed_accelerated = integer_dot_product4x8_bit_packed_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product4x8_bit_packed_mixed_signedness_accelerated( mut self, integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product4x8_bit_packed_mixed_signedness_accelerated = integer_dot_product4x8_bit_packed_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product16_bit_unsigned_accelerated( mut self, integer_dot_product16_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product16_bit_unsigned_accelerated = integer_dot_product16_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product16_bit_signed_accelerated( mut self, integer_dot_product16_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product16_bit_signed_accelerated = integer_dot_product16_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product16_bit_mixed_signedness_accelerated( mut self, integer_dot_product16_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product16_bit_mixed_signedness_accelerated = integer_dot_product16_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product32_bit_unsigned_accelerated( mut self, integer_dot_product32_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product32_bit_unsigned_accelerated = integer_dot_product32_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product32_bit_signed_accelerated( mut self, integer_dot_product32_bit_signed_accelerated: 
bool, ) -> Self { self.integer_dot_product32_bit_signed_accelerated = integer_dot_product32_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product32_bit_mixed_signedness_accelerated( mut self, integer_dot_product32_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product32_bit_mixed_signedness_accelerated = integer_dot_product32_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product64_bit_unsigned_accelerated( mut self, integer_dot_product64_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product64_bit_unsigned_accelerated = integer_dot_product64_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product64_bit_signed_accelerated( mut self, integer_dot_product64_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product64_bit_signed_accelerated = integer_dot_product64_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product64_bit_mixed_signedness_accelerated( mut self, integer_dot_product64_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product64_bit_mixed_signedness_accelerated = integer_dot_product64_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating8_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating8_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating8_bit_signed_accelerated = integer_dot_product_accumulating_saturating8_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated( mut self, integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : bool, ) -> Self { self . integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated . 
into () ; self } #[inline] pub fn integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating16_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating16_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating16_bit_signed_accelerated = integer_dot_product_accumulating_saturating16_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating32_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating32_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating32_bit_signed_accelerated = integer_dot_product_accumulating_saturating32_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating64_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating64_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating64_bit_signed_accelerated = integer_dot_product_accumulating_saturating64_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn storage_texel_buffer_offset_alignment_bytes( mut self, storage_texel_buffer_offset_alignment_bytes: DeviceSize, ) -> Self { self.storage_texel_buffer_offset_alignment_bytes = storage_texel_buffer_offset_alignment_bytes; self } #[inline] pub fn 
storage_texel_buffer_offset_single_texel_alignment( mut self, storage_texel_buffer_offset_single_texel_alignment: bool, ) -> Self { self.storage_texel_buffer_offset_single_texel_alignment = storage_texel_buffer_offset_single_texel_alignment.into(); self } #[inline] pub fn uniform_texel_buffer_offset_alignment_bytes( mut self, uniform_texel_buffer_offset_alignment_bytes: DeviceSize, ) -> Self { self.uniform_texel_buffer_offset_alignment_bytes = uniform_texel_buffer_offset_alignment_bytes; self } #[inline] pub fn uniform_texel_buffer_offset_single_texel_alignment( mut self, uniform_texel_buffer_offset_single_texel_alignment: bool, ) -> Self { self.uniform_texel_buffer_offset_single_texel_alignment = uniform_texel_buffer_offset_single_texel_alignment.into(); self } #[inline] pub fn max_buffer_size(mut self, max_buffer_size: DeviceSize) -> Self { self.max_buffer_size = max_buffer_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineCompilerControlCreateInfoAMD<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub compiler_control_flags: PipelineCompilerControlFlagsAMD, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineCompilerControlCreateInfoAMD<'_> {} unsafe impl Sync for PipelineCompilerControlCreateInfoAMD<'_> {} impl ::core::default::Default for PipelineCompilerControlCreateInfoAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), compiler_control_flags: PipelineCompilerControlFlagsAMD::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineCompilerControlCreateInfoAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCompilerControlCreateInfoAMD<'_> {} unsafe impl ExtendsComputePipelineCreateInfo for PipelineCompilerControlCreateInfoAMD<'_> {} unsafe impl ExtendsExecutionGraphPipelineCreateInfoAMDX for PipelineCompilerControlCreateInfoAMD<'_> { } impl<'a> PipelineCompilerControlCreateInfoAMD<'a> { #[inline] pub fn compiler_control_flags( mut self, compiler_control_flags: PipelineCompilerControlFlagsAMD, ) -> Self { self.compiler_control_flags = compiler_control_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCoherentMemoryFeaturesAMD<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_coherent_memory: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCoherentMemoryFeaturesAMD<'_> {} unsafe impl Sync for PhysicalDeviceCoherentMemoryFeaturesAMD<'_> {} impl ::core::default::Default for PhysicalDeviceCoherentMemoryFeaturesAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_coherent_memory: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCoherentMemoryFeaturesAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCoherentMemoryFeaturesAMD<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCoherentMemoryFeaturesAMD<'_> {} impl<'a> PhysicalDeviceCoherentMemoryFeaturesAMD<'a> { #[inline] pub fn device_coherent_memory(mut self, device_coherent_memory: bool) -> Self { self.device_coherent_memory = 
device_coherent_memory.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceToolProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub name: [c_char; MAX_EXTENSION_NAME_SIZE], pub version: [c_char; MAX_EXTENSION_NAME_SIZE], pub purposes: ToolPurposeFlags, pub description: [c_char; MAX_DESCRIPTION_SIZE], pub layer: [c_char; MAX_EXTENSION_NAME_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceToolProperties<'_> {} unsafe impl Sync for PhysicalDeviceToolProperties<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for PhysicalDeviceToolProperties<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("PhysicalDeviceToolProperties") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("name", &self.name_as_c_str()) .field("version", &self.version_as_c_str()) .field("purposes", &self.purposes) .field("description", &self.description_as_c_str()) .field("layer", &self.layer_as_c_str()) .finish() } } impl ::core::default::Default for PhysicalDeviceToolProperties<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), name: unsafe { ::core::mem::zeroed() }, version: unsafe { ::core::mem::zeroed() }, purposes: ToolPurposeFlags::default(), description: unsafe { ::core::mem::zeroed() }, layer: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceToolProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TOOL_PROPERTIES; } impl<'a> PhysicalDeviceToolProperties<'a> { #[inline] pub fn name(mut self, name: &CStr) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.name, name).map(|()| self) } #[inline] pub fn name_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.name) } #[inline] pub fn version( mut self, version: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.version, version).map(|()| self) } #[inline] pub fn version_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.version) } #[inline] pub fn purposes(mut self, purposes: ToolPurposeFlags) -> Self { self.purposes = purposes; self } #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } #[inline] pub fn layer(mut self, layer: &CStr) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.layer, layer).map(|()| self) } #[inline] pub fn layer_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.layer) } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerCustomBorderColorCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub custom_border_color: ClearColorValue, pub format: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerCustomBorderColorCreateInfoEXT<'_> {} unsafe impl Sync for SamplerCustomBorderColorCreateInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for SamplerCustomBorderColorCreateInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("SamplerCustomBorderColorCreateInfoEXT") .field("s_type", 
&self.s_type) .field("p_next", &self.p_next) .field("custom_border_color", &"union") .field("format", &self.format) .finish() } } impl ::core::default::Default for SamplerCustomBorderColorCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), custom_border_color: ClearColorValue::default(), format: Format::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerCustomBorderColorCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT; } unsafe impl ExtendsSamplerCreateInfo for SamplerCustomBorderColorCreateInfoEXT<'_> {} impl<'a> SamplerCustomBorderColorCreateInfoEXT<'a> { #[inline] pub fn custom_border_color(mut self, custom_border_color: ClearColorValue) -> Self { self.custom_border_color = custom_border_color; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCustomBorderColorPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_custom_border_color_samplers: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCustomBorderColorPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceCustomBorderColorPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceCustomBorderColorPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_custom_border_color_samplers: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCustomBorderColorPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCustomBorderColorPropertiesEXT<'_> {} impl<'a> PhysicalDeviceCustomBorderColorPropertiesEXT<'a> { #[inline] pub fn max_custom_border_color_samplers( mut self, max_custom_border_color_samplers: u32, ) -> Self { self.max_custom_border_color_samplers = max_custom_border_color_samplers; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCustomBorderColorFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub custom_border_colors: Bool32, pub custom_border_color_without_format: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCustomBorderColorFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceCustomBorderColorFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceCustomBorderColorFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), custom_border_colors: Bool32::default(), custom_border_color_without_format: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCustomBorderColorFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCustomBorderColorFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCustomBorderColorFeaturesEXT<'_> {} impl<'a> PhysicalDeviceCustomBorderColorFeaturesEXT<'a> { #[inline] pub fn custom_border_colors(mut self, custom_border_colors: bool) -> Self { 
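// The ergonomic `bool` argument is widened to the FFI `Bool32` (a `u32`) via `Into` before being stored.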
self.custom_border_colors = custom_border_colors.into(); self } #[inline] pub fn custom_border_color_without_format( mut self, custom_border_color_without_format: bool, ) -> Self { self.custom_border_color_without_format = custom_border_color_without_format.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerBorderColorComponentMappingCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub components: ComponentMapping, pub srgb: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerBorderColorComponentMappingCreateInfoEXT<'_> {} unsafe impl Sync for SamplerBorderColorComponentMappingCreateInfoEXT<'_> {} impl ::core::default::Default for SamplerBorderColorComponentMappingCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), components: ComponentMapping::default(), srgb: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerBorderColorComponentMappingCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT; } unsafe impl ExtendsSamplerCreateInfo for SamplerBorderColorComponentMappingCreateInfoEXT<'_> {} impl<'a> SamplerBorderColorComponentMappingCreateInfoEXT<'a> { #[inline] pub fn components(mut self, components: ComponentMapping) -> Self { self.components = components; self } #[inline] pub fn srgb(mut self, srgb: bool) -> Self { self.srgb = srgb.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceBorderColorSwizzleFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub border_color_swizzle: Bool32, pub border_color_swizzle_from_image: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceBorderColorSwizzleFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceBorderColorSwizzleFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceBorderColorSwizzleFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), border_color_swizzle: Bool32::default(), border_color_swizzle_from_image: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceBorderColorSwizzleFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBorderColorSwizzleFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBorderColorSwizzleFeaturesEXT<'_> {} impl<'a> PhysicalDeviceBorderColorSwizzleFeaturesEXT<'a> { #[inline] pub fn border_color_swizzle(mut self, border_color_swizzle: bool) -> Self { self.border_color_swizzle = border_color_swizzle.into(); self } #[inline] pub fn border_color_swizzle_from_image( mut self, border_color_swizzle_from_image: bool, ) -> Self { self.border_color_swizzle_from_image = border_color_swizzle_from_image.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union DeviceOrHostAddressKHR { pub device_address: DeviceAddress, pub host_address: *mut c_void, } impl ::core::default::Default for DeviceOrHostAddressKHR { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union DeviceOrHostAddressConstKHR { pub device_address: DeviceAddress, 
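// Which member of this union is read depends on the consuming command: host-side acceleration-structure operations use `host_address`, device-side operations use `device_address`.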
pub host_address: *const c_void, } impl ::core::default::Default for DeviceOrHostAddressConstKHR { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union DeviceOrHostAddressConstAMDX { pub device_address: DeviceAddress, pub host_address: *const c_void, } impl ::core::default::Default for DeviceOrHostAddressConstAMDX { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureGeometryTrianglesDataKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub vertex_format: Format, pub vertex_data: DeviceOrHostAddressConstKHR, pub vertex_stride: DeviceSize, pub max_vertex: u32, pub index_type: IndexType, pub index_data: DeviceOrHostAddressConstKHR, pub transform_data: DeviceOrHostAddressConstKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureGeometryTrianglesDataKHR<'_> {} unsafe impl Sync for AccelerationStructureGeometryTrianglesDataKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureGeometryTrianglesDataKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureGeometryTrianglesDataKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("vertex_format", &self.vertex_format) .field("vertex_data", &"union") .field("vertex_stride", &self.vertex_stride) .field("max_vertex", &self.max_vertex) .field("index_type", &self.index_type) .field("index_data", &"union") .field("transform_data", &"union") .finish() } } impl ::core::default::Default for AccelerationStructureGeometryTrianglesDataKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), vertex_format: Format::default(), vertex_data: DeviceOrHostAddressConstKHR::default(), vertex_stride: DeviceSize::default(), max_vertex: u32::default(), index_type: IndexType::default(), index_data: DeviceOrHostAddressConstKHR::default(), transform_data: DeviceOrHostAddressConstKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureGeometryTrianglesDataKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR; } pub unsafe trait ExtendsAccelerationStructureGeometryTrianglesDataKHR {} impl<'a> AccelerationStructureGeometryTrianglesDataKHR<'a> { #[inline] pub fn vertex_format(mut self, vertex_format: Format) -> Self { self.vertex_format = vertex_format; self } #[inline] pub fn vertex_data(mut self, vertex_data: DeviceOrHostAddressConstKHR) -> Self { self.vertex_data = vertex_data; self } #[inline] pub fn vertex_stride(mut self, vertex_stride: DeviceSize) -> Self { self.vertex_stride = vertex_stride; self } #[inline] pub fn max_vertex(mut self, max_vertex: u32) -> Self { self.max_vertex = max_vertex; self } #[inline] pub fn index_type(mut self, index_type: IndexType) -> Self { self.index_type = index_type; self } #[inline] pub fn index_data(mut self, index_data: DeviceOrHostAddressConstKHR) -> Self { self.index_data = index_data; self } #[inline] pub fn transform_data(mut self, transform_data: DeviceOrHostAddressConstKHR) -> Self { self.transform_data = transform_data; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureGeometryAabbsDataKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub data: DeviceOrHostAddressConstKHR, pub stride: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureGeometryAabbsDataKHR<'_> {} unsafe impl Sync for AccelerationStructureGeometryAabbsDataKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureGeometryAabbsDataKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureGeometryAabbsDataKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("data", &"union") .field("stride", &self.stride) .finish() } } impl ::core::default::Default for AccelerationStructureGeometryAabbsDataKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), data: DeviceOrHostAddressConstKHR::default(), stride: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureGeometryAabbsDataKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR; } impl<'a> AccelerationStructureGeometryAabbsDataKHR<'a> { #[inline] pub fn data(mut self, data: DeviceOrHostAddressConstKHR) -> Self { self.data = data; self } #[inline] pub fn stride(mut self, stride: DeviceSize) -> Self { self.stride = stride; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureGeometryInstancesDataKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub array_of_pointers: Bool32, pub data: DeviceOrHostAddressConstKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureGeometryInstancesDataKHR<'_> {} unsafe impl Sync for AccelerationStructureGeometryInstancesDataKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureGeometryInstancesDataKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureGeometryInstancesDataKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("array_of_pointers", &self.array_of_pointers) .field("data", &"union") .finish() } } impl ::core::default::Default for AccelerationStructureGeometryInstancesDataKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), array_of_pointers: Bool32::default(), data: DeviceOrHostAddressConstKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureGeometryInstancesDataKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR; } impl<'a> AccelerationStructureGeometryInstancesDataKHR<'a> { #[inline] pub fn array_of_pointers(mut self, array_of_pointers: bool) -> Self { self.array_of_pointers = array_of_pointers.into(); self } #[inline] pub fn data(mut self, data: DeviceOrHostAddressConstKHR) -> Self { self.data = data; self } } 
#[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union AccelerationStructureGeometryDataKHR<'a> { pub triangles: AccelerationStructureGeometryTrianglesDataKHR<'a>, pub aabbs: AccelerationStructureGeometryAabbsDataKHR<'a>, pub instances: AccelerationStructureGeometryInstancesDataKHR<'a>, } impl<'a> ::core::default::Default for AccelerationStructureGeometryDataKHR<'a> { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureGeometryKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub geometry_type: GeometryTypeKHR, pub geometry: AccelerationStructureGeometryDataKHR<'a>, pub flags: GeometryFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureGeometryKHR<'_> {} unsafe impl Sync for AccelerationStructureGeometryKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureGeometryKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureGeometryKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("geometry_type", &self.geometry_type) .field("geometry", &"union") .field("flags", &self.flags) .finish() } } impl ::core::default::Default for AccelerationStructureGeometryKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), geometry_type: GeometryTypeKHR::default(), geometry: AccelerationStructureGeometryDataKHR::default(), flags: GeometryFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureGeometryKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_GEOMETRY_KHR; } impl<'a> AccelerationStructureGeometryKHR<'a> { #[inline] pub fn geometry_type(mut self, geometry_type: GeometryTypeKHR) -> Self { self.geometry_type = geometry_type; self } #[inline] pub fn geometry(mut self, geometry: AccelerationStructureGeometryDataKHR<'a>) -> Self { self.geometry = geometry; self } #[inline] pub fn flags(mut self, flags: GeometryFlagsKHR) -> Self { self.flags = flags; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureBuildGeometryInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: AccelerationStructureTypeKHR, pub flags: BuildAccelerationStructureFlagsKHR, pub mode: BuildAccelerationStructureModeKHR, pub src_acceleration_structure: AccelerationStructureKHR, pub dst_acceleration_structure: AccelerationStructureKHR, pub geometry_count: u32, pub p_geometries: *const AccelerationStructureGeometryKHR<'a>, pub pp_geometries: *const *const AccelerationStructureGeometryKHR<'a>, pub scratch_data: DeviceOrHostAddressKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureBuildGeometryInfoKHR<'_> {} unsafe impl Sync for AccelerationStructureBuildGeometryInfoKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureBuildGeometryInfoKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureBuildGeometryInfoKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("ty", &self.ty) .field("flags", &self.flags) .field("mode", &self.mode) .field( "src_acceleration_structure", &self.src_acceleration_structure, ) .field( "dst_acceleration_structure", &self.dst_acceleration_structure, ) .field("geometry_count", &self.geometry_count) .field("p_geometries", 
&self.p_geometries) .field("pp_geometries", &self.pp_geometries) .field("scratch_data", &"union") .finish() } } impl ::core::default::Default for AccelerationStructureBuildGeometryInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: AccelerationStructureTypeKHR::default(), flags: BuildAccelerationStructureFlagsKHR::default(), mode: BuildAccelerationStructureModeKHR::default(), src_acceleration_structure: AccelerationStructureKHR::default(), dst_acceleration_structure: AccelerationStructureKHR::default(), geometry_count: u32::default(), p_geometries: ::core::ptr::null(), pp_geometries: ::core::ptr::null(), scratch_data: DeviceOrHostAddressKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureBuildGeometryInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR; } impl<'a> AccelerationStructureBuildGeometryInfoKHR<'a> { #[inline] pub fn ty(mut self, ty: AccelerationStructureTypeKHR) -> Self { self.ty = ty; self } #[inline] pub fn flags(mut self, flags: BuildAccelerationStructureFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn mode(mut self, mode: BuildAccelerationStructureModeKHR) -> Self { self.mode = mode; self } #[inline] pub fn src_acceleration_structure( mut self, src_acceleration_structure: AccelerationStructureKHR, ) -> Self { self.src_acceleration_structure = src_acceleration_structure; self } #[inline] pub fn dst_acceleration_structure( mut self, dst_acceleration_structure: AccelerationStructureKHR, ) -> Self { self.dst_acceleration_structure = dst_acceleration_structure; self } #[inline] pub fn geometries(mut self, geometries: &'a [AccelerationStructureGeometryKHR<'a>]) -> Self { self.geometry_count = geometries.len() as _; self.p_geometries = geometries.as_ptr(); self } #[inline] pub fn geometries_ptrs( mut self, geometries_ptrs: &'a [&'a AccelerationStructureGeometryKHR<'a>], ) -> Self { self.geometry_count = geometries_ptrs.len() as _; self.pp_geometries = geometries_ptrs.as_ptr().cast(); self } #[inline] pub fn scratch_data(mut self, scratch_data: DeviceOrHostAddressKHR) -> Self { self.scratch_data = scratch_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct AccelerationStructureBuildRangeInfoKHR { pub primitive_count: u32, pub primitive_offset: u32, pub first_vertex: u32, pub transform_offset: u32, } impl AccelerationStructureBuildRangeInfoKHR { #[inline] pub fn primitive_count(mut self, primitive_count: u32) -> Self { self.primitive_count = primitive_count; self } #[inline] pub fn primitive_offset(mut self, primitive_offset: u32) -> Self { self.primitive_offset = primitive_offset; self } #[inline] pub fn first_vertex(mut self, first_vertex: u32) -> Self { self.first_vertex = first_vertex; self } #[inline] pub fn transform_offset(mut self, transform_offset: u32) -> Self { self.transform_offset = transform_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub create_flags: AccelerationStructureCreateFlagsKHR, pub buffer: Buffer, pub offset: DeviceSize, pub size: DeviceSize, pub ty: AccelerationStructureTypeKHR, pub device_address: DeviceAddress, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
AccelerationStructureCreateInfoKHR<'_> {} unsafe impl Sync for AccelerationStructureCreateInfoKHR<'_> {} impl ::core::default::Default for AccelerationStructureCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), create_flags: AccelerationStructureCreateFlagsKHR::default(), buffer: Buffer::default(), offset: DeviceSize::default(), size: DeviceSize::default(), ty: AccelerationStructureTypeKHR::default(), device_address: DeviceAddress::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_CREATE_INFO_KHR; } pub unsafe trait ExtendsAccelerationStructureCreateInfoKHR {} impl<'a> AccelerationStructureCreateInfoKHR<'a> { #[inline] pub fn create_flags(mut self, create_flags: AccelerationStructureCreateFlagsKHR) -> Self { self.create_flags = create_flags; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn ty(mut self, ty: AccelerationStructureTypeKHR) -> Self { self.ty = ty; self } #[inline] pub fn device_address(mut self, device_address: DeviceAddress) -> Self { self.device_address = device_address; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct AabbPositionsKHR { pub min_x: f32, pub min_y: f32, pub min_z: f32, pub max_x: f32, pub max_y: f32, pub max_z: f32, } impl AabbPositionsKHR { #[inline] pub fn min_x(mut self, min_x: f32) -> Self { self.min_x = min_x; self } #[inline] pub fn min_y(mut self, min_y: f32) -> Self { self.min_y = min_y; self } #[inline] pub fn min_z(mut self, min_z: f32) -> Self { self.min_z = min_z; self } #[inline] pub fn max_x(mut self, max_x: f32) -> Self { self.max_x = max_x; self } #[inline] pub fn max_y(mut self, max_y: f32) -> Self { self.max_y = max_y; self } #[inline] pub fn max_z(mut self, max_z: f32) -> Self { self.max_z = max_z; self } } #[repr(C)] #[derive(Copy, Clone)] pub struct TransformMatrixKHR { pub matrix: [f32; 12], } #[repr(C)] #[derive(Copy, Clone)] pub union AccelerationStructureReferenceKHR { pub device_handle: DeviceAddress, pub host_handle: AccelerationStructureKHR, } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub struct AccelerationStructureInstanceKHR { pub transform: TransformMatrixKHR, #[doc = r" Use [`Packed24_8::new(instance_custom_index, mask)`][Packed24_8::new()] to construct this field"] pub instance_custom_index_and_mask: Packed24_8, #[doc = r" Use [`Packed24_8::new(instance_shader_binding_table_record_offset, flags)`][Packed24_8::new()] to construct this field"] pub 
instance_shader_binding_table_record_offset_and_flags: Packed24_8, pub acceleration_structure_reference: AccelerationStructureReferenceKHR, } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureDeviceAddressInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acceleration_structure: AccelerationStructureKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureDeviceAddressInfoKHR<'_> {} unsafe impl Sync for AccelerationStructureDeviceAddressInfoKHR<'_> {} impl ::core::default::Default for AccelerationStructureDeviceAddressInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acceleration_structure: AccelerationStructureKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureDeviceAddressInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR; } impl<'a> AccelerationStructureDeviceAddressInfoKHR<'a> { #[inline] pub fn acceleration_structure( mut self, acceleration_structure: AccelerationStructureKHR, ) -> Self { self.acceleration_structure = acceleration_structure; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureVersionInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_version_data: *const [u8; 2 * UUID_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureVersionInfoKHR<'_> {} unsafe impl Sync for AccelerationStructureVersionInfoKHR<'_> {} impl ::core::default::Default for AccelerationStructureVersionInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_version_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureVersionInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_VERSION_INFO_KHR; } impl<'a> AccelerationStructureVersionInfoKHR<'a> { #[inline] pub fn version_data(mut self, version_data: &'a [u8; 2 * UUID_SIZE]) -> Self { self.p_version_data = version_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyAccelerationStructureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src: AccelerationStructureKHR, pub dst: AccelerationStructureKHR, pub mode: CopyAccelerationStructureModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyAccelerationStructureInfoKHR<'_> {} unsafe impl Sync for CopyAccelerationStructureInfoKHR<'_> {} impl ::core::default::Default for CopyAccelerationStructureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src: AccelerationStructureKHR::default(), dst: AccelerationStructureKHR::default(), mode: CopyAccelerationStructureModeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyAccelerationStructureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_ACCELERATION_STRUCTURE_INFO_KHR; } impl<'a> CopyAccelerationStructureInfoKHR<'a> { #[inline] pub fn src(mut self, src: AccelerationStructureKHR) -> Self { self.src = src; self } #[inline] pub fn dst(mut self, dst: AccelerationStructureKHR) -> Self { self.dst = dst; self } #[inline] pub fn mode(mut self, 
mode: CopyAccelerationStructureModeKHR) -> Self { self.mode = mode; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyAccelerationStructureToMemoryInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src: AccelerationStructureKHR, pub dst: DeviceOrHostAddressKHR, pub mode: CopyAccelerationStructureModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyAccelerationStructureToMemoryInfoKHR<'_> {} unsafe impl Sync for CopyAccelerationStructureToMemoryInfoKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for CopyAccelerationStructureToMemoryInfoKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("CopyAccelerationStructureToMemoryInfoKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("src", &self.src) .field("dst", &"union") .field("mode", &self.mode) .finish() } } impl ::core::default::Default for CopyAccelerationStructureToMemoryInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src: AccelerationStructureKHR::default(), dst: DeviceOrHostAddressKHR::default(), mode: CopyAccelerationStructureModeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyAccelerationStructureToMemoryInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR; } impl<'a> CopyAccelerationStructureToMemoryInfoKHR<'a> { #[inline] pub fn src(mut self, src: AccelerationStructureKHR) -> Self { self.src = src; self } #[inline] pub fn dst(mut self, dst: DeviceOrHostAddressKHR) -> Self { self.dst = dst; self } #[inline] pub fn mode(mut self, mode: CopyAccelerationStructureModeKHR) -> Self { self.mode = mode; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyMemoryToAccelerationStructureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src: DeviceOrHostAddressConstKHR, pub dst: AccelerationStructureKHR, pub mode: CopyAccelerationStructureModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyMemoryToAccelerationStructureInfoKHR<'_> {} unsafe impl Sync for CopyMemoryToAccelerationStructureInfoKHR<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for CopyMemoryToAccelerationStructureInfoKHR<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("CopyMemoryToAccelerationStructureInfoKHR") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("src", &"union") .field("dst", &self.dst) .field("mode", &self.mode) .finish() } } impl ::core::default::Default for CopyMemoryToAccelerationStructureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src: DeviceOrHostAddressConstKHR::default(), dst: AccelerationStructureKHR::default(), mode: CopyAccelerationStructureModeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyMemoryToAccelerationStructureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR; } impl<'a> CopyMemoryToAccelerationStructureInfoKHR<'a> { #[inline] pub fn src(mut self, src: DeviceOrHostAddressConstKHR) -> Self { self.src = src; self } #[inline] pub fn dst(mut self, dst: AccelerationStructureKHR) -> Self { self.dst = dst; self } #[inline] pub fn mode(mut self, mode: CopyAccelerationStructureModeKHR) -> Self { self.mode = mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] 
#[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RayTracingPipelineInterfaceCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub max_pipeline_ray_payload_size: u32, pub max_pipeline_ray_hit_attribute_size: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RayTracingPipelineInterfaceCreateInfoKHR<'_> {} unsafe impl Sync for RayTracingPipelineInterfaceCreateInfoKHR<'_> {} impl ::core::default::Default for RayTracingPipelineInterfaceCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_pipeline_ray_payload_size: u32::default(), max_pipeline_ray_hit_attribute_size: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RayTracingPipelineInterfaceCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR; } impl<'a> RayTracingPipelineInterfaceCreateInfoKHR<'a> { #[inline] pub fn max_pipeline_ray_payload_size(mut self, max_pipeline_ray_payload_size: u32) -> Self { self.max_pipeline_ray_payload_size = max_pipeline_ray_payload_size; self } #[inline] pub fn max_pipeline_ray_hit_attribute_size( mut self, max_pipeline_ray_hit_attribute_size: u32, ) -> Self { self.max_pipeline_ray_hit_attribute_size = max_pipeline_ray_hit_attribute_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineLibraryCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub library_count: u32, pub p_libraries: *const Pipeline, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineLibraryCreateInfoKHR<'_> {} unsafe impl Sync for PipelineLibraryCreateInfoKHR<'_> {} impl ::core::default::Default for PipelineLibraryCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), library_count: u32::default(), p_libraries: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineLibraryCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_LIBRARY_CREATE_INFO_KHR; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineLibraryCreateInfoKHR<'_> {} impl<'a> PipelineLibraryCreateInfoKHR<'a> { #[inline] pub fn libraries(mut self, libraries: &'a [Pipeline]) -> Self { self.library_count = libraries.len() as _; self.p_libraries = libraries.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExtendedDynamicStateFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub extended_dynamic_state: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExtendedDynamicStateFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceExtendedDynamicStateFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceExtendedDynamicStateFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), extended_dynamic_state: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExtendedDynamicStateFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExtendedDynamicStateFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for 
PhysicalDeviceExtendedDynamicStateFeaturesEXT<'_> {} impl<'a> PhysicalDeviceExtendedDynamicStateFeaturesEXT<'a> { #[inline] pub fn extended_dynamic_state(mut self, extended_dynamic_state: bool) -> Self { self.extended_dynamic_state = extended_dynamic_state.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExtendedDynamicState2FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub extended_dynamic_state2: Bool32, pub extended_dynamic_state2_logic_op: Bool32, pub extended_dynamic_state2_patch_control_points: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExtendedDynamicState2FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceExtendedDynamicState2FeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceExtendedDynamicState2FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), extended_dynamic_state2: Bool32::default(), extended_dynamic_state2_logic_op: Bool32::default(), extended_dynamic_state2_patch_control_points: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExtendedDynamicState2FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExtendedDynamicState2FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicState2FeaturesEXT<'_> {} impl<'a> PhysicalDeviceExtendedDynamicState2FeaturesEXT<'a> { #[inline] pub fn extended_dynamic_state2(mut self, extended_dynamic_state2: bool) -> Self { self.extended_dynamic_state2 = extended_dynamic_state2.into(); self } #[inline] pub fn extended_dynamic_state2_logic_op( mut self, extended_dynamic_state2_logic_op: bool, ) -> Self { self.extended_dynamic_state2_logic_op = extended_dynamic_state2_logic_op.into(); self } #[inline] pub fn extended_dynamic_state2_patch_control_points( mut self, extended_dynamic_state2_patch_control_points: bool, ) -> Self { self.extended_dynamic_state2_patch_control_points = extended_dynamic_state2_patch_control_points.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExtendedDynamicState3FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub extended_dynamic_state3_tessellation_domain_origin: Bool32, pub extended_dynamic_state3_depth_clamp_enable: Bool32, pub extended_dynamic_state3_polygon_mode: Bool32, pub extended_dynamic_state3_rasterization_samples: Bool32, pub extended_dynamic_state3_sample_mask: Bool32, pub extended_dynamic_state3_alpha_to_coverage_enable: Bool32, pub extended_dynamic_state3_alpha_to_one_enable: Bool32, pub extended_dynamic_state3_logic_op_enable: Bool32, pub extended_dynamic_state3_color_blend_enable: Bool32, pub extended_dynamic_state3_color_blend_equation: Bool32, pub extended_dynamic_state3_color_write_mask: Bool32, pub extended_dynamic_state3_rasterization_stream: Bool32, pub extended_dynamic_state3_conservative_rasterization_mode: Bool32, pub extended_dynamic_state3_extra_primitive_overestimation_size: Bool32, pub extended_dynamic_state3_depth_clip_enable: Bool32, pub extended_dynamic_state3_sample_locations_enable: Bool32, pub extended_dynamic_state3_color_blend_advanced: Bool32, pub extended_dynamic_state3_provoking_vertex_mode: Bool32, pub 
extended_dynamic_state3_line_rasterization_mode: Bool32, pub extended_dynamic_state3_line_stipple_enable: Bool32, pub extended_dynamic_state3_depth_clip_negative_one_to_one: Bool32, pub extended_dynamic_state3_viewport_w_scaling_enable: Bool32, pub extended_dynamic_state3_viewport_swizzle: Bool32, pub extended_dynamic_state3_coverage_to_color_enable: Bool32, pub extended_dynamic_state3_coverage_to_color_location: Bool32, pub extended_dynamic_state3_coverage_modulation_mode: Bool32, pub extended_dynamic_state3_coverage_modulation_table_enable: Bool32, pub extended_dynamic_state3_coverage_modulation_table: Bool32, pub extended_dynamic_state3_coverage_reduction_mode: Bool32, pub extended_dynamic_state3_representative_fragment_test_enable: Bool32, pub extended_dynamic_state3_shading_rate_image_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExtendedDynamicState3FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceExtendedDynamicState3FeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceExtendedDynamicState3FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), extended_dynamic_state3_tessellation_domain_origin: Bool32::default(), extended_dynamic_state3_depth_clamp_enable: Bool32::default(), extended_dynamic_state3_polygon_mode: Bool32::default(), extended_dynamic_state3_rasterization_samples: Bool32::default(), extended_dynamic_state3_sample_mask: Bool32::default(), extended_dynamic_state3_alpha_to_coverage_enable: Bool32::default(), extended_dynamic_state3_alpha_to_one_enable: Bool32::default(), extended_dynamic_state3_logic_op_enable: Bool32::default(), extended_dynamic_state3_color_blend_enable: Bool32::default(), extended_dynamic_state3_color_blend_equation: Bool32::default(), extended_dynamic_state3_color_write_mask: Bool32::default(), extended_dynamic_state3_rasterization_stream: Bool32::default(), extended_dynamic_state3_conservative_rasterization_mode: Bool32::default(), extended_dynamic_state3_extra_primitive_overestimation_size: Bool32::default(), extended_dynamic_state3_depth_clip_enable: Bool32::default(), extended_dynamic_state3_sample_locations_enable: Bool32::default(), extended_dynamic_state3_color_blend_advanced: Bool32::default(), extended_dynamic_state3_provoking_vertex_mode: Bool32::default(), extended_dynamic_state3_line_rasterization_mode: Bool32::default(), extended_dynamic_state3_line_stipple_enable: Bool32::default(), extended_dynamic_state3_depth_clip_negative_one_to_one: Bool32::default(), extended_dynamic_state3_viewport_w_scaling_enable: Bool32::default(), extended_dynamic_state3_viewport_swizzle: Bool32::default(), extended_dynamic_state3_coverage_to_color_enable: Bool32::default(), extended_dynamic_state3_coverage_to_color_location: Bool32::default(), extended_dynamic_state3_coverage_modulation_mode: Bool32::default(), extended_dynamic_state3_coverage_modulation_table_enable: Bool32::default(), extended_dynamic_state3_coverage_modulation_table: Bool32::default(), extended_dynamic_state3_coverage_reduction_mode: Bool32::default(), extended_dynamic_state3_representative_fragment_test_enable: Bool32::default(), extended_dynamic_state3_shading_rate_image_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExtendedDynamicState3FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 
for PhysicalDeviceExtendedDynamicState3FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicState3FeaturesEXT<'_> {} impl<'a> PhysicalDeviceExtendedDynamicState3FeaturesEXT<'a> { #[inline] pub fn extended_dynamic_state3_tessellation_domain_origin( mut self, extended_dynamic_state3_tessellation_domain_origin: bool, ) -> Self { self.extended_dynamic_state3_tessellation_domain_origin = extended_dynamic_state3_tessellation_domain_origin.into(); self } #[inline] pub fn extended_dynamic_state3_depth_clamp_enable( mut self, extended_dynamic_state3_depth_clamp_enable: bool, ) -> Self { self.extended_dynamic_state3_depth_clamp_enable = extended_dynamic_state3_depth_clamp_enable.into(); self } #[inline] pub fn extended_dynamic_state3_polygon_mode( mut self, extended_dynamic_state3_polygon_mode: bool, ) -> Self { self.extended_dynamic_state3_polygon_mode = extended_dynamic_state3_polygon_mode.into(); self } #[inline] pub fn extended_dynamic_state3_rasterization_samples( mut self, extended_dynamic_state3_rasterization_samples: bool, ) -> Self { self.extended_dynamic_state3_rasterization_samples = extended_dynamic_state3_rasterization_samples.into(); self } #[inline] pub fn extended_dynamic_state3_sample_mask( mut self, extended_dynamic_state3_sample_mask: bool, ) -> Self { self.extended_dynamic_state3_sample_mask = extended_dynamic_state3_sample_mask.into(); self } #[inline] pub fn extended_dynamic_state3_alpha_to_coverage_enable( mut self, extended_dynamic_state3_alpha_to_coverage_enable: bool, ) -> Self { self.extended_dynamic_state3_alpha_to_coverage_enable = extended_dynamic_state3_alpha_to_coverage_enable.into(); self } #[inline] pub fn extended_dynamic_state3_alpha_to_one_enable( mut self, extended_dynamic_state3_alpha_to_one_enable: bool, ) -> Self { self.extended_dynamic_state3_alpha_to_one_enable = extended_dynamic_state3_alpha_to_one_enable.into(); self } #[inline] pub fn extended_dynamic_state3_logic_op_enable( mut self, extended_dynamic_state3_logic_op_enable: bool, ) -> Self { self.extended_dynamic_state3_logic_op_enable = extended_dynamic_state3_logic_op_enable.into(); self } #[inline] pub fn extended_dynamic_state3_color_blend_enable( mut self, extended_dynamic_state3_color_blend_enable: bool, ) -> Self { self.extended_dynamic_state3_color_blend_enable = extended_dynamic_state3_color_blend_enable.into(); self } #[inline] pub fn extended_dynamic_state3_color_blend_equation( mut self, extended_dynamic_state3_color_blend_equation: bool, ) -> Self { self.extended_dynamic_state3_color_blend_equation = extended_dynamic_state3_color_blend_equation.into(); self } #[inline] pub fn extended_dynamic_state3_color_write_mask( mut self, extended_dynamic_state3_color_write_mask: bool, ) -> Self { self.extended_dynamic_state3_color_write_mask = extended_dynamic_state3_color_write_mask.into(); self } #[inline] pub fn extended_dynamic_state3_rasterization_stream( mut self, extended_dynamic_state3_rasterization_stream: bool, ) -> Self { self.extended_dynamic_state3_rasterization_stream = extended_dynamic_state3_rasterization_stream.into(); self } #[inline] pub fn extended_dynamic_state3_conservative_rasterization_mode( mut self, extended_dynamic_state3_conservative_rasterization_mode: bool, ) -> Self { self.extended_dynamic_state3_conservative_rasterization_mode = extended_dynamic_state3_conservative_rasterization_mode.into(); self } #[inline] pub fn extended_dynamic_state3_extra_primitive_overestimation_size( mut self, 
extended_dynamic_state3_extra_primitive_overestimation_size: bool, ) -> Self { self.extended_dynamic_state3_extra_primitive_overestimation_size = extended_dynamic_state3_extra_primitive_overestimation_size.into(); self } #[inline] pub fn extended_dynamic_state3_depth_clip_enable( mut self, extended_dynamic_state3_depth_clip_enable: bool, ) -> Self { self.extended_dynamic_state3_depth_clip_enable = extended_dynamic_state3_depth_clip_enable.into(); self } #[inline] pub fn extended_dynamic_state3_sample_locations_enable( mut self, extended_dynamic_state3_sample_locations_enable: bool, ) -> Self { self.extended_dynamic_state3_sample_locations_enable = extended_dynamic_state3_sample_locations_enable.into(); self } #[inline] pub fn extended_dynamic_state3_color_blend_advanced( mut self, extended_dynamic_state3_color_blend_advanced: bool, ) -> Self { self.extended_dynamic_state3_color_blend_advanced = extended_dynamic_state3_color_blend_advanced.into(); self } #[inline] pub fn extended_dynamic_state3_provoking_vertex_mode( mut self, extended_dynamic_state3_provoking_vertex_mode: bool, ) -> Self { self.extended_dynamic_state3_provoking_vertex_mode = extended_dynamic_state3_provoking_vertex_mode.into(); self } #[inline] pub fn extended_dynamic_state3_line_rasterization_mode( mut self, extended_dynamic_state3_line_rasterization_mode: bool, ) -> Self { self.extended_dynamic_state3_line_rasterization_mode = extended_dynamic_state3_line_rasterization_mode.into(); self } #[inline] pub fn extended_dynamic_state3_line_stipple_enable( mut self, extended_dynamic_state3_line_stipple_enable: bool, ) -> Self { self.extended_dynamic_state3_line_stipple_enable = extended_dynamic_state3_line_stipple_enable.into(); self } #[inline] pub fn extended_dynamic_state3_depth_clip_negative_one_to_one( mut self, extended_dynamic_state3_depth_clip_negative_one_to_one: bool, ) -> Self { self.extended_dynamic_state3_depth_clip_negative_one_to_one = extended_dynamic_state3_depth_clip_negative_one_to_one.into(); self } #[inline] pub fn extended_dynamic_state3_viewport_w_scaling_enable( mut self, extended_dynamic_state3_viewport_w_scaling_enable: bool, ) -> Self { self.extended_dynamic_state3_viewport_w_scaling_enable = extended_dynamic_state3_viewport_w_scaling_enable.into(); self } #[inline] pub fn extended_dynamic_state3_viewport_swizzle( mut self, extended_dynamic_state3_viewport_swizzle: bool, ) -> Self { self.extended_dynamic_state3_viewport_swizzle = extended_dynamic_state3_viewport_swizzle.into(); self } #[inline] pub fn extended_dynamic_state3_coverage_to_color_enable( mut self, extended_dynamic_state3_coverage_to_color_enable: bool, ) -> Self { self.extended_dynamic_state3_coverage_to_color_enable = extended_dynamic_state3_coverage_to_color_enable.into(); self } #[inline] pub fn extended_dynamic_state3_coverage_to_color_location( mut self, extended_dynamic_state3_coverage_to_color_location: bool, ) -> Self { self.extended_dynamic_state3_coverage_to_color_location = extended_dynamic_state3_coverage_to_color_location.into(); self } #[inline] pub fn extended_dynamic_state3_coverage_modulation_mode( mut self, extended_dynamic_state3_coverage_modulation_mode: bool, ) -> Self { self.extended_dynamic_state3_coverage_modulation_mode = extended_dynamic_state3_coverage_modulation_mode.into(); self } #[inline] pub fn extended_dynamic_state3_coverage_modulation_table_enable( mut self, extended_dynamic_state3_coverage_modulation_table_enable: bool, ) -> Self { self.extended_dynamic_state3_coverage_modulation_table_enable = 
extended_dynamic_state3_coverage_modulation_table_enable.into(); self } #[inline] pub fn extended_dynamic_state3_coverage_modulation_table( mut self, extended_dynamic_state3_coverage_modulation_table: bool, ) -> Self { self.extended_dynamic_state3_coverage_modulation_table = extended_dynamic_state3_coverage_modulation_table.into(); self } #[inline] pub fn extended_dynamic_state3_coverage_reduction_mode( mut self, extended_dynamic_state3_coverage_reduction_mode: bool, ) -> Self { self.extended_dynamic_state3_coverage_reduction_mode = extended_dynamic_state3_coverage_reduction_mode.into(); self } #[inline] pub fn extended_dynamic_state3_representative_fragment_test_enable( mut self, extended_dynamic_state3_representative_fragment_test_enable: bool, ) -> Self { self.extended_dynamic_state3_representative_fragment_test_enable = extended_dynamic_state3_representative_fragment_test_enable.into(); self } #[inline] pub fn extended_dynamic_state3_shading_rate_image_enable( mut self, extended_dynamic_state3_shading_rate_image_enable: bool, ) -> Self { self.extended_dynamic_state3_shading_rate_image_enable = extended_dynamic_state3_shading_rate_image_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExtendedDynamicState3PropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub dynamic_primitive_topology_unrestricted: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExtendedDynamicState3PropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceExtendedDynamicState3PropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceExtendedDynamicState3PropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), dynamic_primitive_topology_unrestricted: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExtendedDynamicState3PropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExtendedDynamicState3PropertiesEXT<'_> { } impl<'a> PhysicalDeviceExtendedDynamicState3PropertiesEXT<'a> { #[inline] pub fn dynamic_primitive_topology_unrestricted( mut self, dynamic_primitive_topology_unrestricted: bool, ) -> Self { self.dynamic_primitive_topology_unrestricted = dynamic_primitive_topology_unrestricted.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ColorBlendEquationEXT { pub src_color_blend_factor: BlendFactor, pub dst_color_blend_factor: BlendFactor, pub color_blend_op: BlendOp, pub src_alpha_blend_factor: BlendFactor, pub dst_alpha_blend_factor: BlendFactor, pub alpha_blend_op: BlendOp, } impl ColorBlendEquationEXT { #[inline] pub fn src_color_blend_factor(mut self, src_color_blend_factor: BlendFactor) -> Self { self.src_color_blend_factor = src_color_blend_factor; self } #[inline] pub fn dst_color_blend_factor(mut self, dst_color_blend_factor: BlendFactor) -> Self { self.dst_color_blend_factor = dst_color_blend_factor; self } #[inline] pub fn color_blend_op(mut self, color_blend_op: BlendOp) -> Self { self.color_blend_op = color_blend_op; self } #[inline] pub fn src_alpha_blend_factor(mut self, src_alpha_blend_factor: BlendFactor) -> Self { self.src_alpha_blend_factor = src_alpha_blend_factor; self } #[inline] pub fn 
dst_alpha_blend_factor(mut self, dst_alpha_blend_factor: BlendFactor) -> Self { self.dst_alpha_blend_factor = dst_alpha_blend_factor; self } #[inline] pub fn alpha_blend_op(mut self, alpha_blend_op: BlendOp) -> Self { self.alpha_blend_op = alpha_blend_op; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct ColorBlendAdvancedEXT { pub advanced_blend_op: BlendOp, pub src_premultiplied: Bool32, pub dst_premultiplied: Bool32, pub blend_overlap: BlendOverlapEXT, pub clamp_results: Bool32, } impl ColorBlendAdvancedEXT { #[inline] pub fn advanced_blend_op(mut self, advanced_blend_op: BlendOp) -> Self { self.advanced_blend_op = advanced_blend_op; self } #[inline] pub fn src_premultiplied(mut self, src_premultiplied: bool) -> Self { self.src_premultiplied = src_premultiplied.into(); self } #[inline] pub fn dst_premultiplied(mut self, dst_premultiplied: bool) -> Self { self.dst_premultiplied = dst_premultiplied.into(); self } #[inline] pub fn blend_overlap(mut self, blend_overlap: BlendOverlapEXT) -> Self { self.blend_overlap = blend_overlap; self } #[inline] pub fn clamp_results(mut self, clamp_results: bool) -> Self { self.clamp_results = clamp_results.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassTransformBeginInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub transform: SurfaceTransformFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassTransformBeginInfoQCOM<'_> {} unsafe impl Sync for RenderPassTransformBeginInfoQCOM<'_> {} impl ::core::default::Default for RenderPassTransformBeginInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), transform: SurfaceTransformFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassTransformBeginInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM; } unsafe impl ExtendsRenderPassBeginInfo for RenderPassTransformBeginInfoQCOM<'_> {} impl<'a> RenderPassTransformBeginInfoQCOM<'a> { #[inline] pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self { self.transform = transform; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyCommandTransformInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub transform: SurfaceTransformFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyCommandTransformInfoQCOM<'_> {} unsafe impl Sync for CopyCommandTransformInfoQCOM<'_> {} impl ::core::default::Default for CopyCommandTransformInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), transform: SurfaceTransformFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyCommandTransformInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_COMMAND_TRANSFORM_INFO_QCOM; } unsafe impl ExtendsBufferImageCopy2 for CopyCommandTransformInfoQCOM<'_> {} unsafe impl ExtendsImageBlit2 for CopyCommandTransformInfoQCOM<'_> {} impl<'a> CopyCommandTransformInfoQCOM<'a> { #[inline] pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self { self.transform = transform; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] 
pub struct CommandBufferInheritanceRenderPassTransformInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub transform: SurfaceTransformFlagsKHR, pub render_area: Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferInheritanceRenderPassTransformInfoQCOM<'_> {} unsafe impl Sync for CommandBufferInheritanceRenderPassTransformInfoQCOM<'_> {} impl ::core::default::Default for CommandBufferInheritanceRenderPassTransformInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), transform: SurfaceTransformFlagsKHR::default(), render_area: Rect2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferInheritanceRenderPassTransformInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM; } unsafe impl ExtendsCommandBufferInheritanceInfo for CommandBufferInheritanceRenderPassTransformInfoQCOM<'_> { } impl<'a> CommandBufferInheritanceRenderPassTransformInfoQCOM<'a> { #[inline] pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self { self.transform = transform; self } #[inline] pub fn render_area(mut self, render_area: Rect2D) -> Self { self.render_area = render_area; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDiagnosticsConfigFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub diagnostics_config: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDiagnosticsConfigFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceDiagnosticsConfigFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDiagnosticsConfigFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), diagnostics_config: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDiagnosticsConfigFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDiagnosticsConfigFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDiagnosticsConfigFeaturesNV<'_> {} impl<'a> PhysicalDeviceDiagnosticsConfigFeaturesNV<'a> { #[inline] pub fn diagnostics_config(mut self, diagnostics_config: bool) -> Self { self.diagnostics_config = diagnostics_config.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceDiagnosticsConfigCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: DeviceDiagnosticsConfigFlagsNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceDiagnosticsConfigCreateInfoNV<'_> {} unsafe impl Sync for DeviceDiagnosticsConfigCreateInfoNV<'_> {} impl ::core::default::Default for DeviceDiagnosticsConfigCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: DeviceDiagnosticsConfigFlagsNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceDiagnosticsConfigCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV; } unsafe impl ExtendsDeviceCreateInfo for DeviceDiagnosticsConfigCreateInfoNV<'_> {} impl<'a> DeviceDiagnosticsConfigCreateInfoNV<'a> { #[inline] pub fn 
flags(mut self, flags: DeviceDiagnosticsConfigFlagsNV) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_zero_initialize_workgroup_memory: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'_> {} unsafe impl Sync for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_zero_initialize_workgroup_memory: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'_> {} impl<'a> PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures<'a> { #[inline] pub fn shader_zero_initialize_workgroup_memory( mut self, shader_zero_initialize_workgroup_memory: bool, ) -> Self { self.shader_zero_initialize_workgroup_memory = shader_zero_initialize_workgroup_memory.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_subgroup_uniform_control_flow: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_subgroup_uniform_control_flow: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'_> { } impl<'a> PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR<'a> { #[inline] pub fn shader_subgroup_uniform_control_flow( mut self, shader_subgroup_uniform_control_flow: bool, ) -> Self { self.shader_subgroup_uniform_control_flow = shader_subgroup_uniform_control_flow.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRobustness2FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub robust_buffer_access2: Bool32, pub robust_image_access2: Bool32, pub null_descriptor: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRobustness2FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceRobustness2FeaturesEXT<'_> {} impl 
::core::default::Default for PhysicalDeviceRobustness2FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), robust_buffer_access2: Bool32::default(), robust_image_access2: Bool32::default(), null_descriptor: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRobustness2FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRobustness2FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRobustness2FeaturesEXT<'_> {} impl<'a> PhysicalDeviceRobustness2FeaturesEXT<'a> { #[inline] pub fn robust_buffer_access2(mut self, robust_buffer_access2: bool) -> Self { self.robust_buffer_access2 = robust_buffer_access2.into(); self } #[inline] pub fn robust_image_access2(mut self, robust_image_access2: bool) -> Self { self.robust_image_access2 = robust_image_access2.into(); self } #[inline] pub fn null_descriptor(mut self, null_descriptor: bool) -> Self { self.null_descriptor = null_descriptor.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRobustness2PropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub robust_storage_buffer_access_size_alignment: DeviceSize, pub robust_uniform_buffer_access_size_alignment: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRobustness2PropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceRobustness2PropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceRobustness2PropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), robust_storage_buffer_access_size_alignment: DeviceSize::default(), robust_uniform_buffer_access_size_alignment: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRobustness2PropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRobustness2PropertiesEXT<'_> {} impl<'a> PhysicalDeviceRobustness2PropertiesEXT<'a> { #[inline] pub fn robust_storage_buffer_access_size_alignment( mut self, robust_storage_buffer_access_size_alignment: DeviceSize, ) -> Self { self.robust_storage_buffer_access_size_alignment = robust_storage_buffer_access_size_alignment; self } #[inline] pub fn robust_uniform_buffer_access_size_alignment( mut self, robust_uniform_buffer_access_size_alignment: DeviceSize, ) -> Self { self.robust_uniform_buffer_access_size_alignment = robust_uniform_buffer_access_size_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageRobustnessFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub robust_image_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageRobustnessFeatures<'_> {} unsafe impl Sync for PhysicalDeviceImageRobustnessFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceImageRobustnessFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), robust_image_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for 
PhysicalDeviceImageRobustnessFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageRobustnessFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageRobustnessFeatures<'_> {} impl<'a> PhysicalDeviceImageRobustnessFeatures<'a> { #[inline] pub fn robust_image_access(mut self, robust_image_access: bool) -> Self { self.robust_image_access = robust_image_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub workgroup_memory_explicit_layout: Bool32, pub workgroup_memory_explicit_layout_scalar_block_layout: Bool32, pub workgroup_memory_explicit_layout8_bit_access: Bool32, pub workgroup_memory_explicit_layout16_bit_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), workgroup_memory_explicit_layout: Bool32::default(), workgroup_memory_explicit_layout_scalar_block_layout: Bool32::default(), workgroup_memory_explicit_layout8_bit_access: Bool32::default(), workgroup_memory_explicit_layout16_bit_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'_> {} impl<'a> PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR<'a> { #[inline] pub fn workgroup_memory_explicit_layout( mut self, workgroup_memory_explicit_layout: bool, ) -> Self { self.workgroup_memory_explicit_layout = workgroup_memory_explicit_layout.into(); self } #[inline] pub fn workgroup_memory_explicit_layout_scalar_block_layout( mut self, workgroup_memory_explicit_layout_scalar_block_layout: bool, ) -> Self { self.workgroup_memory_explicit_layout_scalar_block_layout = workgroup_memory_explicit_layout_scalar_block_layout.into(); self } #[inline] pub fn workgroup_memory_explicit_layout8_bit_access( mut self, workgroup_memory_explicit_layout8_bit_access: bool, ) -> Self { self.workgroup_memory_explicit_layout8_bit_access = workgroup_memory_explicit_layout8_bit_access.into(); self } #[inline] pub fn workgroup_memory_explicit_layout16_bit_access( mut self, workgroup_memory_explicit_layout16_bit_access: bool, ) -> Self { self.workgroup_memory_explicit_layout16_bit_access = workgroup_memory_explicit_layout16_bit_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePortabilitySubsetFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub constant_alpha_color_blend_factors: Bool32, pub events: Bool32, pub image_view_format_reinterpretation: Bool32, pub image_view_format_swizzle: Bool32, pub image_view2_d_on3_d_image: Bool32, 
pub multisample_array_image: Bool32, pub mutable_comparison_samplers: Bool32, pub point_polygons: Bool32, pub sampler_mip_lod_bias: Bool32, pub separate_stencil_mask_ref: Bool32, pub shader_sample_rate_interpolation_functions: Bool32, pub tessellation_isolines: Bool32, pub tessellation_point_mode: Bool32, pub triangle_fans: Bool32, pub vertex_attribute_access_beyond_stride: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePortabilitySubsetFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDevicePortabilitySubsetFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePortabilitySubsetFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), constant_alpha_color_blend_factors: Bool32::default(), events: Bool32::default(), image_view_format_reinterpretation: Bool32::default(), image_view_format_swizzle: Bool32::default(), image_view2_d_on3_d_image: Bool32::default(), multisample_array_image: Bool32::default(), mutable_comparison_samplers: Bool32::default(), point_polygons: Bool32::default(), sampler_mip_lod_bias: Bool32::default(), separate_stencil_mask_ref: Bool32::default(), shader_sample_rate_interpolation_functions: Bool32::default(), tessellation_isolines: Bool32::default(), tessellation_point_mode: Bool32::default(), triangle_fans: Bool32::default(), vertex_attribute_access_beyond_stride: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePortabilitySubsetFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePortabilitySubsetFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePortabilitySubsetFeaturesKHR<'_> {} impl<'a> PhysicalDevicePortabilitySubsetFeaturesKHR<'a> { #[inline] pub fn constant_alpha_color_blend_factors( mut self, constant_alpha_color_blend_factors: bool, ) -> Self { self.constant_alpha_color_blend_factors = constant_alpha_color_blend_factors.into(); self } #[inline] pub fn events(mut self, events: bool) -> Self { self.events = events.into(); self } #[inline] pub fn image_view_format_reinterpretation( mut self, image_view_format_reinterpretation: bool, ) -> Self { self.image_view_format_reinterpretation = image_view_format_reinterpretation.into(); self } #[inline] pub fn image_view_format_swizzle(mut self, image_view_format_swizzle: bool) -> Self { self.image_view_format_swizzle = image_view_format_swizzle.into(); self } #[inline] pub fn image_view2_d_on3_d_image(mut self, image_view2_d_on3_d_image: bool) -> Self { self.image_view2_d_on3_d_image = image_view2_d_on3_d_image.into(); self } #[inline] pub fn multisample_array_image(mut self, multisample_array_image: bool) -> Self { self.multisample_array_image = multisample_array_image.into(); self } #[inline] pub fn mutable_comparison_samplers(mut self, mutable_comparison_samplers: bool) -> Self { self.mutable_comparison_samplers = mutable_comparison_samplers.into(); self } #[inline] pub fn point_polygons(mut self, point_polygons: bool) -> Self { self.point_polygons = point_polygons.into(); self } #[inline] pub fn sampler_mip_lod_bias(mut self, sampler_mip_lod_bias: bool) -> Self { self.sampler_mip_lod_bias = sampler_mip_lod_bias.into(); self } #[inline] pub fn separate_stencil_mask_ref(mut self, separate_stencil_mask_ref: bool) -> Self { self.separate_stencil_mask_ref = separate_stencil_mask_ref.into(); self } #[inline] pub fn 
shader_sample_rate_interpolation_functions( mut self, shader_sample_rate_interpolation_functions: bool, ) -> Self { self.shader_sample_rate_interpolation_functions = shader_sample_rate_interpolation_functions.into(); self } #[inline] pub fn tessellation_isolines(mut self, tessellation_isolines: bool) -> Self { self.tessellation_isolines = tessellation_isolines.into(); self } #[inline] pub fn tessellation_point_mode(mut self, tessellation_point_mode: bool) -> Self { self.tessellation_point_mode = tessellation_point_mode.into(); self } #[inline] pub fn triangle_fans(mut self, triangle_fans: bool) -> Self { self.triangle_fans = triangle_fans.into(); self } #[inline] pub fn vertex_attribute_access_beyond_stride( mut self, vertex_attribute_access_beyond_stride: bool, ) -> Self { self.vertex_attribute_access_beyond_stride = vertex_attribute_access_beyond_stride.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePortabilitySubsetPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_vertex_input_binding_stride_alignment: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePortabilitySubsetPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDevicePortabilitySubsetPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDevicePortabilitySubsetPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_vertex_input_binding_stride_alignment: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePortabilitySubsetPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePortabilitySubsetPropertiesKHR<'_> {} impl<'a> PhysicalDevicePortabilitySubsetPropertiesKHR<'a> { #[inline] pub fn min_vertex_input_binding_stride_alignment( mut self, min_vertex_input_binding_stride_alignment: u32, ) -> Self { self.min_vertex_input_binding_stride_alignment = min_vertex_input_binding_stride_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevice4444FormatsFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format_a4r4g4b4: Bool32, pub format_a4b4g4r4: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevice4444FormatsFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevice4444FormatsFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevice4444FormatsFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format_a4r4g4b4: Bool32::default(), format_a4b4g4r4: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevice4444FormatsFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice4444FormatsFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice4444FormatsFeaturesEXT<'_> {} impl<'a> PhysicalDevice4444FormatsFeaturesEXT<'a> { #[inline] pub fn format_a4r4g4b4(mut self, format_a4r4g4b4: bool) -> Self { self.format_a4r4g4b4 = format_a4r4g4b4.into(); self } #[inline] pub fn format_a4b4g4r4(mut self, format_a4b4g4r4: bool) -> Self { self.format_a4b4g4r4 = 
format_a4b4g4r4.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSubpassShadingFeaturesHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub subpass_shading: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSubpassShadingFeaturesHUAWEI<'_> {} unsafe impl Sync for PhysicalDeviceSubpassShadingFeaturesHUAWEI<'_> {} impl ::core::default::Default for PhysicalDeviceSubpassShadingFeaturesHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), subpass_shading: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSubpassShadingFeaturesHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSubpassShadingFeaturesHUAWEI<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubpassShadingFeaturesHUAWEI<'_> {} impl<'a> PhysicalDeviceSubpassShadingFeaturesHUAWEI<'a> { #[inline] pub fn subpass_shading(mut self, subpass_shading: bool) -> Self { self.subpass_shading = subpass_shading.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub clusterculling_shader: Bool32, pub multiview_cluster_culling_shader: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'_> {} unsafe impl Sync for PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'_> {} impl ::core::default::Default for PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), clusterculling_shader: Bool32::default(), multiview_cluster_culling_shader: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'_> {} pub unsafe trait ExtendsPhysicalDeviceClusterCullingShaderFeaturesHUAWEI {} impl<'a> PhysicalDeviceClusterCullingShaderFeaturesHUAWEI<'a> { #[inline] pub fn clusterculling_shader(mut self, clusterculling_shader: bool) -> Self { self.clusterculling_shader = clusterculling_shader.into(); self } #[inline] pub fn multiview_cluster_culling_shader( mut self, multiview_cluster_culling_shader: bool, ) -> Self { self.multiview_cluster_culling_shader = multiview_cluster_culling_shader.into(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cluster_shading_rate: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'_> {} unsafe impl Sync for PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'_> {} impl ::core::default::Default for PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cluster_shading_rate: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI; } unsafe impl ExtendsPhysicalDeviceClusterCullingShaderFeaturesHUAWEI for PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'_> { } impl<'a> PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI<'a> { #[inline] pub fn cluster_shading_rate(mut self, cluster_shading_rate: bool) -> Self { self.cluster_shading_rate = cluster_shading_rate.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCopy2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_offset: DeviceSize, pub dst_offset: DeviceSize, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCopy2<'_> {} unsafe impl Sync for BufferCopy2<'_> {} impl ::core::default::Default for BufferCopy2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_offset: DeviceSize::default(), dst_offset: DeviceSize::default(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCopy2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COPY_2; } impl<'a> BufferCopy2<'a> { #[inline] pub fn src_offset(mut self, src_offset: DeviceSize) -> Self { self.src_offset = src_offset; self } #[inline] pub fn dst_offset(mut self, dst_offset: DeviceSize) -> Self { self.dst_offset = dst_offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageCopy2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_subresource: ImageSubresourceLayers, pub src_offset: Offset3D, pub dst_subresource: ImageSubresourceLayers, pub dst_offset: Offset3D, pub extent: Extent3D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageCopy2<'_> {} unsafe impl Sync for ImageCopy2<'_> {} impl ::core::default::Default for ImageCopy2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_subresource: ImageSubresourceLayers::default(), src_offset: Offset3D::default(), 
dst_subresource: ImageSubresourceLayers::default(), dst_offset: Offset3D::default(), extent: Extent3D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageCopy2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_COPY_2; } impl<'a> ImageCopy2<'a> { #[inline] pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self { self.src_subresource = src_subresource; self } #[inline] pub fn src_offset(mut self, src_offset: Offset3D) -> Self { self.src_offset = src_offset; self } #[inline] pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self { self.dst_subresource = dst_subresource; self } #[inline] pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self { self.dst_offset = dst_offset; self } #[inline] pub fn extent(mut self, extent: Extent3D) -> Self { self.extent = extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageBlit2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_subresource: ImageSubresourceLayers, pub src_offsets: [Offset3D; 2], pub dst_subresource: ImageSubresourceLayers, pub dst_offsets: [Offset3D; 2], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageBlit2<'_> {} unsafe impl Sync for ImageBlit2<'_> {} impl ::core::default::Default for ImageBlit2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_subresource: ImageSubresourceLayers::default(), src_offsets: unsafe { ::core::mem::zeroed() }, dst_subresource: ImageSubresourceLayers::default(), dst_offsets: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageBlit2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_BLIT_2; } pub unsafe trait ExtendsImageBlit2 {} impl<'a> ImageBlit2<'a> { #[inline] pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self { self.src_subresource = src_subresource; self } #[inline] pub fn src_offsets(mut self, src_offsets: [Offset3D; 2]) -> Self { self.src_offsets = src_offsets; self } #[inline] pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self { self.dst_subresource = dst_subresource; self } #[inline] pub fn dst_offsets(mut self, dst_offsets: [Offset3D; 2]) -> Self { self.dst_offsets = dst_offsets; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferImageCopy2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer_offset: DeviceSize, pub buffer_row_length: u32, pub buffer_image_height: u32, pub image_subresource: ImageSubresourceLayers, pub image_offset: Offset3D, pub image_extent: Extent3D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferImageCopy2<'_> {} unsafe impl Sync for BufferImageCopy2<'_> {} impl ::core::default::Default for BufferImageCopy2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer_offset: DeviceSize::default(), buffer_row_length: u32::default(), buffer_image_height: u32::default(), image_subresource: ImageSubresourceLayers::default(), image_offset: Offset3D::default(), image_extent: Extent3D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferImageCopy2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_IMAGE_COPY_2; } pub unsafe trait ExtendsBufferImageCopy2 {} impl<'a> BufferImageCopy2<'a> { #[inline] pub fn buffer_offset(mut self, buffer_offset: DeviceSize) -> Self { self.buffer_offset = buffer_offset; self } #[inline] pub fn buffer_row_length(mut self, buffer_row_length: u32) -> Self { self.buffer_row_length = buffer_row_length; self } #[inline] pub fn buffer_image_height(mut self, buffer_image_height: u32) -> Self { self.buffer_image_height = buffer_image_height; self } #[inline] pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self { self.image_subresource = image_subresource; self } #[inline] pub fn image_offset(mut self, image_offset: Offset3D) -> Self { self.image_offset = image_offset; self } #[inline] pub fn image_extent(mut self, image_extent: Extent3D) -> Self { self.image_extent = image_extent; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageResolve2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_subresource: ImageSubresourceLayers, pub src_offset: Offset3D, pub dst_subresource: ImageSubresourceLayers, pub dst_offset: Offset3D, pub extent: Extent3D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageResolve2<'_> {} unsafe impl Sync for ImageResolve2<'_> {} impl ::core::default::Default for ImageResolve2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_subresource: ImageSubresourceLayers::default(), src_offset: Offset3D::default(), dst_subresource: ImageSubresourceLayers::default(), dst_offset: Offset3D::default(), extent: Extent3D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageResolve2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_RESOLVE_2; } impl<'a> ImageResolve2<'a> { #[inline] pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self { self.src_subresource = src_subresource; self } #[inline] pub fn src_offset(mut self, src_offset: Offset3D) -> Self { self.src_offset = src_offset; self } #[inline] pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self { self.dst_subresource = dst_subresource; self } #[inline] pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self { self.dst_offset = dst_offset; self } #[inline] pub fn extent(mut self, extent: Extent3D) -> Self { self.extent = extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyBufferInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_buffer: Buffer, pub dst_buffer: Buffer, pub region_count: u32, pub p_regions: *const BufferCopy2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyBufferInfo2<'_> {} unsafe impl Sync for CopyBufferInfo2<'_> {} impl ::core::default::Default for CopyBufferInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_buffer: Buffer::default(), dst_buffer: Buffer::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyBufferInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_BUFFER_INFO_2; } impl<'a> CopyBufferInfo2<'a> { #[inline] pub fn src_buffer(mut self, src_buffer: Buffer) -> Self { self.src_buffer = src_buffer; self } #[inline] pub fn dst_buffer(mut self, dst_buffer: Buffer) -> Self { self.dst_buffer = dst_buffer; self } #[inline] pub fn regions(mut self, regions: &'a [BufferCopy2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyImageInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_image: Image, pub 
src_image_layout: ImageLayout, pub dst_image: Image, pub dst_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const ImageCopy2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyImageInfo2<'_> {} unsafe impl Sync for CopyImageInfo2<'_> {} impl ::core::default::Default for CopyImageInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_image: Image::default(), src_image_layout: ImageLayout::default(), dst_image: Image::default(), dst_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyImageInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_IMAGE_INFO_2; } impl<'a> CopyImageInfo2<'a> { #[inline] pub fn src_image(mut self, src_image: Image) -> Self { self.src_image = src_image; self } #[inline] pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self { self.src_image_layout = src_image_layout; self } #[inline] pub fn dst_image(mut self, dst_image: Image) -> Self { self.dst_image = dst_image; self } #[inline] pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self { self.dst_image_layout = dst_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [ImageCopy2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BlitImageInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_image: Image, pub src_image_layout: ImageLayout, pub dst_image: Image, pub dst_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const ImageBlit2<'a>, pub filter: Filter, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BlitImageInfo2<'_> {} unsafe impl Sync for BlitImageInfo2<'_> {} impl ::core::default::Default for BlitImageInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_image: Image::default(), src_image_layout: ImageLayout::default(), dst_image: Image::default(), dst_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), filter: Filter::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BlitImageInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BLIT_IMAGE_INFO_2; } pub unsafe trait ExtendsBlitImageInfo2 {} impl<'a> BlitImageInfo2<'a> { #[inline] pub fn src_image(mut self, src_image: Image) -> Self { self.src_image = src_image; self } #[inline] pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self { self.src_image_layout = src_image_layout; self } #[inline] pub fn dst_image(mut self, dst_image: Image) -> Self { self.dst_image = dst_image; self } #[inline] pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self { self.dst_image_layout = dst_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [ImageBlit2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } #[inline] pub fn filter(mut self, filter: Filter) -> Self { self.filter = filter; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyBufferToImageInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_buffer: Buffer, pub dst_image: Image, pub dst_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const BufferImageCopy2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyBufferToImageInfo2<'_> {} unsafe impl Sync for CopyBufferToImageInfo2<'_> {} impl ::core::default::Default for CopyBufferToImageInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_buffer: Buffer::default(), dst_image: Image::default(), dst_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyBufferToImageInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_BUFFER_TO_IMAGE_INFO_2; } impl<'a> CopyBufferToImageInfo2<'a> { #[inline] pub fn src_buffer(mut self, src_buffer: Buffer) -> Self { self.src_buffer = src_buffer; self } #[inline] pub fn dst_image(mut self, dst_image: Image) -> Self { self.dst_image = dst_image; self } #[inline] pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self { self.dst_image_layout = dst_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [BufferImageCopy2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyImageToBufferInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_image: Image, pub src_image_layout: ImageLayout, pub dst_buffer: Buffer, pub region_count: u32, pub p_regions: *const BufferImageCopy2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyImageToBufferInfo2<'_> {} unsafe impl Sync for CopyImageToBufferInfo2<'_> {} impl ::core::default::Default for CopyImageToBufferInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_image: Image::default(), src_image_layout: ImageLayout::default(), dst_buffer: Buffer::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyImageToBufferInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_IMAGE_TO_BUFFER_INFO_2; } impl<'a> CopyImageToBufferInfo2<'a> { #[inline] pub fn src_image(mut self, src_image: Image) -> Self { self.src_image = src_image; self } #[inline] pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self { self.src_image_layout = src_image_layout; self } #[inline] pub fn dst_buffer(mut self, dst_buffer: Buffer) -> Self { self.dst_buffer = dst_buffer; self } #[inline] pub fn regions(mut self, regions: &'a [BufferImageCopy2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = 
"debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ResolveImageInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_image: Image, pub src_image_layout: ImageLayout, pub dst_image: Image, pub dst_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const ImageResolve2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ResolveImageInfo2<'_> {} unsafe impl Sync for ResolveImageInfo2<'_> {} impl ::core::default::Default for ResolveImageInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_image: Image::default(), src_image_layout: ImageLayout::default(), dst_image: Image::default(), dst_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ResolveImageInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RESOLVE_IMAGE_INFO_2; } impl<'a> ResolveImageInfo2<'a> { #[inline] pub fn src_image(mut self, src_image: Image) -> Self { self.src_image = src_image; self } #[inline] pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self { self.src_image_layout = src_image_layout; self } #[inline] pub fn dst_image(mut self, dst_image: Image) -> Self { self.dst_image = dst_image; self } #[inline] pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self { self.dst_image_layout = dst_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [ImageResolve2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_image_int64_atomics: Bool32, pub sparse_image_int64_atomics: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_image_int64_atomics: Bool32::default(), sparse_image_int64_atomics: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'_> {} impl<'a> PhysicalDeviceShaderImageAtomicInt64FeaturesEXT<'a> { #[inline] pub fn shader_image_int64_atomics(mut self, shader_image_int64_atomics: bool) -> Self { self.shader_image_int64_atomics = shader_image_int64_atomics.into(); self } #[inline] pub fn sparse_image_int64_atomics(mut self, sparse_image_int64_atomics: bool) -> Self { self.sparse_image_int64_atomics = sparse_image_int64_atomics.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FragmentShadingRateAttachmentInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_fragment_shading_rate_attachment: *const AttachmentReference2<'a>, 
pub shading_rate_attachment_texel_size: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FragmentShadingRateAttachmentInfoKHR<'_> {} unsafe impl Sync for FragmentShadingRateAttachmentInfoKHR<'_> {} impl ::core::default::Default for FragmentShadingRateAttachmentInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_fragment_shading_rate_attachment: ::core::ptr::null(), shading_rate_attachment_texel_size: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FragmentShadingRateAttachmentInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR; } unsafe impl ExtendsSubpassDescription2 for FragmentShadingRateAttachmentInfoKHR<'_> {} impl<'a> FragmentShadingRateAttachmentInfoKHR<'a> { #[inline] pub fn fragment_shading_rate_attachment( mut self, fragment_shading_rate_attachment: &'a AttachmentReference2<'a>, ) -> Self { self.p_fragment_shading_rate_attachment = fragment_shading_rate_attachment; self } #[inline] pub fn shading_rate_attachment_texel_size( mut self, shading_rate_attachment_texel_size: Extent2D, ) -> Self { self.shading_rate_attachment_texel_size = shading_rate_attachment_texel_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineFragmentShadingRateStateCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub fragment_size: Extent2D, pub combiner_ops: [FragmentShadingRateCombinerOpKHR; 2], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineFragmentShadingRateStateCreateInfoKHR<'_> {} unsafe impl Sync for PipelineFragmentShadingRateStateCreateInfoKHR<'_> {} impl ::core::default::Default for PipelineFragmentShadingRateStateCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), fragment_size: Extent2D::default(), combiner_ops: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineFragmentShadingRateStateCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineFragmentShadingRateStateCreateInfoKHR<'_> { } impl<'a> PipelineFragmentShadingRateStateCreateInfoKHR<'a> { #[inline] pub fn fragment_size(mut self, fragment_size: Extent2D) -> Self { self.fragment_size = fragment_size; self } #[inline] pub fn combiner_ops(mut self, combiner_ops: [FragmentShadingRateCombinerOpKHR; 2]) -> Self { self.combiner_ops = combiner_ops; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShadingRateFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_fragment_shading_rate: Bool32, pub primitive_fragment_shading_rate: Bool32, pub attachment_fragment_shading_rate: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShadingRateFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShadingRateFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShadingRateFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_fragment_shading_rate: Bool32::default(), primitive_fragment_shading_rate: Bool32::default(), attachment_fragment_shading_rate: 
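    // Usage sketch (illustrative comment only): once `pipeline_fragment_shading_rate` is
    // enabled, `PipelineFragmentShadingRateStateCreateInfoKHR` (defined just above) can be
    // chained into a graphics pipeline to request, for example, a 2x2 fragment size:
    //
    //     let mut fsr_state = vk::PipelineFragmentShadingRateStateCreateInfoKHR::default()
    //         .fragment_size(vk::Extent2D { width: 2, height: 2 })
    //         .combiner_ops([
    //             vk::FragmentShadingRateCombinerOpKHR::KEEP,
    //             vk::FragmentShadingRateCombinerOpKHR::KEEP,
    //         ]);
    //     let pipeline_info = vk::GraphicsPipelineCreateInfo::default().push_next(&mut fsr_state);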
Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShadingRateFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShadingRateFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShadingRateFeaturesKHR<'_> {} impl<'a> PhysicalDeviceFragmentShadingRateFeaturesKHR<'a> { #[inline] pub fn pipeline_fragment_shading_rate(mut self, pipeline_fragment_shading_rate: bool) -> Self { self.pipeline_fragment_shading_rate = pipeline_fragment_shading_rate.into(); self } #[inline] pub fn primitive_fragment_shading_rate( mut self, primitive_fragment_shading_rate: bool, ) -> Self { self.primitive_fragment_shading_rate = primitive_fragment_shading_rate.into(); self } #[inline] pub fn attachment_fragment_shading_rate( mut self, attachment_fragment_shading_rate: bool, ) -> Self { self.attachment_fragment_shading_rate = attachment_fragment_shading_rate.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShadingRatePropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_fragment_shading_rate_attachment_texel_size: Extent2D, pub max_fragment_shading_rate_attachment_texel_size: Extent2D, pub max_fragment_shading_rate_attachment_texel_size_aspect_ratio: u32, pub primitive_fragment_shading_rate_with_multiple_viewports: Bool32, pub layered_shading_rate_attachments: Bool32, pub fragment_shading_rate_non_trivial_combiner_ops: Bool32, pub max_fragment_size: Extent2D, pub max_fragment_size_aspect_ratio: u32, pub max_fragment_shading_rate_coverage_samples: u32, pub max_fragment_shading_rate_rasterization_samples: SampleCountFlags, pub fragment_shading_rate_with_shader_depth_stencil_writes: Bool32, pub fragment_shading_rate_with_sample_mask: Bool32, pub fragment_shading_rate_with_shader_sample_mask: Bool32, pub fragment_shading_rate_with_conservative_rasterization: Bool32, pub fragment_shading_rate_with_fragment_shader_interlock: Bool32, pub fragment_shading_rate_with_custom_sample_locations: Bool32, pub fragment_shading_rate_strict_multiply_combiner: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShadingRatePropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShadingRatePropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShadingRatePropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_fragment_shading_rate_attachment_texel_size: Extent2D::default(), max_fragment_shading_rate_attachment_texel_size: Extent2D::default(), max_fragment_shading_rate_attachment_texel_size_aspect_ratio: u32::default(), primitive_fragment_shading_rate_with_multiple_viewports: Bool32::default(), layered_shading_rate_attachments: Bool32::default(), fragment_shading_rate_non_trivial_combiner_ops: Bool32::default(), max_fragment_size: Extent2D::default(), max_fragment_size_aspect_ratio: u32::default(), max_fragment_shading_rate_coverage_samples: u32::default(), max_fragment_shading_rate_rasterization_samples: SampleCountFlags::default(), fragment_shading_rate_with_shader_depth_stencil_writes: Bool32::default(), fragment_shading_rate_with_sample_mask: Bool32::default(), fragment_shading_rate_with_shader_sample_mask: Bool32::default(), 
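    // Query sketch (illustrative comment only; `instance` and `physical_device` are
    // assumed): property structs like this one are filled out by chaining them into
    // `PhysicalDeviceProperties2` before calling `get_physical_device_properties2`:
    //
    //     let mut fsr_props = vk::PhysicalDeviceFragmentShadingRatePropertiesKHR::default();
    //     let mut props2 = vk::PhysicalDeviceProperties2::default().push_next(&mut fsr_props);
    //     unsafe { instance.get_physical_device_properties2(physical_device, &mut props2) };
    //     let max_size = fsr_props.max_fragment_size;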
fragment_shading_rate_with_conservative_rasterization: Bool32::default(), fragment_shading_rate_with_fragment_shader_interlock: Bool32::default(), fragment_shading_rate_with_custom_sample_locations: Bool32::default(), fragment_shading_rate_strict_multiply_combiner: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShadingRatePropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentShadingRatePropertiesKHR<'_> { } impl<'a> PhysicalDeviceFragmentShadingRatePropertiesKHR<'a> { #[inline] pub fn min_fragment_shading_rate_attachment_texel_size( mut self, min_fragment_shading_rate_attachment_texel_size: Extent2D, ) -> Self { self.min_fragment_shading_rate_attachment_texel_size = min_fragment_shading_rate_attachment_texel_size; self } #[inline] pub fn max_fragment_shading_rate_attachment_texel_size( mut self, max_fragment_shading_rate_attachment_texel_size: Extent2D, ) -> Self { self.max_fragment_shading_rate_attachment_texel_size = max_fragment_shading_rate_attachment_texel_size; self } #[inline] pub fn max_fragment_shading_rate_attachment_texel_size_aspect_ratio( mut self, max_fragment_shading_rate_attachment_texel_size_aspect_ratio: u32, ) -> Self { self.max_fragment_shading_rate_attachment_texel_size_aspect_ratio = max_fragment_shading_rate_attachment_texel_size_aspect_ratio; self } #[inline] pub fn primitive_fragment_shading_rate_with_multiple_viewports( mut self, primitive_fragment_shading_rate_with_multiple_viewports: bool, ) -> Self { self.primitive_fragment_shading_rate_with_multiple_viewports = primitive_fragment_shading_rate_with_multiple_viewports.into(); self } #[inline] pub fn layered_shading_rate_attachments( mut self, layered_shading_rate_attachments: bool, ) -> Self { self.layered_shading_rate_attachments = layered_shading_rate_attachments.into(); self } #[inline] pub fn fragment_shading_rate_non_trivial_combiner_ops( mut self, fragment_shading_rate_non_trivial_combiner_ops: bool, ) -> Self { self.fragment_shading_rate_non_trivial_combiner_ops = fragment_shading_rate_non_trivial_combiner_ops.into(); self } #[inline] pub fn max_fragment_size(mut self, max_fragment_size: Extent2D) -> Self { self.max_fragment_size = max_fragment_size; self } #[inline] pub fn max_fragment_size_aspect_ratio(mut self, max_fragment_size_aspect_ratio: u32) -> Self { self.max_fragment_size_aspect_ratio = max_fragment_size_aspect_ratio; self } #[inline] pub fn max_fragment_shading_rate_coverage_samples( mut self, max_fragment_shading_rate_coverage_samples: u32, ) -> Self { self.max_fragment_shading_rate_coverage_samples = max_fragment_shading_rate_coverage_samples; self } #[inline] pub fn max_fragment_shading_rate_rasterization_samples( mut self, max_fragment_shading_rate_rasterization_samples: SampleCountFlags, ) -> Self { self.max_fragment_shading_rate_rasterization_samples = max_fragment_shading_rate_rasterization_samples; self } #[inline] pub fn fragment_shading_rate_with_shader_depth_stencil_writes( mut self, fragment_shading_rate_with_shader_depth_stencil_writes: bool, ) -> Self { self.fragment_shading_rate_with_shader_depth_stencil_writes = fragment_shading_rate_with_shader_depth_stencil_writes.into(); self } #[inline] pub fn fragment_shading_rate_with_sample_mask( mut self, fragment_shading_rate_with_sample_mask: bool, ) -> Self { self.fragment_shading_rate_with_sample_mask = 
fragment_shading_rate_with_sample_mask.into(); self } #[inline] pub fn fragment_shading_rate_with_shader_sample_mask( mut self, fragment_shading_rate_with_shader_sample_mask: bool, ) -> Self { self.fragment_shading_rate_with_shader_sample_mask = fragment_shading_rate_with_shader_sample_mask.into(); self } #[inline] pub fn fragment_shading_rate_with_conservative_rasterization( mut self, fragment_shading_rate_with_conservative_rasterization: bool, ) -> Self { self.fragment_shading_rate_with_conservative_rasterization = fragment_shading_rate_with_conservative_rasterization.into(); self } #[inline] pub fn fragment_shading_rate_with_fragment_shader_interlock( mut self, fragment_shading_rate_with_fragment_shader_interlock: bool, ) -> Self { self.fragment_shading_rate_with_fragment_shader_interlock = fragment_shading_rate_with_fragment_shader_interlock.into(); self } #[inline] pub fn fragment_shading_rate_with_custom_sample_locations( mut self, fragment_shading_rate_with_custom_sample_locations: bool, ) -> Self { self.fragment_shading_rate_with_custom_sample_locations = fragment_shading_rate_with_custom_sample_locations.into(); self } #[inline] pub fn fragment_shading_rate_strict_multiply_combiner( mut self, fragment_shading_rate_strict_multiply_combiner: bool, ) -> Self { self.fragment_shading_rate_strict_multiply_combiner = fragment_shading_rate_strict_multiply_combiner.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShadingRateKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub sample_counts: SampleCountFlags, pub fragment_size: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShadingRateKHR<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShadingRateKHR<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShadingRateKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), sample_counts: SampleCountFlags::default(), fragment_size: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShadingRateKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR; } impl<'a> PhysicalDeviceFragmentShadingRateKHR<'a> { #[inline] pub fn sample_counts(mut self, sample_counts: SampleCountFlags) -> Self { self.sample_counts = sample_counts; self } #[inline] pub fn fragment_size(mut self, fragment_size: Extent2D) -> Self { self.fragment_size = fragment_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderTerminateInvocationFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_terminate_invocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderTerminateInvocationFeatures<'_> {} unsafe impl Sync for PhysicalDeviceShaderTerminateInvocationFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceShaderTerminateInvocationFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_terminate_invocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderTerminateInvocationFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES; } unsafe impl 
ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderTerminateInvocationFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderTerminateInvocationFeatures<'_> {} impl<'a> PhysicalDeviceShaderTerminateInvocationFeatures<'a> { #[inline] pub fn shader_terminate_invocation(mut self, shader_terminate_invocation: bool) -> Self { self.shader_terminate_invocation = shader_terminate_invocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_shading_rate_enums: Bool32, pub supersample_fragment_shading_rates: Bool32, pub no_invocation_fragment_shading_rates: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_shading_rate_enums: Bool32::default(), supersample_fragment_shading_rates: Bool32::default(), no_invocation_fragment_shading_rates: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'_> {} impl<'a> PhysicalDeviceFragmentShadingRateEnumsFeaturesNV<'a> { #[inline] pub fn fragment_shading_rate_enums(mut self, fragment_shading_rate_enums: bool) -> Self { self.fragment_shading_rate_enums = fragment_shading_rate_enums.into(); self } #[inline] pub fn supersample_fragment_shading_rates( mut self, supersample_fragment_shading_rates: bool, ) -> Self { self.supersample_fragment_shading_rates = supersample_fragment_shading_rates.into(); self } #[inline] pub fn no_invocation_fragment_shading_rates( mut self, no_invocation_fragment_shading_rates: bool, ) -> Self { self.no_invocation_fragment_shading_rates = no_invocation_fragment_shading_rates.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_fragment_shading_rate_invocation_count: SampleCountFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_fragment_shading_rate_invocation_count: SampleCountFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'_> { } impl<'a> 
PhysicalDeviceFragmentShadingRateEnumsPropertiesNV<'a> { #[inline] pub fn max_fragment_shading_rate_invocation_count( mut self, max_fragment_shading_rate_invocation_count: SampleCountFlags, ) -> Self { self.max_fragment_shading_rate_invocation_count = max_fragment_shading_rate_invocation_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineFragmentShadingRateEnumStateCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub shading_rate_type: FragmentShadingRateTypeNV, pub shading_rate: FragmentShadingRateNV, pub combiner_ops: [FragmentShadingRateCombinerOpKHR; 2], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineFragmentShadingRateEnumStateCreateInfoNV<'_> {} unsafe impl Sync for PipelineFragmentShadingRateEnumStateCreateInfoNV<'_> {} impl ::core::default::Default for PipelineFragmentShadingRateEnumStateCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), shading_rate_type: FragmentShadingRateTypeNV::default(), shading_rate: FragmentShadingRateNV::default(), combiner_ops: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineFragmentShadingRateEnumStateCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineFragmentShadingRateEnumStateCreateInfoNV<'_> { } impl<'a> PipelineFragmentShadingRateEnumStateCreateInfoNV<'a> { #[inline] pub fn shading_rate_type(mut self, shading_rate_type: FragmentShadingRateTypeNV) -> Self { self.shading_rate_type = shading_rate_type; self } #[inline] pub fn shading_rate(mut self, shading_rate: FragmentShadingRateNV) -> Self { self.shading_rate = shading_rate; self } #[inline] pub fn combiner_ops(mut self, combiner_ops: [FragmentShadingRateCombinerOpKHR; 2]) -> Self { self.combiner_ops = combiner_ops; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureBuildSizesInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acceleration_structure_size: DeviceSize, pub update_scratch_size: DeviceSize, pub build_scratch_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureBuildSizesInfoKHR<'_> {} unsafe impl Sync for AccelerationStructureBuildSizesInfoKHR<'_> {} impl ::core::default::Default for AccelerationStructureBuildSizesInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acceleration_structure_size: DeviceSize::default(), update_scratch_size: DeviceSize::default(), build_scratch_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureBuildSizesInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR; } impl<'a> AccelerationStructureBuildSizesInfoKHR<'a> { #[inline] pub fn acceleration_structure_size(mut self, acceleration_structure_size: DeviceSize) -> Self { self.acceleration_structure_size = acceleration_structure_size; self } #[inline] pub fn update_scratch_size(mut self, update_scratch_size: DeviceSize) -> Self { self.update_scratch_size = update_scratch_size; self } #[inline] pub fn build_scratch_size(mut self, build_scratch_size: DeviceSize) -> Self { self.build_scratch_size = 
build_scratch_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImage2DViewOf3DFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image2_d_view_of3_d: Bool32, pub sampler2_d_view_of3_d: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImage2DViewOf3DFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceImage2DViewOf3DFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImage2DViewOf3DFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image2_d_view_of3_d: Bool32::default(), sampler2_d_view_of3_d: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImage2DViewOf3DFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImage2DViewOf3DFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImage2DViewOf3DFeaturesEXT<'_> {} impl<'a> PhysicalDeviceImage2DViewOf3DFeaturesEXT<'a> { #[inline] pub fn image2_d_view_of3_d(mut self, image2_d_view_of3_d: bool) -> Self { self.image2_d_view_of3_d = image2_d_view_of3_d.into(); self } #[inline] pub fn sampler2_d_view_of3_d(mut self, sampler2_d_view_of3_d: bool) -> Self { self.sampler2_d_view_of3_d = sampler2_d_view_of3_d.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_sliced_view_of3_d: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_sliced_view_of3_d: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'_> {} impl<'a> PhysicalDeviceImageSlicedViewOf3DFeaturesEXT<'a> { #[inline] pub fn image_sliced_view_of3_d(mut self, image_sliced_view_of3_d: bool) -> Self { self.image_sliced_view_of3_d = image_sliced_view_of3_d.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub attachment_feedback_loop_dynamic_state: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), 
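    // Query sketch (illustrative comment only; `instance` and `physical_device` are
    // assumed): the feature structs in this block are queried the same way, by chaining
    // them into `PhysicalDeviceFeatures2`, e.g. for the 2D-view-of-3D features above:
    //
    //     let mut view_2d_of_3d = vk::PhysicalDeviceImage2DViewOf3DFeaturesEXT::default();
    //     let mut features2 = vk::PhysicalDeviceFeatures2::default().push_next(&mut view_2d_of_3d);
    //     unsafe { instance.get_physical_device_features2(physical_device, &mut features2) };
    //     let supported = view_2d_of_3d.image2_d_view_of3_d == vk::TRUE;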
attachment_feedback_loop_dynamic_state: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'_> { } impl<'a> PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT<'a> { #[inline] pub fn attachment_feedback_loop_dynamic_state( mut self, attachment_feedback_loop_dynamic_state: bool, ) -> Self { self.attachment_feedback_loop_dynamic_state = attachment_feedback_loop_dynamic_state.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub mutable_descriptor_type: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), mutable_descriptor_type: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'_> {} impl<'a> PhysicalDeviceMutableDescriptorTypeFeaturesEXT<'a> { #[inline] pub fn mutable_descriptor_type(mut self, mutable_descriptor_type: bool) -> Self { self.mutable_descriptor_type = mutable_descriptor_type.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MutableDescriptorTypeListEXT<'a> { pub descriptor_type_count: u32, pub p_descriptor_types: *const DescriptorType, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MutableDescriptorTypeListEXT<'_> {} unsafe impl Sync for MutableDescriptorTypeListEXT<'_> {} impl ::core::default::Default for MutableDescriptorTypeListEXT<'_> { #[inline] fn default() -> Self { Self { descriptor_type_count: u32::default(), p_descriptor_types: ::core::ptr::null(), _marker: PhantomData, } } } impl<'a> MutableDescriptorTypeListEXT<'a> { #[inline] pub fn descriptor_types(mut self, descriptor_types: &'a [DescriptorType]) -> Self { self.descriptor_type_count = descriptor_types.len() as _; self.p_descriptor_types = descriptor_types.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MutableDescriptorTypeCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub mutable_descriptor_type_list_count: u32, pub p_mutable_descriptor_type_lists: *const MutableDescriptorTypeListEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MutableDescriptorTypeCreateInfoEXT<'_> {} unsafe impl Sync for MutableDescriptorTypeCreateInfoEXT<'_> {} impl ::core::default::Default for 
MutableDescriptorTypeCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), mutable_descriptor_type_list_count: u32::default(), p_mutable_descriptor_type_lists: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MutableDescriptorTypeCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT; } unsafe impl ExtendsDescriptorSetLayoutCreateInfo for MutableDescriptorTypeCreateInfoEXT<'_> {} unsafe impl ExtendsDescriptorPoolCreateInfo for MutableDescriptorTypeCreateInfoEXT<'_> {} impl<'a> MutableDescriptorTypeCreateInfoEXT<'a> { #[inline] pub fn mutable_descriptor_type_lists( mut self, mutable_descriptor_type_lists: &'a [MutableDescriptorTypeListEXT<'a>], ) -> Self { self.mutable_descriptor_type_list_count = mutable_descriptor_type_lists.len() as _; self.p_mutable_descriptor_type_lists = mutable_descriptor_type_lists.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDepthClipControlFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub depth_clip_control: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDepthClipControlFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDepthClipControlFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDepthClipControlFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), depth_clip_control: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDepthClipControlFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClipControlFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClipControlFeaturesEXT<'_> {} impl<'a> PhysicalDeviceDepthClipControlFeaturesEXT<'a> { #[inline] pub fn depth_clip_control(mut self, depth_clip_control: bool) -> Self { self.depth_clip_control = depth_clip_control.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineViewportDepthClipControlCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub negative_one_to_one: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineViewportDepthClipControlCreateInfoEXT<'_> {} unsafe impl Sync for PipelineViewportDepthClipControlCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineViewportDepthClipControlCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), negative_one_to_one: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineViewportDepthClipControlCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportDepthClipControlCreateInfoEXT<'_> { } impl<'a> PipelineViewportDepthClipControlCreateInfoEXT<'a> { #[inline] pub fn negative_one_to_one(mut self, negative_one_to_one: bool) -> Self { self.negative_one_to_one = negative_one_to_one.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] 
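// Usage sketch for the `Extends*` feature-struct pattern seen above (here with
// `PhysicalDeviceDepthClipControlFeaturesEXT`): such structs are chained onto
// `PhysicalDeviceFeatures2` to query support and onto `DeviceCreateInfo` to enable the
// feature. `instance`, `pdevice` and `queue_infos` are assumed; error handling omitted.
//
//     let mut depth_clip = vk::PhysicalDeviceDepthClipControlFeaturesEXT::default();
//     let mut features2 = vk::PhysicalDeviceFeatures2::default().push_next(&mut depth_clip);
//     unsafe { instance.get_physical_device_features2(pdevice, &mut features2) };
//     if depth_clip.depth_clip_control == vk::TRUE {
//         let device_info = vk::DeviceCreateInfo::default()
//             .queue_create_infos(&queue_infos)
//             .push_next(&mut depth_clip);
//         let device = unsafe { instance.create_device(pdevice, &device_info, None)? };
//     }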
pub struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub vertex_input_dynamic_state: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), vertex_input_dynamic_state: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'_> {} impl<'a> PhysicalDeviceVertexInputDynamicStateFeaturesEXT<'a> { #[inline] pub fn vertex_input_dynamic_state(mut self, vertex_input_dynamic_state: bool) -> Self { self.vertex_input_dynamic_state = vertex_input_dynamic_state.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalMemoryRDMAFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub external_memory_rdma: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalMemoryRDMAFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceExternalMemoryRDMAFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceExternalMemoryRDMAFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), external_memory_rdma: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalMemoryRDMAFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExternalMemoryRDMAFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExternalMemoryRDMAFeaturesNV<'_> {} impl<'a> PhysicalDeviceExternalMemoryRDMAFeaturesNV<'a> { #[inline] pub fn external_memory_rdma(mut self, external_memory_rdma: bool) -> Self { self.external_memory_rdma = external_memory_rdma.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VertexInputBindingDescription2EXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub binding: u32, pub stride: u32, pub input_rate: VertexInputRate, pub divisor: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VertexInputBindingDescription2EXT<'_> {} unsafe impl Sync for VertexInputBindingDescription2EXT<'_> {} impl ::core::default::Default for VertexInputBindingDescription2EXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), binding: u32::default(), stride: u32::default(), input_rate: VertexInputRate::default(), divisor: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VertexInputBindingDescription2EXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT; } impl<'a> VertexInputBindingDescription2EXT<'a> { #[inline] pub fn binding(mut self, 
binding: u32) -> Self { self.binding = binding; self } #[inline] pub fn stride(mut self, stride: u32) -> Self { self.stride = stride; self } #[inline] pub fn input_rate(mut self, input_rate: VertexInputRate) -> Self { self.input_rate = input_rate; self } #[inline] pub fn divisor(mut self, divisor: u32) -> Self { self.divisor = divisor; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VertexInputAttributeDescription2EXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub location: u32, pub binding: u32, pub format: Format, pub offset: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VertexInputAttributeDescription2EXT<'_> {} unsafe impl Sync for VertexInputAttributeDescription2EXT<'_> {} impl ::core::default::Default for VertexInputAttributeDescription2EXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), location: u32::default(), binding: u32::default(), format: Format::default(), offset: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VertexInputAttributeDescription2EXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT; } impl<'a> VertexInputAttributeDescription2EXT<'a> { #[inline] pub fn location(mut self, location: u32) -> Self { self.location = location; self } #[inline] pub fn binding(mut self, binding: u32) -> Self { self.binding = binding; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn offset(mut self, offset: u32) -> Self { self.offset = offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceColorWriteEnableFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub color_write_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceColorWriteEnableFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceColorWriteEnableFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceColorWriteEnableFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), color_write_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceColorWriteEnableFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceColorWriteEnableFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceColorWriteEnableFeaturesEXT<'_> {} impl<'a> PhysicalDeviceColorWriteEnableFeaturesEXT<'a> { #[inline] pub fn color_write_enable(mut self, color_write_enable: bool) -> Self { self.color_write_enable = color_write_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineColorWriteCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub attachment_count: u32, pub p_color_write_enables: *const Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineColorWriteCreateInfoEXT<'_> {} unsafe impl Sync for PipelineColorWriteCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineColorWriteCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), 
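// Usage sketch for `PipelineColorWriteCreateInfoEXT`: slice setters such as
// `color_write_enables` fill both the `*_count` field and the pointer, and the borrowed
// slice must outlive the struct (enforced via the `'a` lifetime). The create-info is
// then chained onto `PipelineColorBlendStateCreateInfo`. `attachments` is assumed to be
// a slice of `vk::PipelineColorBlendAttachmentState`.
//
//     let enables = [vk::TRUE, vk::TRUE];
//     let mut color_write = vk::PipelineColorWriteCreateInfoEXT::default()
//         .color_write_enables(&enables);
//     let blend_state = vk::PipelineColorBlendStateCreateInfo::default()
//         .attachments(&attachments)
//         .push_next(&mut color_write);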
attachment_count: u32::default(), p_color_write_enables: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineColorWriteCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COLOR_WRITE_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineColorBlendStateCreateInfo for PipelineColorWriteCreateInfoEXT<'_> {} impl<'a> PipelineColorWriteCreateInfoEXT<'a> { #[inline] pub fn color_write_enables(mut self, color_write_enables: &'a [Bool32]) -> Self { self.attachment_count = color_write_enables.len() as _; self.p_color_write_enables = color_write_enables.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryBarrier2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_stage_mask: PipelineStageFlags2, pub src_access_mask: AccessFlags2, pub dst_stage_mask: PipelineStageFlags2, pub dst_access_mask: AccessFlags2, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryBarrier2<'_> {} unsafe impl Sync for MemoryBarrier2<'_> {} impl ::core::default::Default for MemoryBarrier2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_stage_mask: PipelineStageFlags2::default(), src_access_mask: AccessFlags2::default(), dst_stage_mask: PipelineStageFlags2::default(), dst_access_mask: AccessFlags2::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryBarrier2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_BARRIER_2; } unsafe impl ExtendsSubpassDependency2 for MemoryBarrier2<'_> {} impl<'a> MemoryBarrier2<'a> { #[inline] pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2) -> Self { self.src_stage_mask = src_stage_mask; self } #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags2) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2) -> Self { self.dst_stage_mask = dst_stage_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2) -> Self { self.dst_access_mask = dst_access_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageMemoryBarrier2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_stage_mask: PipelineStageFlags2, pub src_access_mask: AccessFlags2, pub dst_stage_mask: PipelineStageFlags2, pub dst_access_mask: AccessFlags2, pub old_layout: ImageLayout, pub new_layout: ImageLayout, pub src_queue_family_index: u32, pub dst_queue_family_index: u32, pub image: Image, pub subresource_range: ImageSubresourceRange, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageMemoryBarrier2<'_> {} unsafe impl Sync for ImageMemoryBarrier2<'_> {} impl ::core::default::Default for ImageMemoryBarrier2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_stage_mask: PipelineStageFlags2::default(), src_access_mask: AccessFlags2::default(), dst_stage_mask: PipelineStageFlags2::default(), dst_access_mask: AccessFlags2::default(), old_layout: ImageLayout::default(), new_layout: ImageLayout::default(), src_queue_family_index: u32::default(), dst_queue_family_index: u32::default(), image: Image::default(), subresource_range: ImageSubresourceRange::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageMemoryBarrier2<'a> { const 
STRUCTURE_TYPE: StructureType = StructureType::IMAGE_MEMORY_BARRIER_2; } pub unsafe trait ExtendsImageMemoryBarrier2 {} impl<'a> ImageMemoryBarrier2<'a> { #[inline] pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2) -> Self { self.src_stage_mask = src_stage_mask; self } #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags2) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2) -> Self { self.dst_stage_mask = dst_stage_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2) -> Self { self.dst_access_mask = dst_access_mask; self } #[inline] pub fn old_layout(mut self, old_layout: ImageLayout) -> Self { self.old_layout = old_layout; self } #[inline] pub fn new_layout(mut self, new_layout: ImageLayout) -> Self { self.new_layout = new_layout; self } #[inline] pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self { self.src_queue_family_index = src_queue_family_index; self } #[inline] pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self { self.dst_queue_family_index = dst_queue_family_index; self } #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self { self.subresource_range = subresource_range; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferMemoryBarrier2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src_stage_mask: PipelineStageFlags2, pub src_access_mask: AccessFlags2, pub dst_stage_mask: PipelineStageFlags2, pub dst_access_mask: AccessFlags2, pub src_queue_family_index: u32, pub dst_queue_family_index: u32, pub buffer: Buffer, pub offset: DeviceSize, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferMemoryBarrier2<'_> {} unsafe impl Sync for BufferMemoryBarrier2<'_> {} impl ::core::default::Default for BufferMemoryBarrier2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src_stage_mask: PipelineStageFlags2::default(), src_access_mask: AccessFlags2::default(), dst_stage_mask: PipelineStageFlags2::default(), dst_access_mask: AccessFlags2::default(), src_queue_family_index: u32::default(), dst_queue_family_index: u32::default(), buffer: Buffer::default(), offset: DeviceSize::default(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferMemoryBarrier2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_MEMORY_BARRIER_2; } pub unsafe trait ExtendsBufferMemoryBarrier2 {} impl<'a> BufferMemoryBarrier2<'a> { #[inline] pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2) -> Self { 
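// Usage sketch for `ImageMemoryBarrier2` (synchronization2): a typical
// undefined -> color-attachment layout transition. `image` is an assumed `vk::Image`;
// the barrier is recorded through a `DependencyInfo` further below.
//
//     let subresource_range = vk::ImageSubresourceRange::default()
//         .aspect_mask(vk::ImageAspectFlags::COLOR)
//         .level_count(1)
//         .layer_count(1);
//     let image_barrier = vk::ImageMemoryBarrier2::default()
//         .src_stage_mask(vk::PipelineStageFlags2::TOP_OF_PIPE)
//         .src_access_mask(vk::AccessFlags2::NONE)
//         .dst_stage_mask(vk::PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT)
//         .dst_access_mask(vk::AccessFlags2::COLOR_ATTACHMENT_WRITE)
//         .old_layout(vk::ImageLayout::UNDEFINED)
//         .new_layout(vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL)
//         .image(image)
//         .subresource_range(subresource_range);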
self.src_stage_mask = src_stage_mask; self } #[inline] pub fn src_access_mask(mut self, src_access_mask: AccessFlags2) -> Self { self.src_access_mask = src_access_mask; self } #[inline] pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2) -> Self { self.dst_stage_mask = dst_stage_mask; self } #[inline] pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2) -> Self { self.dst_access_mask = dst_access_mask; self } #[inline] pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self { self.src_queue_family_index = src_queue_family_index; self } #[inline] pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self { self.dst_queue_family_index = dst_queue_family_index; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DependencyInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub dependency_flags: DependencyFlags, pub memory_barrier_count: u32, pub p_memory_barriers: *const MemoryBarrier2<'a>, pub buffer_memory_barrier_count: u32, pub p_buffer_memory_barriers: *const BufferMemoryBarrier2<'a>, pub image_memory_barrier_count: u32, pub p_image_memory_barriers: *const ImageMemoryBarrier2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DependencyInfo<'_> {} unsafe impl Sync for DependencyInfo<'_> {} impl ::core::default::Default for DependencyInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), dependency_flags: DependencyFlags::default(), memory_barrier_count: u32::default(), p_memory_barriers: ::core::ptr::null(), buffer_memory_barrier_count: u32::default(), p_buffer_memory_barriers: ::core::ptr::null(), image_memory_barrier_count: u32::default(), p_image_memory_barriers: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DependencyInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEPENDENCY_INFO; } impl<'a> DependencyInfo<'a> { #[inline] pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self { self.dependency_flags = dependency_flags; self } #[inline] pub fn memory_barriers(mut self, memory_barriers: &'a [MemoryBarrier2<'a>]) -> Self { self.memory_barrier_count = memory_barriers.len() as _; self.p_memory_barriers = memory_barriers.as_ptr(); self } #[inline] pub fn buffer_memory_barriers( mut self, buffer_memory_barriers: &'a [BufferMemoryBarrier2<'a>], ) -> Self { self.buffer_memory_barrier_count = buffer_memory_barriers.len() as _; self.p_buffer_memory_barriers = buffer_memory_barriers.as_ptr(); self } #[inline] pub fn 
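// Usage sketch for `DependencyInfo`: the three barrier slices default to empty, and the
// assembled info is passed to `cmd_pipeline_barrier2` (Vulkan 1.3 or
// VK_KHR_synchronization2). `device`, `cmd_buffer` and the `image_barrier` from the
// sketch above are assumed.
//
//     let dependency_info = vk::DependencyInfo::default()
//         .image_memory_barriers(core::slice::from_ref(&image_barrier));
//     unsafe { device.cmd_pipeline_barrier2(cmd_buffer, &dependency_info) };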
image_memory_barriers( mut self, image_memory_barriers: &'a [ImageMemoryBarrier2<'a>], ) -> Self { self.image_memory_barrier_count = image_memory_barriers.len() as _; self.p_image_memory_barriers = image_memory_barriers.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SemaphoreSubmitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub value: u64, pub stage_mask: PipelineStageFlags2, pub device_index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SemaphoreSubmitInfo<'_> {} unsafe impl Sync for SemaphoreSubmitInfo<'_> {} impl ::core::default::Default for SemaphoreSubmitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), value: u64::default(), stage_mask: PipelineStageFlags2::default(), device_index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SemaphoreSubmitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_SUBMIT_INFO; } impl<'a> SemaphoreSubmitInfo<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn value(mut self, value: u64) -> Self { self.value = value; self } #[inline] pub fn stage_mask(mut self, stage_mask: PipelineStageFlags2) -> Self { self.stage_mask = stage_mask; self } #[inline] pub fn device_index(mut self, device_index: u32) -> Self { self.device_index = device_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferSubmitInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub command_buffer: CommandBuffer, pub device_mask: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferSubmitInfo<'_> {} unsafe impl Sync for CommandBufferSubmitInfo<'_> {} impl ::core::default::Default for CommandBufferSubmitInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), command_buffer: CommandBuffer::default(), device_mask: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferSubmitInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_SUBMIT_INFO; } pub unsafe trait ExtendsCommandBufferSubmitInfo {} impl<'a> CommandBufferSubmitInfo<'a> { #[inline] pub fn command_buffer(mut self, command_buffer: CommandBuffer) -> Self { self.command_buffer = command_buffer; self } #[inline] pub fn device_mask(mut self, device_mask: u32) -> Self { self.device_mask = device_mask; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubmitInfo2<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: SubmitFlags, pub wait_semaphore_info_count: u32, pub p_wait_semaphore_infos: *const SemaphoreSubmitInfo<'a>, pub command_buffer_info_count: u32, pub p_command_buffer_infos: *const CommandBufferSubmitInfo<'a>, pub signal_semaphore_info_count: u32, pub p_signal_semaphore_infos: *const SemaphoreSubmitInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubmitInfo2<'_> {} unsafe impl Sync for SubmitInfo2<'_> {} impl ::core::default::Default for SubmitInfo2<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: SubmitFlags::default(), wait_semaphore_info_count: u32::default(), p_wait_semaphore_infos: ::core::ptr::null(), command_buffer_info_count: u32::default(), p_command_buffer_infos: ::core::ptr::null(), signal_semaphore_info_count: u32::default(), p_signal_semaphore_infos: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubmitInfo2<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBMIT_INFO_2; } pub unsafe trait ExtendsSubmitInfo2 {} impl<'a> SubmitInfo2<'a> { #[inline] pub fn flags(mut self, flags: SubmitFlags) -> Self { self.flags = flags; self } #[inline] pub fn wait_semaphore_infos( mut self, wait_semaphore_infos: &'a [SemaphoreSubmitInfo<'a>], ) -> Self { self.wait_semaphore_info_count = wait_semaphore_infos.len() as _; self.p_wait_semaphore_infos = wait_semaphore_infos.as_ptr(); self } #[inline] pub fn command_buffer_infos( mut self, command_buffer_infos: &'a [CommandBufferSubmitInfo<'a>], ) -> Self { self.command_buffer_info_count = command_buffer_infos.len() as _; self.p_command_buffer_infos = command_buffer_infos.as_ptr(); self } #[inline] pub fn signal_semaphore_infos( mut self, signal_semaphore_infos: &'a [SemaphoreSubmitInfo<'a>], ) -> Self { self.signal_semaphore_info_count = signal_semaphore_infos.len() as _; self.p_signal_semaphore_infos = signal_semaphore_infos.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueueFamilyCheckpointProperties2NV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub checkpoint_execution_stage_mask: PipelineStageFlags2, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueueFamilyCheckpointProperties2NV<'_> {} unsafe impl Sync for QueueFamilyCheckpointProperties2NV<'_> {} impl ::core::default::Default for QueueFamilyCheckpointProperties2NV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), checkpoint_execution_stage_mask: PipelineStageFlags2::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueueFamilyCheckpointProperties2NV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV; } unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointProperties2NV<'_> {} impl<'a> QueueFamilyCheckpointProperties2NV<'a> { #[inline] pub fn checkpoint_execution_stage_mask( mut self, checkpoint_execution_stage_mask: PipelineStageFlags2, ) -> Self { self.checkpoint_execution_stage_mask = checkpoint_execution_stage_mask; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CheckpointData2NV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub stage: PipelineStageFlags2, pub p_checkpoint_marker: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CheckpointData2NV<'_> {} unsafe impl Sync for CheckpointData2NV<'_> {} impl ::core::default::Default for CheckpointData2NV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), stage: PipelineStageFlags2::default(), p_checkpoint_marker: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CheckpointData2NV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CHECKPOINT_DATA_2_NV; } impl<'a> CheckpointData2NV<'a> { #[inline] pub fn stage(mut self, stage: PipelineStageFlags2) -> Self { self.stage = stage; self } #[inline] pub fn checkpoint_marker(mut self, checkpoint_marker: *mut c_void) -> Self { self.p_checkpoint_marker = checkpoint_marker; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSynchronization2Features<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub synchronization2: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSynchronization2Features<'_> {} unsafe impl Sync for PhysicalDeviceSynchronization2Features<'_> {} impl ::core::default::Default for PhysicalDeviceSynchronization2Features<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), synchronization2: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSynchronization2Features<'a> { const STRUCTURE_TYPE: StructureType = 
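// Usage sketch for `SubmitInfo2` with `SemaphoreSubmitInfo` and `CommandBufferSubmitInfo`:
// per-semaphore stage masks replace the old `pWaitDstStageMask` array. `queue`,
// `cmd_buffer`, `wait_sem` and `signal_sem` are assumed handles; `queue_submit2`
// requires Vulkan 1.3 or synchronization2.
//
//     let wait_info = vk::SemaphoreSubmitInfo::default()
//         .semaphore(wait_sem)
//         .stage_mask(vk::PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT);
//     let signal_info = vk::SemaphoreSubmitInfo::default()
//         .semaphore(signal_sem)
//         .stage_mask(vk::PipelineStageFlags2::ALL_COMMANDS);
//     let cmd_info = vk::CommandBufferSubmitInfo::default().command_buffer(cmd_buffer);
//     let submit = vk::SubmitInfo2::default()
//         .wait_semaphore_infos(core::slice::from_ref(&wait_info))
//         .command_buffer_infos(core::slice::from_ref(&cmd_info))
//         .signal_semaphore_infos(core::slice::from_ref(&signal_info));
//     unsafe { device.queue_submit2(queue, core::slice::from_ref(&submit), vk::Fence::null())? };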
StructureType::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSynchronization2Features<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSynchronization2Features<'_> {} impl<'a> PhysicalDeviceSynchronization2Features<'a> { #[inline] pub fn synchronization2(mut self, synchronization2: bool) -> Self { self.synchronization2 = synchronization2.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceHostImageCopyFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub host_image_copy: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceHostImageCopyFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceHostImageCopyFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceHostImageCopyFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), host_image_copy: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceHostImageCopyFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceHostImageCopyFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceHostImageCopyFeaturesEXT<'_> {} impl<'a> PhysicalDeviceHostImageCopyFeaturesEXT<'a> { #[inline] pub fn host_image_copy(mut self, host_image_copy: bool) -> Self { self.host_image_copy = host_image_copy.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceHostImageCopyPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub copy_src_layout_count: u32, pub p_copy_src_layouts: *mut ImageLayout, pub copy_dst_layout_count: u32, pub p_copy_dst_layouts: *mut ImageLayout, pub optimal_tiling_layout_uuid: [u8; UUID_SIZE], pub identical_memory_type_requirements: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceHostImageCopyPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceHostImageCopyPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceHostImageCopyPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), copy_src_layout_count: u32::default(), p_copy_src_layouts: ::core::ptr::null_mut(), copy_dst_layout_count: u32::default(), p_copy_dst_layouts: ::core::ptr::null_mut(), optimal_tiling_layout_uuid: unsafe { ::core::mem::zeroed() }, identical_memory_type_requirements: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceHostImageCopyPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceHostImageCopyPropertiesEXT<'_> {} impl<'a> PhysicalDeviceHostImageCopyPropertiesEXT<'a> { #[inline] pub fn copy_src_layouts(mut self, copy_src_layouts: &'a mut [ImageLayout]) -> Self { self.copy_src_layout_count = copy_src_layouts.len() as _; self.p_copy_src_layouts = copy_src_layouts.as_mut_ptr(); self } #[inline] pub fn copy_dst_layouts(mut self, copy_dst_layouts: &'a mut [ImageLayout]) -> Self { self.copy_dst_layout_count = copy_dst_layouts.len() as _; self.p_copy_dst_layouts = copy_dst_layouts.as_mut_ptr(); self } #[inline] pub fn 
optimal_tiling_layout_uuid( mut self, optimal_tiling_layout_uuid: [u8; UUID_SIZE], ) -> Self { self.optimal_tiling_layout_uuid = optimal_tiling_layout_uuid; self } #[inline] pub fn identical_memory_type_requirements( mut self, identical_memory_type_requirements: bool, ) -> Self { self.identical_memory_type_requirements = identical_memory_type_requirements.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryToImageCopyEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_host_pointer: *const c_void, pub memory_row_length: u32, pub memory_image_height: u32, pub image_subresource: ImageSubresourceLayers, pub image_offset: Offset3D, pub image_extent: Extent3D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryToImageCopyEXT<'_> {} unsafe impl Sync for MemoryToImageCopyEXT<'_> {} impl ::core::default::Default for MemoryToImageCopyEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_host_pointer: ::core::ptr::null(), memory_row_length: u32::default(), memory_image_height: u32::default(), image_subresource: ImageSubresourceLayers::default(), image_offset: Offset3D::default(), image_extent: Extent3D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryToImageCopyEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_TO_IMAGE_COPY_EXT; } impl<'a> MemoryToImageCopyEXT<'a> { #[inline] pub fn host_pointer(mut self, host_pointer: *const c_void) -> Self { self.p_host_pointer = host_pointer; self } #[inline] pub fn memory_row_length(mut self, memory_row_length: u32) -> Self { self.memory_row_length = memory_row_length; self } #[inline] pub fn memory_image_height(mut self, memory_image_height: u32) -> Self { self.memory_image_height = memory_image_height; self } #[inline] pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self { self.image_subresource = image_subresource; self } #[inline] pub fn image_offset(mut self, image_offset: Offset3D) -> Self { self.image_offset = image_offset; self } #[inline] pub fn image_extent(mut self, image_extent: Extent3D) -> Self { self.image_extent = image_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageToMemoryCopyEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_host_pointer: *mut c_void, pub memory_row_length: u32, pub memory_image_height: u32, pub image_subresource: ImageSubresourceLayers, pub image_offset: Offset3D, pub image_extent: Extent3D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageToMemoryCopyEXT<'_> {} unsafe impl Sync for ImageToMemoryCopyEXT<'_> {} impl ::core::default::Default for ImageToMemoryCopyEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_host_pointer: ::core::ptr::null_mut(), memory_row_length: u32::default(), memory_image_height: u32::default(), image_subresource: ImageSubresourceLayers::default(), image_offset: Offset3D::default(), image_extent: Extent3D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageToMemoryCopyEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_TO_MEMORY_COPY_EXT; } impl<'a> ImageToMemoryCopyEXT<'a> { #[inline] pub fn host_pointer(mut self, host_pointer: *mut c_void) -> Self { self.p_host_pointer = host_pointer; self } #[inline] pub fn 
memory_row_length(mut self, memory_row_length: u32) -> Self { self.memory_row_length = memory_row_length; self } #[inline] pub fn memory_image_height(mut self, memory_image_height: u32) -> Self { self.memory_image_height = memory_image_height; self } #[inline] pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self { self.image_subresource = image_subresource; self } #[inline] pub fn image_offset(mut self, image_offset: Offset3D) -> Self { self.image_offset = image_offset; self } #[inline] pub fn image_extent(mut self, image_extent: Extent3D) -> Self { self.image_extent = image_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyMemoryToImageInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: HostImageCopyFlagsEXT, pub dst_image: Image, pub dst_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const MemoryToImageCopyEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyMemoryToImageInfoEXT<'_> {} unsafe impl Sync for CopyMemoryToImageInfoEXT<'_> {} impl ::core::default::Default for CopyMemoryToImageInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: HostImageCopyFlagsEXT::default(), dst_image: Image::default(), dst_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyMemoryToImageInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_MEMORY_TO_IMAGE_INFO_EXT; } impl<'a> CopyMemoryToImageInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: HostImageCopyFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn dst_image(mut self, dst_image: Image) -> Self { self.dst_image = dst_image; self } #[inline] pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self { self.dst_image_layout = dst_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [MemoryToImageCopyEXT<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyImageToMemoryInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: HostImageCopyFlagsEXT, pub src_image: Image, pub src_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const ImageToMemoryCopyEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyImageToMemoryInfoEXT<'_> {} unsafe impl Sync for CopyImageToMemoryInfoEXT<'_> {} impl ::core::default::Default for CopyImageToMemoryInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: HostImageCopyFlagsEXT::default(), src_image: Image::default(), src_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyImageToMemoryInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_IMAGE_TO_MEMORY_INFO_EXT; } impl<'a> CopyImageToMemoryInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: HostImageCopyFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn src_image(mut self, src_image: Image) -> Self { self.src_image = src_image; self } #[inline] pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self { 
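// Usage sketch for VK_EXT_host_image_copy: `MemoryToImageCopyEXT` regions point at host
// memory and are gathered into a `CopyMemoryToImageInfoEXT` that is handed to
// `vkCopyMemoryToImageEXT` through the extension's device wrapper. `pixels` (a host byte
// slice) and `image` are assumed; the destination layout must be one advertised in
// `PhysicalDeviceHostImageCopyPropertiesEXT::p_copy_dst_layouts`.
//
//     let region = vk::MemoryToImageCopyEXT::default()
//         .host_pointer(pixels.as_ptr().cast())
//         .image_subresource(
//             vk::ImageSubresourceLayers::default()
//                 .aspect_mask(vk::ImageAspectFlags::COLOR)
//                 .layer_count(1),
//         )
//         .image_extent(vk::Extent3D { width: 256, height: 256, depth: 1 });
//     let copy_info = vk::CopyMemoryToImageInfoEXT::default()
//         .dst_image(image)
//         .dst_image_layout(vk::ImageLayout::GENERAL)
//         .regions(core::slice::from_ref(&region));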
self.src_image_layout = src_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [ImageToMemoryCopyEXT<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyImageToImageInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: HostImageCopyFlagsEXT, pub src_image: Image, pub src_image_layout: ImageLayout, pub dst_image: Image, pub dst_image_layout: ImageLayout, pub region_count: u32, pub p_regions: *const ImageCopy2<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyImageToImageInfoEXT<'_> {} unsafe impl Sync for CopyImageToImageInfoEXT<'_> {} impl ::core::default::Default for CopyImageToImageInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: HostImageCopyFlagsEXT::default(), src_image: Image::default(), src_image_layout: ImageLayout::default(), dst_image: Image::default(), dst_image_layout: ImageLayout::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyImageToImageInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_IMAGE_TO_IMAGE_INFO_EXT; } impl<'a> CopyImageToImageInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: HostImageCopyFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn src_image(mut self, src_image: Image) -> Self { self.src_image = src_image; self } #[inline] pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self { self.src_image_layout = src_image_layout; self } #[inline] pub fn dst_image(mut self, dst_image: Image) -> Self { self.dst_image = dst_image; self } #[inline] pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self { self.dst_image_layout = dst_image_layout; self } #[inline] pub fn regions(mut self, regions: &'a [ImageCopy2<'a>]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct HostImageLayoutTransitionInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub old_layout: ImageLayout, pub new_layout: ImageLayout, pub subresource_range: ImageSubresourceRange, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for HostImageLayoutTransitionInfoEXT<'_> {} unsafe impl Sync for HostImageLayoutTransitionInfoEXT<'_> {} impl ::core::default::Default for HostImageLayoutTransitionInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), old_layout: ImageLayout::default(), new_layout: ImageLayout::default(), subresource_range: ImageSubresourceRange::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for HostImageLayoutTransitionInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT; } impl<'a> HostImageLayoutTransitionInfoEXT<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn old_layout(mut self, old_layout: ImageLayout) -> Self { self.old_layout = old_layout; self } #[inline] pub fn new_layout(mut self, new_layout: ImageLayout) -> Self { self.new_layout = new_layout; self } #[inline] pub fn subresource_range(mut self, subresource_range: 
ImageSubresourceRange) -> Self { self.subresource_range = subresource_range; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubresourceHostMemcpySizeEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubresourceHostMemcpySizeEXT<'_> {} unsafe impl Sync for SubresourceHostMemcpySizeEXT<'_> {} impl ::core::default::Default for SubresourceHostMemcpySizeEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubresourceHostMemcpySizeEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBRESOURCE_HOST_MEMCPY_SIZE_EXT; } unsafe impl ExtendsSubresourceLayout2KHR for SubresourceHostMemcpySizeEXT<'_> {} impl<'a> SubresourceHostMemcpySizeEXT<'a> { #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct HostImageCopyDevicePerformanceQueryEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub optimal_device_access: Bool32, pub identical_memory_layout: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for HostImageCopyDevicePerformanceQueryEXT<'_> {} unsafe impl Sync for HostImageCopyDevicePerformanceQueryEXT<'_> {} impl ::core::default::Default for HostImageCopyDevicePerformanceQueryEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), optimal_device_access: Bool32::default(), identical_memory_layout: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for HostImageCopyDevicePerformanceQueryEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT; } unsafe impl ExtendsImageFormatProperties2 for HostImageCopyDevicePerformanceQueryEXT<'_> {} impl<'a> HostImageCopyDevicePerformanceQueryEXT<'a> { #[inline] pub fn optimal_device_access(mut self, optimal_device_access: bool) -> Self { self.optimal_device_access = optimal_device_access.into(); self } #[inline] pub fn identical_memory_layout(mut self, identical_memory_layout: bool) -> Self { self.identical_memory_layout = identical_memory_layout.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub primitives_generated_query: Bool32, pub primitives_generated_query_with_rasterizer_discard: Bool32, pub primitives_generated_query_with_non_zero_streams: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), primitives_generated_query: Bool32::default(), primitives_generated_query_with_rasterizer_discard: Bool32::default(), primitives_generated_query_with_non_zero_streams: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'a> { const 
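// Usage sketch for output structs such as `HostImageCopyDevicePerformanceQueryEXT`: they
// are chained onto the queried root struct with `push_next` and read back after the
// call. `instance`, `pdevice` and a filled `vk::PhysicalDeviceImageFormatInfo2` named
// `format_info` are assumed.
//
//     let mut perf_query = vk::HostImageCopyDevicePerformanceQueryEXT::default();
//     let mut props2 = vk::ImageFormatProperties2::default().push_next(&mut perf_query);
//     unsafe {
//         instance.get_physical_device_image_format_properties2(pdevice, &format_info, &mut props2)?
//     };
//     let optimal = perf_query.optimal_device_access == vk::TRUE;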
STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'_> {} impl<'a> PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT<'a> { #[inline] pub fn primitives_generated_query(mut self, primitives_generated_query: bool) -> Self { self.primitives_generated_query = primitives_generated_query.into(); self } #[inline] pub fn primitives_generated_query_with_rasterizer_discard( mut self, primitives_generated_query_with_rasterizer_discard: bool, ) -> Self { self.primitives_generated_query_with_rasterizer_discard = primitives_generated_query_with_rasterizer_discard.into(); self } #[inline] pub fn primitives_generated_query_with_non_zero_streams( mut self, primitives_generated_query_with_non_zero_streams: bool, ) -> Self { self.primitives_generated_query_with_non_zero_streams = primitives_generated_query_with_non_zero_streams.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceLegacyDitheringFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub legacy_dithering: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceLegacyDitheringFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceLegacyDitheringFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceLegacyDitheringFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), legacy_dithering: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceLegacyDitheringFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLegacyDitheringFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLegacyDitheringFeaturesEXT<'_> {} impl<'a> PhysicalDeviceLegacyDitheringFeaturesEXT<'a> { #[inline] pub fn legacy_dithering(mut self, legacy_dithering: bool) -> Self { self.legacy_dithering = legacy_dithering.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub multisampled_render_to_single_sampled: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), multisampled_render_to_single_sampled: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'_> { } impl<'a> 
PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT<'a> { #[inline] pub fn multisampled_render_to_single_sampled( mut self, multisampled_render_to_single_sampled: bool, ) -> Self { self.multisampled_render_to_single_sampled = multisampled_render_to_single_sampled.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubpassResolvePerformanceQueryEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub optimal: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubpassResolvePerformanceQueryEXT<'_> {} unsafe impl Sync for SubpassResolvePerformanceQueryEXT<'_> {} impl ::core::default::Default for SubpassResolvePerformanceQueryEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), optimal: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubpassResolvePerformanceQueryEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT; } unsafe impl ExtendsFormatProperties2 for SubpassResolvePerformanceQueryEXT<'_> {} impl<'a> SubpassResolvePerformanceQueryEXT<'a> { #[inline] pub fn optimal(mut self, optimal: bool) -> Self { self.optimal = optimal.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MultisampledRenderToSingleSampledInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub multisampled_render_to_single_sampled_enable: Bool32, pub rasterization_samples: SampleCountFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MultisampledRenderToSingleSampledInfoEXT<'_> {} unsafe impl Sync for MultisampledRenderToSingleSampledInfoEXT<'_> {} impl ::core::default::Default for MultisampledRenderToSingleSampledInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), multisampled_render_to_single_sampled_enable: Bool32::default(), rasterization_samples: SampleCountFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MultisampledRenderToSingleSampledInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT; } unsafe impl ExtendsSubpassDescription2 for MultisampledRenderToSingleSampledInfoEXT<'_> {} unsafe impl ExtendsRenderingInfo for MultisampledRenderToSingleSampledInfoEXT<'_> {} impl<'a> MultisampledRenderToSingleSampledInfoEXT<'a> { #[inline] pub fn multisampled_render_to_single_sampled_enable( mut self, multisampled_render_to_single_sampled_enable: bool, ) -> Self { self.multisampled_render_to_single_sampled_enable = multisampled_render_to_single_sampled_enable.into(); self } #[inline] pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self { self.rasterization_samples = rasterization_samples; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelineProtectedAccessFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_protected_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelineProtectedAccessFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePipelineProtectedAccessFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePipelineProtectedAccessFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_protected_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelineProtectedAccessFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineProtectedAccessFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineProtectedAccessFeaturesEXT<'_> {} impl<'a> PhysicalDevicePipelineProtectedAccessFeaturesEXT<'a> { #[inline] pub fn pipeline_protected_access(mut self, pipeline_protected_access: bool) -> Self { self.pipeline_protected_access = pipeline_protected_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueueFamilyVideoPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub video_codec_operations: VideoCodecOperationFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueueFamilyVideoPropertiesKHR<'_> {} unsafe impl Sync for QueueFamilyVideoPropertiesKHR<'_> {} impl ::core::default::Default for QueueFamilyVideoPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), video_codec_operations: VideoCodecOperationFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueueFamilyVideoPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_VIDEO_PROPERTIES_KHR; } unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyVideoPropertiesKHR<'_> {} impl<'a> QueueFamilyVideoPropertiesKHR<'a> { #[inline] pub fn video_codec_operations( mut self, video_codec_operations: VideoCodecOperationFlagsKHR, ) -> Self { self.video_codec_operations = video_codec_operations; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueueFamilyQueryResultStatusPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub query_result_status_support: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueueFamilyQueryResultStatusPropertiesKHR<'_> {} unsafe impl Sync for QueueFamilyQueryResultStatusPropertiesKHR<'_> {} impl ::core::default::Default for QueueFamilyQueryResultStatusPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), query_result_status_support: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueueFamilyQueryResultStatusPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR; } unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyQueryResultStatusPropertiesKHR<'_> {} impl<'a> QueueFamilyQueryResultStatusPropertiesKHR<'a> { #[inline] pub fn query_result_status_support(mut self, query_result_status_support: bool) -> Self { self.query_result_status_support = query_result_status_support.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoProfileListInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub profile_count: u32, pub p_profiles: *const VideoProfileInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoProfileListInfoKHR<'_> {} unsafe impl Sync for VideoProfileListInfoKHR<'_> {} impl 
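// `VideoProfileListInfoKHR`, defined just below, exposes a slice-based `profiles` setter
// that stores both `profile_count` and `p_profiles` from one borrowed slice. A hedged
// sketch, assuming `decode_profile` is a previously built `vk::VideoProfileInfoKHR`:
//
//     let profiles = [decode_profile];
//     let profile_list = vk::VideoProfileListInfoKHR::default().profiles(&profiles);
//     // `profile_list` can then extend e.g. `vk::ImageCreateInfo` via its `push_next`.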
::core::default::Default for VideoProfileListInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), profile_count: u32::default(), p_profiles: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoProfileListInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_PROFILE_LIST_INFO_KHR; } unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for VideoProfileListInfoKHR<'_> {} unsafe impl ExtendsPhysicalDeviceVideoFormatInfoKHR for VideoProfileListInfoKHR<'_> {} unsafe impl ExtendsImageCreateInfo for VideoProfileListInfoKHR<'_> {} unsafe impl ExtendsBufferCreateInfo for VideoProfileListInfoKHR<'_> {} impl<'a> VideoProfileListInfoKHR<'a> { #[inline] pub fn profiles(mut self, profiles: &'a [VideoProfileInfoKHR<'a>]) -> Self { self.profile_count = profiles.len() as _; self.p_profiles = profiles.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVideoFormatInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_usage: ImageUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVideoFormatInfoKHR<'_> {} unsafe impl Sync for PhysicalDeviceVideoFormatInfoKHR<'_> {} impl ::core::default::Default for PhysicalDeviceVideoFormatInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_usage: ImageUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVideoFormatInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR; } pub unsafe trait ExtendsPhysicalDeviceVideoFormatInfoKHR {} impl<'a> PhysicalDeviceVideoFormatInfoKHR<'a> { #[inline] pub fn image_usage(mut self, image_usage: ImageUsageFlags) -> Self { self.image_usage = image_usage; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoFormatPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format: Format, pub component_mapping: ComponentMapping, pub image_create_flags: ImageCreateFlags, pub image_type: ImageType, pub image_tiling: ImageTiling, pub image_usage_flags: ImageUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoFormatPropertiesKHR<'_> {} unsafe impl Sync for VideoFormatPropertiesKHR<'_> {} impl ::core::default::Default for VideoFormatPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format: Format::default(), component_mapping: ComponentMapping::default(), image_create_flags: ImageCreateFlags::default(), image_type: ImageType::default(), image_tiling: ImageTiling::default(), image_usage_flags: ImageUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoFormatPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_FORMAT_PROPERTIES_KHR; } impl<'a> VideoFormatPropertiesKHR<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn component_mapping(mut self, component_mapping: ComponentMapping) -> Self { self.component_mapping = component_mapping; self } #[inline] pub fn image_create_flags(mut self, image_create_flags: ImageCreateFlags) -> Self { self.image_create_flags = image_create_flags; self } #[inline] pub fn image_type(mut self, image_type: ImageType) -> Self { self.image_type = image_type; self } #[inline] pub fn image_tiling(mut self, image_tiling: ImageTiling) -> Self { self.image_tiling = image_tiling; self } #[inline] pub fn image_usage_flags(mut self, image_usage_flags: ImageUsageFlags) -> Self { self.image_usage_flags = image_usage_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoProfileInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub video_codec_operation: VideoCodecOperationFlagsKHR, pub chroma_subsampling: VideoChromaSubsamplingFlagsKHR, pub luma_bit_depth: VideoComponentBitDepthFlagsKHR, pub chroma_bit_depth: VideoComponentBitDepthFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoProfileInfoKHR<'_> {} unsafe impl Sync for VideoProfileInfoKHR<'_> {} impl ::core::default::Default for VideoProfileInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), video_codec_operation: VideoCodecOperationFlagsKHR::default(), chroma_subsampling: VideoChromaSubsamplingFlagsKHR::default(), luma_bit_depth: VideoComponentBitDepthFlagsKHR::default(), chroma_bit_depth: VideoComponentBitDepthFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoProfileInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_PROFILE_INFO_KHR; } unsafe impl ExtendsQueryPoolCreateInfo for 
VideoProfileInfoKHR<'_> {} pub unsafe trait ExtendsVideoProfileInfoKHR {} impl<'a> VideoProfileInfoKHR<'a> { #[inline] pub fn video_codec_operation( mut self, video_codec_operation: VideoCodecOperationFlagsKHR, ) -> Self { self.video_codec_operation = video_codec_operation; self } #[inline] pub fn chroma_subsampling( mut self, chroma_subsampling: VideoChromaSubsamplingFlagsKHR, ) -> Self { self.chroma_subsampling = chroma_subsampling; self } #[inline] pub fn luma_bit_depth(mut self, luma_bit_depth: VideoComponentBitDepthFlagsKHR) -> Self { self.luma_bit_depth = luma_bit_depth; self } #[inline] pub fn chroma_bit_depth(mut self, chroma_bit_depth: VideoComponentBitDepthFlagsKHR) -> Self { self.chroma_bit_depth = chroma_bit_depth; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoCapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: VideoCapabilityFlagsKHR, pub min_bitstream_buffer_offset_alignment: DeviceSize, pub min_bitstream_buffer_size_alignment: DeviceSize, pub picture_access_granularity: Extent2D, pub min_coded_extent: Extent2D, pub max_coded_extent: Extent2D, pub max_dpb_slots: u32, pub max_active_reference_pictures: u32, pub std_header_version: ExtensionProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoCapabilitiesKHR<'_> {} unsafe impl Sync for VideoCapabilitiesKHR<'_> {} impl ::core::default::Default for VideoCapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: VideoCapabilityFlagsKHR::default(), min_bitstream_buffer_offset_alignment: DeviceSize::default(), min_bitstream_buffer_size_alignment: DeviceSize::default(), picture_access_granularity: Extent2D::default(), min_coded_extent: Extent2D::default(), max_coded_extent: Extent2D::default(), max_dpb_slots: u32::default(), max_active_reference_pictures: u32::default(), std_header_version: ExtensionProperties::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoCapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_CAPABILITIES_KHR; } pub unsafe trait ExtendsVideoCapabilitiesKHR {} impl<'a> VideoCapabilitiesKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoCapabilityFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn min_bitstream_buffer_offset_alignment( mut self, min_bitstream_buffer_offset_alignment: DeviceSize, ) -> Self { self.min_bitstream_buffer_offset_alignment = min_bitstream_buffer_offset_alignment; self } #[inline] pub fn min_bitstream_buffer_size_alignment( mut self, min_bitstream_buffer_size_alignment: DeviceSize, ) -> Self { self.min_bitstream_buffer_size_alignment = min_bitstream_buffer_size_alignment; self } #[inline] pub fn picture_access_granularity(mut self, picture_access_granularity: Extent2D) -> Self { 
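// `VideoCapabilitiesKHR` is an output structure: decode- and codec-specific capability
// structs (`ExtendsVideoCapabilitiesKHR`) are chained onto it with the `push_next`
// defined later in this impl, and the whole chain is filled in by the VK_KHR_video_queue
// capability query. A hedged sketch:
//
//     let mut decode_caps = vk::VideoDecodeCapabilitiesKHR::default();
//     let mut h264_caps = vk::VideoDecodeH264CapabilitiesKHR::default();
//     let mut caps = vk::VideoCapabilitiesKHR::default()
//         .push_next(&mut decode_caps)
//         .push_next(&mut h264_caps);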
self.picture_access_granularity = picture_access_granularity; self } #[inline] pub fn min_coded_extent(mut self, min_coded_extent: Extent2D) -> Self { self.min_coded_extent = min_coded_extent; self } #[inline] pub fn max_coded_extent(mut self, max_coded_extent: Extent2D) -> Self { self.max_coded_extent = max_coded_extent; self } #[inline] pub fn max_dpb_slots(mut self, max_dpb_slots: u32) -> Self { self.max_dpb_slots = max_dpb_slots; self } #[inline] pub fn max_active_reference_pictures(mut self, max_active_reference_pictures: u32) -> Self { self.max_active_reference_pictures = max_active_reference_pictures; self } #[inline] pub fn std_header_version(mut self, std_header_version: ExtensionProperties) -> Self { self.std_header_version = std_header_version; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoSessionMemoryRequirementsKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_bind_index: u32, pub memory_requirements: MemoryRequirements, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoSessionMemoryRequirementsKHR<'_> {} unsafe impl Sync for VideoSessionMemoryRequirementsKHR<'_> {} impl ::core::default::Default for VideoSessionMemoryRequirementsKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_bind_index: u32::default(), memory_requirements: MemoryRequirements::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoSessionMemoryRequirementsKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR; } impl<'a> VideoSessionMemoryRequirementsKHR<'a> { #[inline] pub fn memory_bind_index(mut self, memory_bind_index: u32) -> Self { self.memory_bind_index = memory_bind_index; self } #[inline] pub fn memory_requirements(mut self, memory_requirements: MemoryRequirements) -> Self { self.memory_requirements = memory_requirements; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindVideoSessionMemoryInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory_bind_index: u32, pub memory: DeviceMemory, pub memory_offset: DeviceSize, pub memory_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindVideoSessionMemoryInfoKHR<'_> {} unsafe impl Sync for BindVideoSessionMemoryInfoKHR<'_> {} impl ::core::default::Default for BindVideoSessionMemoryInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory_bind_index: u32::default(), memory: DeviceMemory::default(), memory_offset: DeviceSize::default(), memory_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindVideoSessionMemoryInfoKHR<'a> { const STRUCTURE_TYPE: 
StructureType = StructureType::BIND_VIDEO_SESSION_MEMORY_INFO_KHR; } impl<'a> BindVideoSessionMemoryInfoKHR<'a> { #[inline] pub fn memory_bind_index(mut self, memory_bind_index: u32) -> Self { self.memory_bind_index = memory_bind_index; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self { self.memory_offset = memory_offset; self } #[inline] pub fn memory_size(mut self, memory_size: DeviceSize) -> Self { self.memory_size = memory_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoPictureResourceInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub coded_offset: Offset2D, pub coded_extent: Extent2D, pub base_array_layer: u32, pub image_view_binding: ImageView, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoPictureResourceInfoKHR<'_> {} unsafe impl Sync for VideoPictureResourceInfoKHR<'_> {} impl ::core::default::Default for VideoPictureResourceInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), coded_offset: Offset2D::default(), coded_extent: Extent2D::default(), base_array_layer: u32::default(), image_view_binding: ImageView::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoPictureResourceInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_PICTURE_RESOURCE_INFO_KHR; } impl<'a> VideoPictureResourceInfoKHR<'a> { #[inline] pub fn coded_offset(mut self, coded_offset: Offset2D) -> Self { self.coded_offset = coded_offset; self } #[inline] pub fn coded_extent(mut self, coded_extent: Extent2D) -> Self { self.coded_extent = coded_extent; self } #[inline] pub fn base_array_layer(mut self, base_array_layer: u32) -> Self { self.base_array_layer = base_array_layer; self } #[inline] pub fn image_view_binding(mut self, image_view_binding: ImageView) -> Self { self.image_view_binding = image_view_binding; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoReferenceSlotInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub slot_index: i32, pub p_picture_resource: *const VideoPictureResourceInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoReferenceSlotInfoKHR<'_> {} unsafe impl Sync for VideoReferenceSlotInfoKHR<'_> {} impl ::core::default::Default for VideoReferenceSlotInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), slot_index: i32::default(), p_picture_resource: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoReferenceSlotInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_REFERENCE_SLOT_INFO_KHR; } pub unsafe trait ExtendsVideoReferenceSlotInfoKHR {} impl<'a> VideoReferenceSlotInfoKHR<'a> { #[inline] pub fn slot_index(mut self, slot_index: i32) -> Self { self.slot_index = slot_index; self } #[inline] pub fn picture_resource( mut self, picture_resource: &'a VideoPictureResourceInfoKHR<'a>, ) -> Self { self.p_picture_resource = picture_resource; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeCapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: VideoDecodeCapabilityFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeCapabilitiesKHR<'_> {} unsafe impl Sync for VideoDecodeCapabilitiesKHR<'_> {} impl ::core::default::Default for VideoDecodeCapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: VideoDecodeCapabilityFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeCapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_CAPABILITIES_KHR; } unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeCapabilitiesKHR<'_> {} impl<'a> VideoDecodeCapabilitiesKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoDecodeCapabilityFlagsKHR) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeUsageInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub video_usage_hints: VideoDecodeUsageFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeUsageInfoKHR<'_> {} unsafe impl Sync for VideoDecodeUsageInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeUsageInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), video_usage_hints: VideoDecodeUsageFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeUsageInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_USAGE_INFO_KHR; } unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeUsageInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeUsageInfoKHR<'_> {} impl<'a> VideoDecodeUsageInfoKHR<'a> { #[inline] pub fn video_usage_hints(mut self, video_usage_hints: VideoDecodeUsageFlagsKHR) -> Self { self.video_usage_hints = video_usage_hints; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoDecodeFlagsKHR, pub src_buffer: Buffer, pub src_buffer_offset: DeviceSize, pub src_buffer_range: DeviceSize, pub dst_picture_resource: VideoPictureResourceInfoKHR<'a>, pub p_setup_reference_slot: *const VideoReferenceSlotInfoKHR<'a>, pub reference_slot_count: u32, pub p_reference_slots: *const VideoReferenceSlotInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeInfoKHR<'_> {} unsafe impl Sync for VideoDecodeInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoDecodeFlagsKHR::default(), src_buffer: Buffer::default(), src_buffer_offset: DeviceSize::default(), 
src_buffer_range: DeviceSize::default(), dst_picture_resource: VideoPictureResourceInfoKHR::default(), p_setup_reference_slot: ::core::ptr::null(), reference_slot_count: u32::default(), p_reference_slots: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_INFO_KHR; } pub unsafe trait ExtendsVideoDecodeInfoKHR {} impl<'a> VideoDecodeInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoDecodeFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn src_buffer(mut self, src_buffer: Buffer) -> Self { self.src_buffer = src_buffer; self } #[inline] pub fn src_buffer_offset(mut self, src_buffer_offset: DeviceSize) -> Self { self.src_buffer_offset = src_buffer_offset; self } #[inline] pub fn src_buffer_range(mut self, src_buffer_range: DeviceSize) -> Self { self.src_buffer_range = src_buffer_range; self } #[inline] pub fn dst_picture_resource( mut self, dst_picture_resource: VideoPictureResourceInfoKHR<'a>, ) -> Self { self.dst_picture_resource = dst_picture_resource; self } #[inline] pub fn setup_reference_slot( mut self, setup_reference_slot: &'a VideoReferenceSlotInfoKHR<'a>, ) -> Self { self.p_setup_reference_slot = setup_reference_slot; self } #[inline] pub fn reference_slots(mut self, reference_slots: &'a [VideoReferenceSlotInfoKHR<'a>]) -> Self { self.reference_slot_count = reference_slots.len() as _; self.p_reference_slots = reference_slots.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVideoMaintenance1FeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub video_maintenance1: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVideoMaintenance1FeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceVideoMaintenance1FeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceVideoMaintenance1FeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), video_maintenance1: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVideoMaintenance1FeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVideoMaintenance1FeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVideoMaintenance1FeaturesKHR<'_> {} impl<'a> PhysicalDeviceVideoMaintenance1FeaturesKHR<'a> { #[inline] pub fn video_maintenance1(mut self, video_maintenance1: bool) -> Self { self.video_maintenance1 = video_maintenance1.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoInlineQueryInfoKHR<'a> { 
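// `VideoInlineQueryInfoKHR` (from VK_KHR_video_maintenance1) lets a decode or encode
// operation begin and end a query inline instead of wrapping it in vkCmdBeginQuery /
// vkCmdEndQuery. A hedged sketch, assuming an existing `query_pool`:
//
//     let mut inline_query = vk::VideoInlineQueryInfoKHR::default()
//         .query_pool(query_pool)
//         .first_query(0)
//         .query_count(1);
//     let decode_info = vk::VideoDecodeInfoKHR::default()
//         .push_next(&mut inline_query);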
pub s_type: StructureType, pub p_next: *const c_void, pub query_pool: QueryPool, pub first_query: u32, pub query_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoInlineQueryInfoKHR<'_> {} unsafe impl Sync for VideoInlineQueryInfoKHR<'_> {} impl ::core::default::Default for VideoInlineQueryInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), query_pool: QueryPool::default(), first_query: u32::default(), query_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoInlineQueryInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_INLINE_QUERY_INFO_KHR; } unsafe impl ExtendsVideoDecodeInfoKHR for VideoInlineQueryInfoKHR<'_> {} unsafe impl ExtendsVideoEncodeInfoKHR for VideoInlineQueryInfoKHR<'_> {} impl<'a> VideoInlineQueryInfoKHR<'a> { #[inline] pub fn query_pool(mut self, query_pool: QueryPool) -> Self { self.query_pool = query_pool; self } #[inline] pub fn first_query(mut self, first_query: u32) -> Self { self.first_query = first_query; self } #[inline] pub fn query_count(mut self, query_count: u32) -> Self { self.query_count = query_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH264ProfileInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_profile_idc: StdVideoH264ProfileIdc, pub picture_layout: VideoDecodeH264PictureLayoutFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH264ProfileInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH264ProfileInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH264ProfileInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_profile_idc: StdVideoH264ProfileIdc::default(), picture_layout: VideoDecodeH264PictureLayoutFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH264ProfileInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_PROFILE_INFO_KHR; } unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeH264ProfileInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeH264ProfileInfoKHR<'_> {} impl<'a> VideoDecodeH264ProfileInfoKHR<'a> { #[inline] pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH264ProfileIdc) -> Self { self.std_profile_idc = std_profile_idc; self } #[inline] pub fn picture_layout(mut self, picture_layout: VideoDecodeH264PictureLayoutFlagsKHR) -> Self { self.picture_layout = picture_layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH264CapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_level_idc: StdVideoH264LevelIdc, pub field_offset_granularity: Offset2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH264CapabilitiesKHR<'_> {} unsafe impl Sync for VideoDecodeH264CapabilitiesKHR<'_> {} impl ::core::default::Default for VideoDecodeH264CapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_level_idc: StdVideoH264LevelIdc::default(), field_offset_granularity: Offset2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH264CapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_CAPABILITIES_KHR; } unsafe impl 
ExtendsVideoCapabilitiesKHR for VideoDecodeH264CapabilitiesKHR<'_> {} impl<'a> VideoDecodeH264CapabilitiesKHR<'a> { #[inline] pub fn max_level_idc(mut self, max_level_idc: StdVideoH264LevelIdc) -> Self { self.max_level_idc = max_level_idc; self } #[inline] pub fn field_offset_granularity(mut self, field_offset_granularity: Offset2D) -> Self { self.field_offset_granularity = field_offset_granularity; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH264SessionParametersAddInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_sps_count: u32, pub p_std_sp_ss: *const StdVideoH264SequenceParameterSet, pub std_pps_count: u32, pub p_std_pp_ss: *const StdVideoH264PictureParameterSet, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH264SessionParametersAddInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH264SessionParametersAddInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH264SessionParametersAddInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_sps_count: u32::default(), p_std_sp_ss: ::core::ptr::null(), std_pps_count: u32::default(), p_std_pp_ss: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH264SessionParametersAddInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR for VideoDecodeH264SessionParametersAddInfoKHR<'_> { } impl<'a> VideoDecodeH264SessionParametersAddInfoKHR<'a> { #[inline] pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH264SequenceParameterSet]) -> Self { self.std_sps_count = std_sp_ss.len() as _; self.p_std_sp_ss = std_sp_ss.as_ptr(); self } #[inline] pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH264PictureParameterSet]) -> Self { self.std_pps_count = std_pp_ss.len() as _; self.p_std_pp_ss = std_pp_ss.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH264SessionParametersCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub max_std_sps_count: u32, pub max_std_pps_count: u32, pub p_parameters_add_info: *const VideoDecodeH264SessionParametersAddInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH264SessionParametersCreateInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH264SessionParametersCreateInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH264SessionParametersCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_std_sps_count: u32::default(), max_std_pps_count: u32::default(), p_parameters_add_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH264SessionParametersCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersCreateInfoKHR for VideoDecodeH264SessionParametersCreateInfoKHR<'_> { } impl<'a> VideoDecodeH264SessionParametersCreateInfoKHR<'a> { #[inline] pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self { self.max_std_sps_count = max_std_sps_count; self } #[inline] pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self { self.max_std_pps_count = max_std_pps_count; self } 
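// A hedged sketch of pairing the add-info with this create-info; the `sps` / `pps`
// slices of `StdVideoH264SequenceParameterSet` / `StdVideoH264PictureParameterSet` are
// assumed to come from the application's own bitstream parsing:
//
//     let add_info = vk::VideoDecodeH264SessionParametersAddInfoKHR::default()
//         .std_sp_ss(&sps)
//         .std_pp_ss(&pps);
//     let create_info = vk::VideoDecodeH264SessionParametersCreateInfoKHR::default()
//         .max_std_sps_count(sps.len() as u32)
//         .max_std_pps_count(pps.len() as u32)
//         .parameters_add_info(&add_info);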
#[inline] pub fn parameters_add_info( mut self, parameters_add_info: &'a VideoDecodeH264SessionParametersAddInfoKHR<'a>, ) -> Self { self.p_parameters_add_info = parameters_add_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH264PictureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_picture_info: *const StdVideoDecodeH264PictureInfo, pub slice_count: u32, pub p_slice_offsets: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH264PictureInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH264PictureInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH264PictureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_picture_info: ::core::ptr::null(), slice_count: u32::default(), p_slice_offsets: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH264PictureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_PICTURE_INFO_KHR; } unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeH264PictureInfoKHR<'_> {} impl<'a> VideoDecodeH264PictureInfoKHR<'a> { #[inline] pub fn std_picture_info(mut self, std_picture_info: &'a StdVideoDecodeH264PictureInfo) -> Self { self.p_std_picture_info = std_picture_info; self } #[inline] pub fn slice_offsets(mut self, slice_offsets: &'a [u32]) -> Self { self.slice_count = slice_offsets.len() as _; self.p_slice_offsets = slice_offsets.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH264DpbSlotInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_reference_info: *const StdVideoDecodeH264ReferenceInfo, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH264DpbSlotInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH264DpbSlotInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH264DpbSlotInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_reference_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH264DpbSlotInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR; } unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeH264DpbSlotInfoKHR<'_> {} impl<'a> VideoDecodeH264DpbSlotInfoKHR<'a> { #[inline] pub fn std_reference_info( mut self, std_reference_info: &'a StdVideoDecodeH264ReferenceInfo, ) -> Self { self.p_std_reference_info = std_reference_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH265ProfileInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_profile_idc: StdVideoH265ProfileIdc, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH265ProfileInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH265ProfileInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH265ProfileInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_profile_idc: StdVideoH265ProfileIdc::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH265ProfileInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_PROFILE_INFO_KHR; } unsafe impl 
ExtendsVideoProfileInfoKHR for VideoDecodeH265ProfileInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeH265ProfileInfoKHR<'_> {} impl<'a> VideoDecodeH265ProfileInfoKHR<'a> { #[inline] pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH265ProfileIdc) -> Self { self.std_profile_idc = std_profile_idc; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH265CapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_level_idc: StdVideoH265LevelIdc, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH265CapabilitiesKHR<'_> {} unsafe impl Sync for VideoDecodeH265CapabilitiesKHR<'_> {} impl ::core::default::Default for VideoDecodeH265CapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_level_idc: StdVideoH265LevelIdc::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH265CapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_CAPABILITIES_KHR; } unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeH265CapabilitiesKHR<'_> {} impl<'a> VideoDecodeH265CapabilitiesKHR<'a> { #[inline] pub fn max_level_idc(mut self, max_level_idc: StdVideoH265LevelIdc) -> Self { self.max_level_idc = max_level_idc; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH265SessionParametersAddInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_vps_count: u32, pub p_std_vp_ss: *const StdVideoH265VideoParameterSet, pub std_sps_count: u32, pub p_std_sp_ss: *const StdVideoH265SequenceParameterSet, pub std_pps_count: u32, pub p_std_pp_ss: *const StdVideoH265PictureParameterSet, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH265SessionParametersAddInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH265SessionParametersAddInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH265SessionParametersAddInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_vps_count: u32::default(), p_std_vp_ss: ::core::ptr::null(), std_sps_count: u32::default(), p_std_sp_ss: ::core::ptr::null(), std_pps_count: u32::default(), p_std_pp_ss: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH265SessionParametersAddInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR for VideoDecodeH265SessionParametersAddInfoKHR<'_> { } impl<'a> VideoDecodeH265SessionParametersAddInfoKHR<'a> { #[inline] pub fn std_vp_ss(mut self, std_vp_ss: &'a [StdVideoH265VideoParameterSet]) -> Self { self.std_vps_count = std_vp_ss.len() as _; self.p_std_vp_ss = std_vp_ss.as_ptr(); self } #[inline] pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH265SequenceParameterSet]) -> Self { self.std_sps_count = std_sp_ss.len() as _; self.p_std_sp_ss = std_sp_ss.as_ptr(); self } #[inline] pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH265PictureParameterSet]) -> Self { self.std_pps_count = std_pp_ss.len() as _; self.p_std_pp_ss = std_pp_ss.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH265SessionParametersCreateInfoKHR<'a> { pub s_type: 
StructureType, pub p_next: *const c_void, pub max_std_vps_count: u32, pub max_std_sps_count: u32, pub max_std_pps_count: u32, pub p_parameters_add_info: *const VideoDecodeH265SessionParametersAddInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH265SessionParametersCreateInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH265SessionParametersCreateInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH265SessionParametersCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_std_vps_count: u32::default(), max_std_sps_count: u32::default(), max_std_pps_count: u32::default(), p_parameters_add_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH265SessionParametersCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersCreateInfoKHR for VideoDecodeH265SessionParametersCreateInfoKHR<'_> { } impl<'a> VideoDecodeH265SessionParametersCreateInfoKHR<'a> { #[inline] pub fn max_std_vps_count(mut self, max_std_vps_count: u32) -> Self { self.max_std_vps_count = max_std_vps_count; self } #[inline] pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self { self.max_std_sps_count = max_std_sps_count; self } #[inline] pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self { self.max_std_pps_count = max_std_pps_count; self } #[inline] pub fn parameters_add_info( mut self, parameters_add_info: &'a VideoDecodeH265SessionParametersAddInfoKHR<'a>, ) -> Self { self.p_parameters_add_info = parameters_add_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH265PictureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_picture_info: *const StdVideoDecodeH265PictureInfo, pub slice_segment_count: u32, pub p_slice_segment_offsets: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH265PictureInfoKHR<'_> {} unsafe impl Sync for VideoDecodeH265PictureInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH265PictureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_picture_info: ::core::ptr::null(), slice_segment_count: u32::default(), p_slice_segment_offsets: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH265PictureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_PICTURE_INFO_KHR; } unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeH265PictureInfoKHR<'_> {} impl<'a> VideoDecodeH265PictureInfoKHR<'a> { #[inline] pub fn std_picture_info(mut self, std_picture_info: &'a StdVideoDecodeH265PictureInfo) -> Self { self.p_std_picture_info = std_picture_info; self } #[inline] pub fn slice_segment_offsets(mut self, slice_segment_offsets: &'a [u32]) -> Self { self.slice_segment_count = slice_segment_offsets.len() as _; self.p_slice_segment_offsets = slice_segment_offsets.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeH265DpbSlotInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_reference_info: *const StdVideoDecodeH265ReferenceInfo, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeH265DpbSlotInfoKHR<'_> {} 
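// Codec-specific DPB slot info such as `VideoDecodeH265DpbSlotInfoKHR` (below) is attached
// to a `VideoReferenceSlotInfoKHR` through that struct's `push_next`. A minimal sketch,
// with `std_ref` (`StdVideoDecodeH265ReferenceInfo`) and `picture` assumed to exist:
//
//     let mut h265_dpb = vk::VideoDecodeH265DpbSlotInfoKHR::default()
//         .std_reference_info(&std_ref);
//     let reference_slot = vk::VideoReferenceSlotInfoKHR::default()
//         .slot_index(0)
//         .picture_resource(&picture)
//         .push_next(&mut h265_dpb);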
unsafe impl Sync for VideoDecodeH265DpbSlotInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeH265DpbSlotInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_reference_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeH265DpbSlotInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR; } unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeH265DpbSlotInfoKHR<'_> {} impl<'a> VideoDecodeH265DpbSlotInfoKHR<'a> { #[inline] pub fn std_reference_info( mut self, std_reference_info: &'a StdVideoDecodeH265ReferenceInfo, ) -> Self { self.p_std_reference_info = std_reference_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeAV1ProfileInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_profile: StdVideoAV1Profile, pub film_grain_support: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeAV1ProfileInfoKHR<'_> {} unsafe impl Sync for VideoDecodeAV1ProfileInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeAV1ProfileInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_profile: StdVideoAV1Profile::default(), film_grain_support: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeAV1ProfileInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_AV1_PROFILE_INFO_KHR; } unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeAV1ProfileInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeAV1ProfileInfoKHR<'_> {} impl<'a> VideoDecodeAV1ProfileInfoKHR<'a> { #[inline] pub fn std_profile(mut self, std_profile: StdVideoAV1Profile) -> Self { self.std_profile = std_profile; self } #[inline] pub fn film_grain_support(mut self, film_grain_support: bool) -> Self { self.film_grain_support = film_grain_support.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeAV1CapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_level: StdVideoAV1Level, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeAV1CapabilitiesKHR<'_> {} unsafe impl Sync for VideoDecodeAV1CapabilitiesKHR<'_> {} impl ::core::default::Default for VideoDecodeAV1CapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_level: StdVideoAV1Level::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeAV1CapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_AV1_CAPABILITIES_KHR; } unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeAV1CapabilitiesKHR<'_> {} impl<'a> VideoDecodeAV1CapabilitiesKHR<'a> { #[inline] pub fn max_level(mut self, max_level: StdVideoAV1Level) -> Self { self.max_level = max_level; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeAV1SessionParametersCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_sequence_header: *const StdVideoAV1SequenceHeader, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeAV1SessionParametersCreateInfoKHR<'_> {} unsafe impl Sync for 
VideoDecodeAV1SessionParametersCreateInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeAV1SessionParametersCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_sequence_header: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeAV1SessionParametersCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersCreateInfoKHR for VideoDecodeAV1SessionParametersCreateInfoKHR<'_> { } impl<'a> VideoDecodeAV1SessionParametersCreateInfoKHR<'a> { #[inline] pub fn std_sequence_header( mut self, std_sequence_header: &'a StdVideoAV1SequenceHeader, ) -> Self { self.p_std_sequence_header = std_sequence_header; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeAV1PictureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_picture_info: *const StdVideoDecodeAV1PictureInfo, pub reference_name_slot_indices: [i32; MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR], pub frame_header_offset: u32, pub tile_count: u32, pub p_tile_offsets: *const u32, pub p_tile_sizes: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeAV1PictureInfoKHR<'_> {} unsafe impl Sync for VideoDecodeAV1PictureInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeAV1PictureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_picture_info: ::core::ptr::null(), reference_name_slot_indices: unsafe { ::core::mem::zeroed() }, frame_header_offset: u32::default(), tile_count: u32::default(), p_tile_offsets: ::core::ptr::null(), p_tile_sizes: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeAV1PictureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_AV1_PICTURE_INFO_KHR; } unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeAV1PictureInfoKHR<'_> {} impl<'a> VideoDecodeAV1PictureInfoKHR<'a> { #[inline] pub fn std_picture_info(mut self, std_picture_info: &'a StdVideoDecodeAV1PictureInfo) -> Self { self.p_std_picture_info = std_picture_info; self } #[inline] pub fn reference_name_slot_indices( mut self, reference_name_slot_indices: [i32; MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR], ) -> Self { self.reference_name_slot_indices = reference_name_slot_indices; self } #[inline] pub fn frame_header_offset(mut self, frame_header_offset: u32) -> Self { self.frame_header_offset = frame_header_offset; self } #[inline] pub fn tile_offsets(mut self, tile_offsets: &'a [u32]) -> Self { self.tile_count = tile_offsets.len() as _; self.p_tile_offsets = tile_offsets.as_ptr(); self } #[inline] pub fn tile_sizes(mut self, tile_sizes: &'a [u32]) -> Self { self.tile_count = tile_sizes.len() as _; self.p_tile_sizes = tile_sizes.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoDecodeAV1DpbSlotInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_reference_info: *const StdVideoDecodeAV1ReferenceInfo, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoDecodeAV1DpbSlotInfoKHR<'_> {} unsafe impl Sync for VideoDecodeAV1DpbSlotInfoKHR<'_> {} impl ::core::default::Default for VideoDecodeAV1DpbSlotInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_reference_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoDecodeAV1DpbSlotInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR; } unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeAV1DpbSlotInfoKHR<'_> {} impl<'a> VideoDecodeAV1DpbSlotInfoKHR<'a> { #[inline] pub fn std_reference_info( mut self, std_reference_info: &'a StdVideoDecodeAV1ReferenceInfo, ) -> Self { self.p_std_reference_info = std_reference_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoSessionCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub queue_family_index: u32, pub flags: VideoSessionCreateFlagsKHR, pub p_video_profile: *const VideoProfileInfoKHR<'a>, pub picture_format: Format, pub max_coded_extent: Extent2D, pub reference_picture_format: Format, pub max_dpb_slots: u32, pub max_active_reference_pictures: u32, pub p_std_header_version: *const ExtensionProperties, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoSessionCreateInfoKHR<'_> {} unsafe impl Sync for VideoSessionCreateInfoKHR<'_> {} impl ::core::default::Default for VideoSessionCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), queue_family_index: u32::default(), flags: VideoSessionCreateFlagsKHR::default(), p_video_profile: ::core::ptr::null(), picture_format: Format::default(), max_coded_extent: Extent2D::default(), reference_picture_format: Format::default(), max_dpb_slots: u32::default(), max_active_reference_pictures: u32::default(), p_std_header_version: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoSessionCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_CREATE_INFO_KHR; } pub unsafe trait ExtendsVideoSessionCreateInfoKHR {} impl<'a> VideoSessionCreateInfoKHR<'a> { #[inline] pub fn queue_family_index(mut self, queue_family_index: u32) -> Self { self.queue_family_index = queue_family_index; self } #[inline] pub fn flags(mut self, flags: VideoSessionCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn video_profile(mut self, video_profile: &'a VideoProfileInfoKHR<'a>) -> Self { self.p_video_profile = video_profile; self } #[inline] pub fn picture_format(mut self, picture_format: Format) -> Self { self.picture_format = picture_format; self } #[inline] pub fn max_coded_extent(mut self, max_coded_extent: Extent2D) -> Self { self.max_coded_extent = max_coded_extent; self } #[inline] pub fn reference_picture_format(mut self, reference_picture_format: Format) -> Self { self.reference_picture_format = reference_picture_format; self } #[inline] pub fn max_dpb_slots(mut self, max_dpb_slots: u32) -> Self { self.max_dpb_slots = max_dpb_slots; self } #[inline] pub fn max_active_reference_pictures(mut self, max_active_reference_pictures: u32) -> Self { self.max_active_reference_pictures = max_active_reference_pictures; self } #[inline] pub fn std_header_version(mut self, std_header_version: &'a ExtensionProperties) -> Self { self.p_std_header_version = std_header_version; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoSessionParametersCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoSessionParametersCreateFlagsKHR, pub video_session_parameters_template: VideoSessionParametersKHR, pub video_session: VideoSessionKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoSessionParametersCreateInfoKHR<'_> {} unsafe impl Sync for VideoSessionParametersCreateInfoKHR<'_> {} impl ::core::default::Default for VideoSessionParametersCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoSessionParametersCreateFlagsKHR::default(), video_session_parameters_template: VideoSessionParametersKHR::default(), video_session: VideoSessionKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoSessionParametersCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR; } pub unsafe trait ExtendsVideoSessionParametersCreateInfoKHR {} impl<'a> VideoSessionParametersCreateInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoSessionParametersCreateFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn video_session_parameters_template( mut self, video_session_parameters_template: VideoSessionParametersKHR, ) -> Self { self.video_session_parameters_template = video_session_parameters_template; self } #[inline] pub fn video_session(mut self, video_session: VideoSessionKHR) -> Self { self.video_session = video_session; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoSessionParametersUpdateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub update_sequence_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoSessionParametersUpdateInfoKHR<'_> {} unsafe impl Sync for VideoSessionParametersUpdateInfoKHR<'_> {} impl ::core::default::Default for VideoSessionParametersUpdateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), update_sequence_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoSessionParametersUpdateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR; } pub unsafe trait ExtendsVideoSessionParametersUpdateInfoKHR {} impl<'a> VideoSessionParametersUpdateInfoKHR<'a> { #[inline] pub fn update_sequence_count(mut self, update_sequence_count: u32) -> Self { self.update_sequence_count = update_sequence_count; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeSessionParametersGetInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub video_session_parameters: VideoSessionParametersKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeSessionParametersGetInfoKHR<'_> {} unsafe impl Sync for VideoEncodeSessionParametersGetInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeSessionParametersGetInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), video_session_parameters: VideoSessionParametersKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeSessionParametersGetInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR; } pub unsafe trait ExtendsVideoEncodeSessionParametersGetInfoKHR {} impl<'a> VideoEncodeSessionParametersGetInfoKHR<'a> { #[inline] pub fn video_session_parameters( mut self, video_session_parameters: VideoSessionParametersKHR, ) -> Self { self.video_session_parameters = video_session_parameters; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeSessionParametersFeedbackInfoKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub has_overrides: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeSessionParametersFeedbackInfoKHR<'_> {} unsafe impl Sync for VideoEncodeSessionParametersFeedbackInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeSessionParametersFeedbackInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), has_overrides: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeSessionParametersFeedbackInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR; } pub unsafe trait ExtendsVideoEncodeSessionParametersFeedbackInfoKHR {} impl<'a> VideoEncodeSessionParametersFeedbackInfoKHR<'a> { #[inline] pub fn has_overrides(mut self, has_overrides: bool) -> Self { self.has_overrides = has_overrides.into(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoBeginCodingInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoBeginCodingFlagsKHR, pub video_session: VideoSessionKHR, pub video_session_parameters: VideoSessionParametersKHR, pub reference_slot_count: u32, pub p_reference_slots: *const VideoReferenceSlotInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoBeginCodingInfoKHR<'_> {} unsafe impl Sync for VideoBeginCodingInfoKHR<'_> {} impl ::core::default::Default for VideoBeginCodingInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoBeginCodingFlagsKHR::default(), video_session: VideoSessionKHR::default(), video_session_parameters: VideoSessionParametersKHR::default(), reference_slot_count: u32::default(), p_reference_slots: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoBeginCodingInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_BEGIN_CODING_INFO_KHR; } pub unsafe trait ExtendsVideoBeginCodingInfoKHR {} impl<'a> VideoBeginCodingInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoBeginCodingFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn 
video_session(mut self, video_session: VideoSessionKHR) -> Self { self.video_session = video_session; self } #[inline] pub fn video_session_parameters( mut self, video_session_parameters: VideoSessionParametersKHR, ) -> Self { self.video_session_parameters = video_session_parameters; self } #[inline] pub fn reference_slots(mut self, reference_slots: &'a [VideoReferenceSlotInfoKHR<'a>]) -> Self { self.reference_slot_count = reference_slots.len() as _; self.p_reference_slots = reference_slots.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEndCodingInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoEndCodingFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEndCodingInfoKHR<'_> {} unsafe impl Sync for VideoEndCodingInfoKHR<'_> {} impl ::core::default::Default for VideoEndCodingInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoEndCodingFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEndCodingInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_END_CODING_INFO_KHR; } impl<'a> VideoEndCodingInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEndCodingFlagsKHR) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoCodingControlInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoCodingControlFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoCodingControlInfoKHR<'_> {} unsafe impl Sync for VideoCodingControlInfoKHR<'_> {} impl ::core::default::Default for VideoCodingControlInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoCodingControlFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoCodingControlInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_CODING_CONTROL_INFO_KHR; } pub unsafe trait ExtendsVideoCodingControlInfoKHR {} impl<'a> VideoCodingControlInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoCodingControlFlagsKHR) -> Self { self.flags = flags; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeUsageInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub video_usage_hints: VideoEncodeUsageFlagsKHR, pub video_content_hints: VideoEncodeContentFlagsKHR, pub tuning_mode: VideoEncodeTuningModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeUsageInfoKHR<'_> {} unsafe impl Sync for VideoEncodeUsageInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeUsageInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), video_usage_hints: VideoEncodeUsageFlagsKHR::default(), video_content_hints: VideoEncodeContentFlagsKHR::default(), tuning_mode: VideoEncodeTuningModeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeUsageInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_USAGE_INFO_KHR; } unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeUsageInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeUsageInfoKHR<'_> {} impl<'a> VideoEncodeUsageInfoKHR<'a> { #[inline] pub fn video_usage_hints(mut self, video_usage_hints: VideoEncodeUsageFlagsKHR) -> Self { self.video_usage_hints = video_usage_hints; self } #[inline] pub fn video_content_hints(mut self, video_content_hints: VideoEncodeContentFlagsKHR) -> Self { self.video_content_hints = video_content_hints; self } #[inline] pub fn tuning_mode(mut self, tuning_mode: VideoEncodeTuningModeKHR) -> Self { self.tuning_mode = tuning_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoEncodeFlagsKHR, pub dst_buffer: Buffer, pub dst_buffer_offset: DeviceSize, pub dst_buffer_range: DeviceSize, pub src_picture_resource: VideoPictureResourceInfoKHR<'a>, pub p_setup_reference_slot: *const VideoReferenceSlotInfoKHR<'a>, pub reference_slot_count: u32, pub p_reference_slots: *const VideoReferenceSlotInfoKHR<'a>, pub preceding_externally_encoded_bytes: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeInfoKHR<'_> {} unsafe impl Sync for VideoEncodeInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoEncodeFlagsKHR::default(), dst_buffer: Buffer::default(), dst_buffer_offset: DeviceSize::default(), dst_buffer_range: DeviceSize::default(), src_picture_resource: VideoPictureResourceInfoKHR::default(), p_setup_reference_slot: ::core::ptr::null(), reference_slot_count: u32::default(), p_reference_slots: ::core::ptr::null(), preceding_externally_encoded_bytes: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_INFO_KHR; } pub unsafe trait ExtendsVideoEncodeInfoKHR {} 
impl<'a> VideoEncodeInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn dst_buffer(mut self, dst_buffer: Buffer) -> Self { self.dst_buffer = dst_buffer; self } #[inline] pub fn dst_buffer_offset(mut self, dst_buffer_offset: DeviceSize) -> Self { self.dst_buffer_offset = dst_buffer_offset; self } #[inline] pub fn dst_buffer_range(mut self, dst_buffer_range: DeviceSize) -> Self { self.dst_buffer_range = dst_buffer_range; self } #[inline] pub fn src_picture_resource( mut self, src_picture_resource: VideoPictureResourceInfoKHR<'a>, ) -> Self { self.src_picture_resource = src_picture_resource; self } #[inline] pub fn setup_reference_slot( mut self, setup_reference_slot: &'a VideoReferenceSlotInfoKHR<'a>, ) -> Self { self.p_setup_reference_slot = setup_reference_slot; self } #[inline] pub fn reference_slots(mut self, reference_slots: &'a [VideoReferenceSlotInfoKHR<'a>]) -> Self { self.reference_slot_count = reference_slots.len() as _; self.p_reference_slots = reference_slots.as_ptr(); self } #[inline] pub fn preceding_externally_encoded_bytes( mut self, preceding_externally_encoded_bytes: u32, ) -> Self { self.preceding_externally_encoded_bytes = preceding_externally_encoded_bytes; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueryPoolVideoEncodeFeedbackCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub encode_feedback_flags: VideoEncodeFeedbackFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueryPoolVideoEncodeFeedbackCreateInfoKHR<'_> {} unsafe impl Sync for QueryPoolVideoEncodeFeedbackCreateInfoKHR<'_> {} impl ::core::default::Default for QueryPoolVideoEncodeFeedbackCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), encode_feedback_flags: VideoEncodeFeedbackFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueryPoolVideoEncodeFeedbackCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR; } unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolVideoEncodeFeedbackCreateInfoKHR<'_> {} impl<'a> QueryPoolVideoEncodeFeedbackCreateInfoKHR<'a> { #[inline] pub fn encode_feedback_flags( mut self, encode_feedback_flags: VideoEncodeFeedbackFlagsKHR, ) -> Self { self.encode_feedback_flags = encode_feedback_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeQualityLevelInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub quality_level: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeQualityLevelInfoKHR<'_> {} unsafe impl Sync for VideoEncodeQualityLevelInfoKHR<'_> {} impl 
::core::default::Default for VideoEncodeQualityLevelInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), quality_level: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeQualityLevelInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR; } unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeQualityLevelInfoKHR<'_> {} unsafe impl ExtendsVideoSessionParametersCreateInfoKHR for VideoEncodeQualityLevelInfoKHR<'_> {} impl<'a> VideoEncodeQualityLevelInfoKHR<'a> { #[inline] pub fn quality_level(mut self, quality_level: u32) -> Self { self.quality_level = quality_level; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_video_profile: *const VideoProfileInfoKHR<'a>, pub quality_level: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'_> {} unsafe impl Sync for PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'_> {} impl ::core::default::Default for PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_video_profile: ::core::ptr::null(), quality_level: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR; } impl<'a> PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'a> { #[inline] pub fn video_profile(mut self, video_profile: &'a VideoProfileInfoKHR<'a>) -> Self { self.p_video_profile = video_profile; self } #[inline] pub fn quality_level(mut self, quality_level: u32) -> Self { self.quality_level = quality_level; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeQualityLevelPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub preferred_rate_control_mode: VideoEncodeRateControlModeFlagsKHR, pub preferred_rate_control_layer_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeQualityLevelPropertiesKHR<'_> {} unsafe impl Sync for VideoEncodeQualityLevelPropertiesKHR<'_> {} impl ::core::default::Default for VideoEncodeQualityLevelPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), preferred_rate_control_mode: VideoEncodeRateControlModeFlagsKHR::default(), preferred_rate_control_layer_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeQualityLevelPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR; } pub unsafe trait ExtendsVideoEncodeQualityLevelPropertiesKHR {} impl<'a> VideoEncodeQualityLevelPropertiesKHR<'a> { #[inline] pub fn preferred_rate_control_mode( mut self, preferred_rate_control_mode: VideoEncodeRateControlModeFlagsKHR, ) -> Self { self.preferred_rate_control_mode = preferred_rate_control_mode; self } #[inline] pub fn preferred_rate_control_layer_count( mut self, preferred_rate_control_layer_count: u32, ) -> Self { self.preferred_rate_control_layer_count = preferred_rate_control_layer_count; self } #[doc = r" Prepends the 
given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeRateControlInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoEncodeRateControlFlagsKHR, pub rate_control_mode: VideoEncodeRateControlModeFlagsKHR, pub layer_count: u32, pub p_layers: *const VideoEncodeRateControlLayerInfoKHR<'a>, pub virtual_buffer_size_in_ms: u32, pub initial_virtual_buffer_size_in_ms: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeRateControlInfoKHR<'_> {} unsafe impl Sync for VideoEncodeRateControlInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeRateControlInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoEncodeRateControlFlagsKHR::default(), rate_control_mode: VideoEncodeRateControlModeFlagsKHR::default(), layer_count: u32::default(), p_layers: ::core::ptr::null(), virtual_buffer_size_in_ms: u32::default(), initial_virtual_buffer_size_in_ms: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeRateControlInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_RATE_CONTROL_INFO_KHR; } unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeRateControlInfoKHR<'_> {} unsafe impl ExtendsVideoBeginCodingInfoKHR for VideoEncodeRateControlInfoKHR<'_> {} impl<'a> VideoEncodeRateControlInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeRateControlFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn rate_control_mode( mut self, rate_control_mode: VideoEncodeRateControlModeFlagsKHR, ) -> Self { self.rate_control_mode = rate_control_mode; self } #[inline] pub fn layers(mut self, layers: &'a [VideoEncodeRateControlLayerInfoKHR<'a>]) -> Self { self.layer_count = layers.len() as _; self.p_layers = layers.as_ptr(); self } #[inline] pub fn virtual_buffer_size_in_ms(mut self, virtual_buffer_size_in_ms: u32) -> Self { self.virtual_buffer_size_in_ms = virtual_buffer_size_in_ms; self } #[inline] pub fn initial_virtual_buffer_size_in_ms( mut self, initial_virtual_buffer_size_in_ms: u32, ) -> Self { self.initial_virtual_buffer_size_in_ms = initial_virtual_buffer_size_in_ms; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeRateControlLayerInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub average_bitrate: u64, pub max_bitrate: u64, pub frame_rate_numerator: u32, pub frame_rate_denominator: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeRateControlLayerInfoKHR<'_> {} unsafe impl Sync for VideoEncodeRateControlLayerInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeRateControlLayerInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), 
average_bitrate: u64::default(), max_bitrate: u64::default(), frame_rate_numerator: u32::default(), frame_rate_denominator: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeRateControlLayerInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR; } pub unsafe trait ExtendsVideoEncodeRateControlLayerInfoKHR {} impl<'a> VideoEncodeRateControlLayerInfoKHR<'a> { #[inline] pub fn average_bitrate(mut self, average_bitrate: u64) -> Self { self.average_bitrate = average_bitrate; self } #[inline] pub fn max_bitrate(mut self, max_bitrate: u64) -> Self { self.max_bitrate = max_bitrate; self } #[inline] pub fn frame_rate_numerator(mut self, frame_rate_numerator: u32) -> Self { self.frame_rate_numerator = frame_rate_numerator; self } #[inline] pub fn frame_rate_denominator(mut self, frame_rate_denominator: u32) -> Self { self.frame_rate_denominator = frame_rate_denominator; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeCapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: VideoEncodeCapabilityFlagsKHR, pub rate_control_modes: VideoEncodeRateControlModeFlagsKHR, pub max_rate_control_layers: u32, pub max_bitrate: u64, pub max_quality_levels: u32, pub encode_input_picture_granularity: Extent2D, pub supported_encode_feedback_flags: VideoEncodeFeedbackFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeCapabilitiesKHR<'_> {} unsafe impl Sync for VideoEncodeCapabilitiesKHR<'_> {} impl ::core::default::Default for VideoEncodeCapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: VideoEncodeCapabilityFlagsKHR::default(), rate_control_modes: VideoEncodeRateControlModeFlagsKHR::default(), max_rate_control_layers: u32::default(), max_bitrate: u64::default(), max_quality_levels: u32::default(), encode_input_picture_granularity: Extent2D::default(), supported_encode_feedback_flags: VideoEncodeFeedbackFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeCapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_CAPABILITIES_KHR; } unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeCapabilitiesKHR<'_> {} impl<'a> VideoEncodeCapabilitiesKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeCapabilityFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn rate_control_modes( mut self, rate_control_modes: VideoEncodeRateControlModeFlagsKHR, ) -> Self { self.rate_control_modes = rate_control_modes; self } #[inline] pub fn max_rate_control_layers(mut self, max_rate_control_layers: u32) -> Self { self.max_rate_control_layers = max_rate_control_layers; self } #[inline] pub fn max_bitrate(mut self, 
max_bitrate: u64) -> Self { self.max_bitrate = max_bitrate; self } #[inline] pub fn max_quality_levels(mut self, max_quality_levels: u32) -> Self { self.max_quality_levels = max_quality_levels; self } #[inline] pub fn encode_input_picture_granularity( mut self, encode_input_picture_granularity: Extent2D, ) -> Self { self.encode_input_picture_granularity = encode_input_picture_granularity; self } #[inline] pub fn supported_encode_feedback_flags( mut self, supported_encode_feedback_flags: VideoEncodeFeedbackFlagsKHR, ) -> Self { self.supported_encode_feedback_flags = supported_encode_feedback_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264CapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: VideoEncodeH264CapabilityFlagsKHR, pub max_level_idc: StdVideoH264LevelIdc, pub max_slice_count: u32, pub max_p_picture_l0_reference_count: u32, pub max_b_picture_l0_reference_count: u32, pub max_l1_reference_count: u32, pub max_temporal_layer_count: u32, pub expect_dyadic_temporal_layer_pattern: Bool32, pub min_qp: i32, pub max_qp: i32, pub prefers_gop_remaining_frames: Bool32, pub requires_gop_remaining_frames: Bool32, pub std_syntax_flags: VideoEncodeH264StdFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264CapabilitiesKHR<'_> {} unsafe impl Sync for VideoEncodeH264CapabilitiesKHR<'_> {} impl ::core::default::Default for VideoEncodeH264CapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: VideoEncodeH264CapabilityFlagsKHR::default(), max_level_idc: StdVideoH264LevelIdc::default(), max_slice_count: u32::default(), max_p_picture_l0_reference_count: u32::default(), max_b_picture_l0_reference_count: u32::default(), max_l1_reference_count: u32::default(), max_temporal_layer_count: u32::default(), expect_dyadic_temporal_layer_pattern: Bool32::default(), min_qp: i32::default(), max_qp: i32::default(), prefers_gop_remaining_frames: Bool32::default(), requires_gop_remaining_frames: Bool32::default(), std_syntax_flags: VideoEncodeH264StdFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264CapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_CAPABILITIES_KHR; } unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeH264CapabilitiesKHR<'_> {} impl<'a> VideoEncodeH264CapabilitiesKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeH264CapabilityFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn max_level_idc(mut self, max_level_idc: StdVideoH264LevelIdc) -> Self { self.max_level_idc = max_level_idc; self } #[inline] pub fn max_slice_count(mut self, max_slice_count: u32) -> Self { self.max_slice_count = max_slice_count; self } #[inline] pub fn max_p_picture_l0_reference_count( mut self, max_p_picture_l0_reference_count: u32, ) -> Self { self.max_p_picture_l0_reference_count = max_p_picture_l0_reference_count; self } #[inline] pub fn max_b_picture_l0_reference_count( mut self, max_b_picture_l0_reference_count: u32, ) -> Self { self.max_b_picture_l0_reference_count = max_b_picture_l0_reference_count; self } #[inline] pub fn max_l1_reference_count(mut self, max_l1_reference_count: u32) -> Self { self.max_l1_reference_count = max_l1_reference_count; self } #[inline] pub fn max_temporal_layer_count(mut self, max_temporal_layer_count: u32) -> Self { self.max_temporal_layer_count = 
max_temporal_layer_count; self } #[inline] pub fn expect_dyadic_temporal_layer_pattern( mut self, expect_dyadic_temporal_layer_pattern: bool, ) -> Self { self.expect_dyadic_temporal_layer_pattern = expect_dyadic_temporal_layer_pattern.into(); self } #[inline] pub fn min_qp(mut self, min_qp: i32) -> Self { self.min_qp = min_qp; self } #[inline] pub fn max_qp(mut self, max_qp: i32) -> Self { self.max_qp = max_qp; self } #[inline] pub fn prefers_gop_remaining_frames(mut self, prefers_gop_remaining_frames: bool) -> Self { self.prefers_gop_remaining_frames = prefers_gop_remaining_frames.into(); self } #[inline] pub fn requires_gop_remaining_frames(mut self, requires_gop_remaining_frames: bool) -> Self { self.requires_gop_remaining_frames = requires_gop_remaining_frames.into(); self } #[inline] pub fn std_syntax_flags(mut self, std_syntax_flags: VideoEncodeH264StdFlagsKHR) -> Self { self.std_syntax_flags = std_syntax_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264QualityLevelPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub preferred_rate_control_flags: VideoEncodeH264RateControlFlagsKHR, pub preferred_gop_frame_count: u32, pub preferred_idr_period: u32, pub preferred_consecutive_b_frame_count: u32, pub preferred_temporal_layer_count: u32, pub preferred_constant_qp: VideoEncodeH264QpKHR, pub preferred_max_l0_reference_count: u32, pub preferred_max_l1_reference_count: u32, pub preferred_std_entropy_coding_mode_flag: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264QualityLevelPropertiesKHR<'_> {} unsafe impl Sync for VideoEncodeH264QualityLevelPropertiesKHR<'_> {} impl ::core::default::Default for VideoEncodeH264QualityLevelPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), preferred_rate_control_flags: VideoEncodeH264RateControlFlagsKHR::default(), preferred_gop_frame_count: u32::default(), preferred_idr_period: u32::default(), preferred_consecutive_b_frame_count: u32::default(), preferred_temporal_layer_count: u32::default(), preferred_constant_qp: VideoEncodeH264QpKHR::default(), preferred_max_l0_reference_count: u32::default(), preferred_max_l1_reference_count: u32::default(), preferred_std_entropy_coding_mode_flag: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264QualityLevelPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR; } unsafe impl ExtendsVideoEncodeQualityLevelPropertiesKHR for VideoEncodeH264QualityLevelPropertiesKHR<'_> { } impl<'a> VideoEncodeH264QualityLevelPropertiesKHR<'a> { #[inline] pub fn preferred_rate_control_flags( mut self, preferred_rate_control_flags: VideoEncodeH264RateControlFlagsKHR, ) -> Self { self.preferred_rate_control_flags = preferred_rate_control_flags; self } #[inline] pub fn preferred_gop_frame_count(mut self, preferred_gop_frame_count: u32) -> Self { self.preferred_gop_frame_count = preferred_gop_frame_count; self } #[inline] pub fn preferred_idr_period(mut self, preferred_idr_period: u32) -> Self { self.preferred_idr_period = preferred_idr_period; self } #[inline] pub fn preferred_consecutive_b_frame_count( mut self, preferred_consecutive_b_frame_count: u32, ) -> Self { self.preferred_consecutive_b_frame_count = preferred_consecutive_b_frame_count; self } #[inline] pub fn preferred_temporal_layer_count(mut self, 
preferred_temporal_layer_count: u32) -> Self { self.preferred_temporal_layer_count = preferred_temporal_layer_count; self } #[inline] pub fn preferred_constant_qp(mut self, preferred_constant_qp: VideoEncodeH264QpKHR) -> Self { self.preferred_constant_qp = preferred_constant_qp; self } #[inline] pub fn preferred_max_l0_reference_count( mut self, preferred_max_l0_reference_count: u32, ) -> Self { self.preferred_max_l0_reference_count = preferred_max_l0_reference_count; self } #[inline] pub fn preferred_max_l1_reference_count( mut self, preferred_max_l1_reference_count: u32, ) -> Self { self.preferred_max_l1_reference_count = preferred_max_l1_reference_count; self } #[inline] pub fn preferred_std_entropy_coding_mode_flag( mut self, preferred_std_entropy_coding_mode_flag: bool, ) -> Self { self.preferred_std_entropy_coding_mode_flag = preferred_std_entropy_coding_mode_flag.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264SessionCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub use_max_level_idc: Bool32, pub max_level_idc: StdVideoH264LevelIdc, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264SessionCreateInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264SessionCreateInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264SessionCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), use_max_level_idc: Bool32::default(), max_level_idc: StdVideoH264LevelIdc::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264SessionCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionCreateInfoKHR for VideoEncodeH264SessionCreateInfoKHR<'_> {} impl<'a> VideoEncodeH264SessionCreateInfoKHR<'a> { #[inline] pub fn use_max_level_idc(mut self, use_max_level_idc: bool) -> Self { self.use_max_level_idc = use_max_level_idc.into(); self } #[inline] pub fn max_level_idc(mut self, max_level_idc: StdVideoH264LevelIdc) -> Self { self.max_level_idc = max_level_idc; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264SessionParametersAddInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_sps_count: u32, pub p_std_sp_ss: *const StdVideoH264SequenceParameterSet, pub std_pps_count: u32, pub p_std_pp_ss: *const StdVideoH264PictureParameterSet, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264SessionParametersAddInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264SessionParametersAddInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264SessionParametersAddInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_sps_count: u32::default(), p_std_sp_ss: ::core::ptr::null(), std_pps_count: u32::default(), p_std_pp_ss: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264SessionParametersAddInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR for VideoEncodeH264SessionParametersAddInfoKHR<'_> { } impl<'a> VideoEncodeH264SessionParametersAddInfoKHR<'a> { #[inline] pub fn std_sp_ss(mut self, std_sp_ss: &'a 
[StdVideoH264SequenceParameterSet]) -> Self { self.std_sps_count = std_sp_ss.len() as _; self.p_std_sp_ss = std_sp_ss.as_ptr(); self } #[inline] pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH264PictureParameterSet]) -> Self { self.std_pps_count = std_pp_ss.len() as _; self.p_std_pp_ss = std_pp_ss.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264SessionParametersCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub max_std_sps_count: u32, pub max_std_pps_count: u32, pub p_parameters_add_info: *const VideoEncodeH264SessionParametersAddInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264SessionParametersCreateInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264SessionParametersCreateInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264SessionParametersCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_std_sps_count: u32::default(), max_std_pps_count: u32::default(), p_parameters_add_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264SessionParametersCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersCreateInfoKHR for VideoEncodeH264SessionParametersCreateInfoKHR<'_> { } impl<'a> VideoEncodeH264SessionParametersCreateInfoKHR<'a> { #[inline] pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self { self.max_std_sps_count = max_std_sps_count; self } #[inline] pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self { self.max_std_pps_count = max_std_pps_count; self } #[inline] pub fn parameters_add_info( mut self, parameters_add_info: &'a VideoEncodeH264SessionParametersAddInfoKHR<'a>, ) -> Self { self.p_parameters_add_info = parameters_add_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264SessionParametersGetInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub write_std_sps: Bool32, pub write_std_pps: Bool32, pub std_sps_id: u32, pub std_pps_id: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264SessionParametersGetInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264SessionParametersGetInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264SessionParametersGetInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), write_std_sps: Bool32::default(), write_std_pps: Bool32::default(), std_sps_id: u32::default(), std_pps_id: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264SessionParametersGetInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR; } unsafe impl ExtendsVideoEncodeSessionParametersGetInfoKHR for VideoEncodeH264SessionParametersGetInfoKHR<'_> { } impl<'a> VideoEncodeH264SessionParametersGetInfoKHR<'a> { #[inline] pub fn write_std_sps(mut self, write_std_sps: bool) -> Self { self.write_std_sps = write_std_sps.into(); self } #[inline] pub fn write_std_pps(mut self, write_std_pps: bool) -> Self { self.write_std_pps = write_std_pps.into(); self } #[inline] pub fn std_sps_id(mut self, std_sps_id: u32) -> Self { self.std_sps_id = std_sps_id; self } 
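    // Editorial usage sketch (comments only; not generated code): this struct is
    // chained onto `VideoEncodeSessionParametersGetInfoKHR` via `push_next` to
    // select which H.264 SPS/PPS to retrieve, e.g.
    //
    //     let mut h264_get = VideoEncodeH264SessionParametersGetInfoKHR::default()
    //         .write_std_sps(true)
    //         .std_sps_id(0);
    //     let get_info = VideoEncodeSessionParametersGetInfoKHR::default()
    //         .video_session_parameters(VideoSessionParametersKHR::default()) // placeholder null handle
    //         .push_next(&mut h264_get);
    //
    // The handle above is a placeholder; a real application passes the session
    // parameters object whose encoded data it wants to query.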
#[inline] pub fn std_pps_id(mut self, std_pps_id: u32) -> Self { self.std_pps_id = std_pps_id; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264SessionParametersFeedbackInfoKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub has_std_sps_overrides: Bool32, pub has_std_pps_overrides: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264SessionParametersFeedbackInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264SessionParametersFeedbackInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264SessionParametersFeedbackInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), has_std_sps_overrides: Bool32::default(), has_std_pps_overrides: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264SessionParametersFeedbackInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR; } unsafe impl ExtendsVideoEncodeSessionParametersFeedbackInfoKHR for VideoEncodeH264SessionParametersFeedbackInfoKHR<'_> { } impl<'a> VideoEncodeH264SessionParametersFeedbackInfoKHR<'a> { #[inline] pub fn has_std_sps_overrides(mut self, has_std_sps_overrides: bool) -> Self { self.has_std_sps_overrides = has_std_sps_overrides.into(); self } #[inline] pub fn has_std_pps_overrides(mut self, has_std_pps_overrides: bool) -> Self { self.has_std_pps_overrides = has_std_pps_overrides.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264DpbSlotInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_reference_info: *const StdVideoEncodeH264ReferenceInfo, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264DpbSlotInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264DpbSlotInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264DpbSlotInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_reference_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264DpbSlotInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR; } unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoEncodeH264DpbSlotInfoKHR<'_> {} impl<'a> VideoEncodeH264DpbSlotInfoKHR<'a> { #[inline] pub fn std_reference_info( mut self, std_reference_info: &'a StdVideoEncodeH264ReferenceInfo, ) -> Self { self.p_std_reference_info = std_reference_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264PictureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub nalu_slice_entry_count: u32, pub p_nalu_slice_entries: *const VideoEncodeH264NaluSliceInfoKHR<'a>, pub p_std_picture_info: *const StdVideoEncodeH264PictureInfo, pub generate_prefix_nalu: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264PictureInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264PictureInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264PictureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), nalu_slice_entry_count: u32::default(), p_nalu_slice_entries: ::core::ptr::null(), p_std_picture_info: 
::core::ptr::null(), generate_prefix_nalu: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264PictureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_PICTURE_INFO_KHR; } unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH264PictureInfoKHR<'_> {} impl<'a> VideoEncodeH264PictureInfoKHR<'a> { #[inline] pub fn nalu_slice_entries( mut self, nalu_slice_entries: &'a [VideoEncodeH264NaluSliceInfoKHR<'a>], ) -> Self { self.nalu_slice_entry_count = nalu_slice_entries.len() as _; self.p_nalu_slice_entries = nalu_slice_entries.as_ptr(); self } #[inline] pub fn std_picture_info(mut self, std_picture_info: &'a StdVideoEncodeH264PictureInfo) -> Self { self.p_std_picture_info = std_picture_info; self } #[inline] pub fn generate_prefix_nalu(mut self, generate_prefix_nalu: bool) -> Self { self.generate_prefix_nalu = generate_prefix_nalu.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264ProfileInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_profile_idc: StdVideoH264ProfileIdc, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264ProfileInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264ProfileInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264ProfileInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_profile_idc: StdVideoH264ProfileIdc::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264ProfileInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_PROFILE_INFO_KHR; } unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeH264ProfileInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeH264ProfileInfoKHR<'_> {} impl<'a> VideoEncodeH264ProfileInfoKHR<'a> { #[inline] pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH264ProfileIdc) -> Self { self.std_profile_idc = std_profile_idc; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264NaluSliceInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub constant_qp: i32, pub p_std_slice_header: *const StdVideoEncodeH264SliceHeader, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264NaluSliceInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264NaluSliceInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264NaluSliceInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), constant_qp: i32::default(), p_std_slice_header: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264NaluSliceInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR; } impl<'a> VideoEncodeH264NaluSliceInfoKHR<'a> { #[inline] pub fn constant_qp(mut self, constant_qp: i32) -> Self { self.constant_qp = constant_qp; self } #[inline] pub fn std_slice_header(mut self, std_slice_header: &'a StdVideoEncodeH264SliceHeader) -> Self { self.p_std_slice_header = std_slice_header; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264RateControlInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoEncodeH264RateControlFlagsKHR, pub 
gop_frame_count: u32, pub idr_period: u32, pub consecutive_b_frame_count: u32, pub temporal_layer_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264RateControlInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264RateControlInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264RateControlInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoEncodeH264RateControlFlagsKHR::default(), gop_frame_count: u32::default(), idr_period: u32::default(), consecutive_b_frame_count: u32::default(), temporal_layer_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264RateControlInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR; } unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH264RateControlInfoKHR<'_> {} unsafe impl ExtendsVideoBeginCodingInfoKHR for VideoEncodeH264RateControlInfoKHR<'_> {} impl<'a> VideoEncodeH264RateControlInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeH264RateControlFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn gop_frame_count(mut self, gop_frame_count: u32) -> Self { self.gop_frame_count = gop_frame_count; self } #[inline] pub fn idr_period(mut self, idr_period: u32) -> Self { self.idr_period = idr_period; self } #[inline] pub fn consecutive_b_frame_count(mut self, consecutive_b_frame_count: u32) -> Self { self.consecutive_b_frame_count = consecutive_b_frame_count; self } #[inline] pub fn temporal_layer_count(mut self, temporal_layer_count: u32) -> Self { self.temporal_layer_count = temporal_layer_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VideoEncodeH264QpKHR { pub qp_i: i32, pub qp_p: i32, pub qp_b: i32, } impl VideoEncodeH264QpKHR { #[inline] pub fn qp_i(mut self, qp_i: i32) -> Self { self.qp_i = qp_i; self } #[inline] pub fn qp_p(mut self, qp_p: i32) -> Self { self.qp_p = qp_p; self } #[inline] pub fn qp_b(mut self, qp_b: i32) -> Self { self.qp_b = qp_b; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VideoEncodeH264FrameSizeKHR { pub frame_i_size: u32, pub frame_p_size: u32, pub frame_b_size: u32, } impl VideoEncodeH264FrameSizeKHR { #[inline] pub fn frame_i_size(mut self, frame_i_size: u32) -> Self { self.frame_i_size = frame_i_size; self } #[inline] pub fn frame_p_size(mut self, frame_p_size: u32) -> Self { self.frame_p_size = frame_p_size; self } #[inline] pub fn frame_b_size(mut self, frame_b_size: u32) -> Self { self.frame_b_size = frame_b_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264GopRemainingFrameInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub use_gop_remaining_frames: Bool32, pub gop_remaining_i: u32, pub gop_remaining_p: u32, pub gop_remaining_b: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264GopRemainingFrameInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264GopRemainingFrameInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264GopRemainingFrameInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), use_gop_remaining_frames: Bool32::default(), gop_remaining_i: u32::default(), gop_remaining_p: u32::default(), 
gop_remaining_b: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264GopRemainingFrameInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR; } unsafe impl ExtendsVideoBeginCodingInfoKHR for VideoEncodeH264GopRemainingFrameInfoKHR<'_> {} impl<'a> VideoEncodeH264GopRemainingFrameInfoKHR<'a> { #[inline] pub fn use_gop_remaining_frames(mut self, use_gop_remaining_frames: bool) -> Self { self.use_gop_remaining_frames = use_gop_remaining_frames.into(); self } #[inline] pub fn gop_remaining_i(mut self, gop_remaining_i: u32) -> Self { self.gop_remaining_i = gop_remaining_i; self } #[inline] pub fn gop_remaining_p(mut self, gop_remaining_p: u32) -> Self { self.gop_remaining_p = gop_remaining_p; self } #[inline] pub fn gop_remaining_b(mut self, gop_remaining_b: u32) -> Self { self.gop_remaining_b = gop_remaining_b; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH264RateControlLayerInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub use_min_qp: Bool32, pub min_qp: VideoEncodeH264QpKHR, pub use_max_qp: Bool32, pub max_qp: VideoEncodeH264QpKHR, pub use_max_frame_size: Bool32, pub max_frame_size: VideoEncodeH264FrameSizeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH264RateControlLayerInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH264RateControlLayerInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH264RateControlLayerInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), use_min_qp: Bool32::default(), min_qp: VideoEncodeH264QpKHR::default(), use_max_qp: Bool32::default(), max_qp: VideoEncodeH264QpKHR::default(), use_max_frame_size: Bool32::default(), max_frame_size: VideoEncodeH264FrameSizeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH264RateControlLayerInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR; } unsafe impl ExtendsVideoEncodeRateControlLayerInfoKHR for VideoEncodeH264RateControlLayerInfoKHR<'_> { } impl<'a> VideoEncodeH264RateControlLayerInfoKHR<'a> { #[inline] pub fn use_min_qp(mut self, use_min_qp: bool) -> Self { self.use_min_qp = use_min_qp.into(); self } #[inline] pub fn min_qp(mut self, min_qp: VideoEncodeH264QpKHR) -> Self { self.min_qp = min_qp; self } #[inline] pub fn use_max_qp(mut self, use_max_qp: bool) -> Self { self.use_max_qp = use_max_qp.into(); self } #[inline] pub fn max_qp(mut self, max_qp: VideoEncodeH264QpKHR) -> Self { self.max_qp = max_qp; self } #[inline] pub fn use_max_frame_size(mut self, use_max_frame_size: bool) -> Self { self.use_max_frame_size = use_max_frame_size.into(); self } #[inline] pub fn max_frame_size(mut self, max_frame_size: VideoEncodeH264FrameSizeKHR) -> Self { self.max_frame_size = max_frame_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265CapabilitiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: VideoEncodeH265CapabilityFlagsKHR, pub max_level_idc: StdVideoH265LevelIdc, pub max_slice_segment_count: u32, pub max_tiles: Extent2D, pub ctb_sizes: VideoEncodeH265CtbSizeFlagsKHR, pub transform_block_sizes: VideoEncodeH265TransformBlockSizeFlagsKHR, pub max_p_picture_l0_reference_count: u32, pub 
max_b_picture_l0_reference_count: u32, pub max_l1_reference_count: u32, pub max_sub_layer_count: u32, pub expect_dyadic_temporal_sub_layer_pattern: Bool32, pub min_qp: i32, pub max_qp: i32, pub prefers_gop_remaining_frames: Bool32, pub requires_gop_remaining_frames: Bool32, pub std_syntax_flags: VideoEncodeH265StdFlagsKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265CapabilitiesKHR<'_> {} unsafe impl Sync for VideoEncodeH265CapabilitiesKHR<'_> {} impl ::core::default::Default for VideoEncodeH265CapabilitiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: VideoEncodeH265CapabilityFlagsKHR::default(), max_level_idc: StdVideoH265LevelIdc::default(), max_slice_segment_count: u32::default(), max_tiles: Extent2D::default(), ctb_sizes: VideoEncodeH265CtbSizeFlagsKHR::default(), transform_block_sizes: VideoEncodeH265TransformBlockSizeFlagsKHR::default(), max_p_picture_l0_reference_count: u32::default(), max_b_picture_l0_reference_count: u32::default(), max_l1_reference_count: u32::default(), max_sub_layer_count: u32::default(), expect_dyadic_temporal_sub_layer_pattern: Bool32::default(), min_qp: i32::default(), max_qp: i32::default(), prefers_gop_remaining_frames: Bool32::default(), requires_gop_remaining_frames: Bool32::default(), std_syntax_flags: VideoEncodeH265StdFlagsKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265CapabilitiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_CAPABILITIES_KHR; } unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeH265CapabilitiesKHR<'_> {} impl<'a> VideoEncodeH265CapabilitiesKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeH265CapabilityFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn max_level_idc(mut self, max_level_idc: StdVideoH265LevelIdc) -> Self { self.max_level_idc = max_level_idc; self } #[inline] pub fn max_slice_segment_count(mut self, max_slice_segment_count: u32) -> Self { self.max_slice_segment_count = max_slice_segment_count; self } #[inline] pub fn max_tiles(mut self, max_tiles: Extent2D) -> Self { self.max_tiles = max_tiles; self } #[inline] pub fn ctb_sizes(mut self, ctb_sizes: VideoEncodeH265CtbSizeFlagsKHR) -> Self { self.ctb_sizes = ctb_sizes; self } #[inline] pub fn transform_block_sizes( mut self, transform_block_sizes: VideoEncodeH265TransformBlockSizeFlagsKHR, ) -> Self { self.transform_block_sizes = transform_block_sizes; self } #[inline] pub fn max_p_picture_l0_reference_count( mut self, max_p_picture_l0_reference_count: u32, ) -> Self { self.max_p_picture_l0_reference_count = max_p_picture_l0_reference_count; self } #[inline] pub fn max_b_picture_l0_reference_count( mut self, max_b_picture_l0_reference_count: u32, ) -> Self { self.max_b_picture_l0_reference_count = max_b_picture_l0_reference_count; self } #[inline] pub fn max_l1_reference_count(mut self, max_l1_reference_count: u32) -> Self { self.max_l1_reference_count = max_l1_reference_count; self } #[inline] pub fn max_sub_layer_count(mut self, max_sub_layer_count: u32) -> Self { self.max_sub_layer_count = max_sub_layer_count; self } #[inline] pub fn expect_dyadic_temporal_sub_layer_pattern( mut self, expect_dyadic_temporal_sub_layer_pattern: bool, ) -> Self { self.expect_dyadic_temporal_sub_layer_pattern = expect_dyadic_temporal_sub_layer_pattern.into(); self } #[inline] pub fn min_qp(mut self, min_qp: i32) -> Self { self.min_qp = min_qp; self } #[inline] pub fn max_qp(mut 
self, max_qp: i32) -> Self { self.max_qp = max_qp; self } #[inline] pub fn prefers_gop_remaining_frames(mut self, prefers_gop_remaining_frames: bool) -> Self { self.prefers_gop_remaining_frames = prefers_gop_remaining_frames.into(); self } #[inline] pub fn requires_gop_remaining_frames(mut self, requires_gop_remaining_frames: bool) -> Self { self.requires_gop_remaining_frames = requires_gop_remaining_frames.into(); self } #[inline] pub fn std_syntax_flags(mut self, std_syntax_flags: VideoEncodeH265StdFlagsKHR) -> Self { self.std_syntax_flags = std_syntax_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265QualityLevelPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub preferred_rate_control_flags: VideoEncodeH265RateControlFlagsKHR, pub preferred_gop_frame_count: u32, pub preferred_idr_period: u32, pub preferred_consecutive_b_frame_count: u32, pub preferred_sub_layer_count: u32, pub preferred_constant_qp: VideoEncodeH265QpKHR, pub preferred_max_l0_reference_count: u32, pub preferred_max_l1_reference_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265QualityLevelPropertiesKHR<'_> {} unsafe impl Sync for VideoEncodeH265QualityLevelPropertiesKHR<'_> {} impl ::core::default::Default for VideoEncodeH265QualityLevelPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), preferred_rate_control_flags: VideoEncodeH265RateControlFlagsKHR::default(), preferred_gop_frame_count: u32::default(), preferred_idr_period: u32::default(), preferred_consecutive_b_frame_count: u32::default(), preferred_sub_layer_count: u32::default(), preferred_constant_qp: VideoEncodeH265QpKHR::default(), preferred_max_l0_reference_count: u32::default(), preferred_max_l1_reference_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265QualityLevelPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR; } unsafe impl ExtendsVideoEncodeQualityLevelPropertiesKHR for VideoEncodeH265QualityLevelPropertiesKHR<'_> { } impl<'a> VideoEncodeH265QualityLevelPropertiesKHR<'a> { #[inline] pub fn preferred_rate_control_flags( mut self, preferred_rate_control_flags: VideoEncodeH265RateControlFlagsKHR, ) -> Self { self.preferred_rate_control_flags = preferred_rate_control_flags; self } #[inline] pub fn preferred_gop_frame_count(mut self, preferred_gop_frame_count: u32) -> Self { self.preferred_gop_frame_count = preferred_gop_frame_count; self } #[inline] pub fn preferred_idr_period(mut self, preferred_idr_period: u32) -> Self { self.preferred_idr_period = preferred_idr_period; self } #[inline] pub fn preferred_consecutive_b_frame_count( mut self, preferred_consecutive_b_frame_count: u32, ) -> Self { self.preferred_consecutive_b_frame_count = preferred_consecutive_b_frame_count; self } #[inline] pub fn preferred_sub_layer_count(mut self, preferred_sub_layer_count: u32) -> Self { self.preferred_sub_layer_count = preferred_sub_layer_count; self } #[inline] pub fn preferred_constant_qp(mut self, preferred_constant_qp: VideoEncodeH265QpKHR) -> Self { self.preferred_constant_qp = preferred_constant_qp; self } #[inline] pub fn preferred_max_l0_reference_count( mut self, preferred_max_l0_reference_count: u32, ) -> Self { self.preferred_max_l0_reference_count = preferred_max_l0_reference_count; self } #[inline] pub fn 
preferred_max_l1_reference_count( mut self, preferred_max_l1_reference_count: u32, ) -> Self { self.preferred_max_l1_reference_count = preferred_max_l1_reference_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265SessionCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub use_max_level_idc: Bool32, pub max_level_idc: StdVideoH265LevelIdc, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265SessionCreateInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265SessionCreateInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265SessionCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), use_max_level_idc: Bool32::default(), max_level_idc: StdVideoH265LevelIdc::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265SessionCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionCreateInfoKHR for VideoEncodeH265SessionCreateInfoKHR<'_> {} impl<'a> VideoEncodeH265SessionCreateInfoKHR<'a> { #[inline] pub fn use_max_level_idc(mut self, use_max_level_idc: bool) -> Self { self.use_max_level_idc = use_max_level_idc.into(); self } #[inline] pub fn max_level_idc(mut self, max_level_idc: StdVideoH265LevelIdc) -> Self { self.max_level_idc = max_level_idc; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265SessionParametersAddInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_vps_count: u32, pub p_std_vp_ss: *const StdVideoH265VideoParameterSet, pub std_sps_count: u32, pub p_std_sp_ss: *const StdVideoH265SequenceParameterSet, pub std_pps_count: u32, pub p_std_pp_ss: *const StdVideoH265PictureParameterSet, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265SessionParametersAddInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265SessionParametersAddInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265SessionParametersAddInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_vps_count: u32::default(), p_std_vp_ss: ::core::ptr::null(), std_sps_count: u32::default(), p_std_sp_ss: ::core::ptr::null(), std_pps_count: u32::default(), p_std_pp_ss: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265SessionParametersAddInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR for VideoEncodeH265SessionParametersAddInfoKHR<'_> { } impl<'a> VideoEncodeH265SessionParametersAddInfoKHR<'a> { #[inline] pub fn std_vp_ss(mut self, std_vp_ss: &'a [StdVideoH265VideoParameterSet]) -> Self { self.std_vps_count = std_vp_ss.len() as _; self.p_std_vp_ss = std_vp_ss.as_ptr(); self } #[inline] pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH265SequenceParameterSet]) -> Self { self.std_sps_count = std_sp_ss.len() as _; self.p_std_sp_ss = std_sp_ss.as_ptr(); self } #[inline] pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH265PictureParameterSet]) -> Self { self.std_pps_count = std_pp_ss.len() as _; self.p_std_pp_ss = std_pp_ss.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = 
""] #[must_use] pub struct VideoEncodeH265SessionParametersCreateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub max_std_vps_count: u32, pub max_std_sps_count: u32, pub max_std_pps_count: u32, pub p_parameters_add_info: *const VideoEncodeH265SessionParametersAddInfoKHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265SessionParametersCreateInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265SessionParametersCreateInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265SessionParametersCreateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_std_vps_count: u32::default(), max_std_sps_count: u32::default(), max_std_pps_count: u32::default(), p_parameters_add_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265SessionParametersCreateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR; } unsafe impl ExtendsVideoSessionParametersCreateInfoKHR for VideoEncodeH265SessionParametersCreateInfoKHR<'_> { } impl<'a> VideoEncodeH265SessionParametersCreateInfoKHR<'a> { #[inline] pub fn max_std_vps_count(mut self, max_std_vps_count: u32) -> Self { self.max_std_vps_count = max_std_vps_count; self } #[inline] pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self { self.max_std_sps_count = max_std_sps_count; self } #[inline] pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self { self.max_std_pps_count = max_std_pps_count; self } #[inline] pub fn parameters_add_info( mut self, parameters_add_info: &'a VideoEncodeH265SessionParametersAddInfoKHR<'a>, ) -> Self { self.p_parameters_add_info = parameters_add_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265SessionParametersGetInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub write_std_vps: Bool32, pub write_std_sps: Bool32, pub write_std_pps: Bool32, pub std_vps_id: u32, pub std_sps_id: u32, pub std_pps_id: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265SessionParametersGetInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265SessionParametersGetInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265SessionParametersGetInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), write_std_vps: Bool32::default(), write_std_sps: Bool32::default(), write_std_pps: Bool32::default(), std_vps_id: u32::default(), std_sps_id: u32::default(), std_pps_id: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265SessionParametersGetInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR; } unsafe impl ExtendsVideoEncodeSessionParametersGetInfoKHR for VideoEncodeH265SessionParametersGetInfoKHR<'_> { } impl<'a> VideoEncodeH265SessionParametersGetInfoKHR<'a> { #[inline] pub fn write_std_vps(mut self, write_std_vps: bool) -> Self { self.write_std_vps = write_std_vps.into(); self } #[inline] pub fn write_std_sps(mut self, write_std_sps: bool) -> Self { self.write_std_sps = write_std_sps.into(); self } #[inline] pub fn write_std_pps(mut self, write_std_pps: bool) -> Self { self.write_std_pps = write_std_pps.into(); self } #[inline] pub fn std_vps_id(mut self, std_vps_id: u32) -> Self { self.std_vps_id = 
std_vps_id; self } #[inline] pub fn std_sps_id(mut self, std_sps_id: u32) -> Self { self.std_sps_id = std_sps_id; self } #[inline] pub fn std_pps_id(mut self, std_pps_id: u32) -> Self { self.std_pps_id = std_pps_id; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265SessionParametersFeedbackInfoKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub has_std_vps_overrides: Bool32, pub has_std_sps_overrides: Bool32, pub has_std_pps_overrides: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265SessionParametersFeedbackInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265SessionParametersFeedbackInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265SessionParametersFeedbackInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), has_std_vps_overrides: Bool32::default(), has_std_sps_overrides: Bool32::default(), has_std_pps_overrides: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265SessionParametersFeedbackInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR; } unsafe impl ExtendsVideoEncodeSessionParametersFeedbackInfoKHR for VideoEncodeH265SessionParametersFeedbackInfoKHR<'_> { } impl<'a> VideoEncodeH265SessionParametersFeedbackInfoKHR<'a> { #[inline] pub fn has_std_vps_overrides(mut self, has_std_vps_overrides: bool) -> Self { self.has_std_vps_overrides = has_std_vps_overrides.into(); self } #[inline] pub fn has_std_sps_overrides(mut self, has_std_sps_overrides: bool) -> Self { self.has_std_sps_overrides = has_std_sps_overrides.into(); self } #[inline] pub fn has_std_pps_overrides(mut self, has_std_pps_overrides: bool) -> Self { self.has_std_pps_overrides = has_std_pps_overrides.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265PictureInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub nalu_slice_segment_entry_count: u32, pub p_nalu_slice_segment_entries: *const VideoEncodeH265NaluSliceSegmentInfoKHR<'a>, pub p_std_picture_info: *const StdVideoEncodeH265PictureInfo, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265PictureInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265PictureInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265PictureInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), nalu_slice_segment_entry_count: u32::default(), p_nalu_slice_segment_entries: ::core::ptr::null(), p_std_picture_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265PictureInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_PICTURE_INFO_KHR; } unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH265PictureInfoKHR<'_> {} impl<'a> VideoEncodeH265PictureInfoKHR<'a> { #[inline] pub fn nalu_slice_segment_entries( mut self, nalu_slice_segment_entries: &'a [VideoEncodeH265NaluSliceSegmentInfoKHR<'a>], ) -> Self { self.nalu_slice_segment_entry_count = nalu_slice_segment_entries.len() as _; self.p_nalu_slice_segment_entries = nalu_slice_segment_entries.as_ptr(); self } #[inline] pub fn std_picture_info(mut self, std_picture_info: &'a StdVideoEncodeH265PictureInfo) -> Self { self.p_std_picture_info = 
std_picture_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265NaluSliceSegmentInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub constant_qp: i32, pub p_std_slice_segment_header: *const StdVideoEncodeH265SliceSegmentHeader, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265NaluSliceSegmentInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265NaluSliceSegmentInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265NaluSliceSegmentInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), constant_qp: i32::default(), p_std_slice_segment_header: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265NaluSliceSegmentInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR; } impl<'a> VideoEncodeH265NaluSliceSegmentInfoKHR<'a> { #[inline] pub fn constant_qp(mut self, constant_qp: i32) -> Self { self.constant_qp = constant_qp; self } #[inline] pub fn std_slice_segment_header( mut self, std_slice_segment_header: &'a StdVideoEncodeH265SliceSegmentHeader, ) -> Self { self.p_std_slice_segment_header = std_slice_segment_header; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265RateControlInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: VideoEncodeH265RateControlFlagsKHR, pub gop_frame_count: u32, pub idr_period: u32, pub consecutive_b_frame_count: u32, pub sub_layer_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265RateControlInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265RateControlInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265RateControlInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: VideoEncodeH265RateControlFlagsKHR::default(), gop_frame_count: u32::default(), idr_period: u32::default(), consecutive_b_frame_count: u32::default(), sub_layer_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265RateControlInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR; } unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH265RateControlInfoKHR<'_> {} unsafe impl ExtendsVideoBeginCodingInfoKHR for VideoEncodeH265RateControlInfoKHR<'_> {} impl<'a> VideoEncodeH265RateControlInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: VideoEncodeH265RateControlFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn gop_frame_count(mut self, gop_frame_count: u32) -> Self { self.gop_frame_count = gop_frame_count; self } #[inline] pub fn idr_period(mut self, idr_period: u32) -> Self { self.idr_period = idr_period; self } #[inline] pub fn consecutive_b_frame_count(mut self, consecutive_b_frame_count: u32) -> Self { self.consecutive_b_frame_count = consecutive_b_frame_count; self } #[inline] pub fn sub_layer_count(mut self, sub_layer_count: u32) -> Self { self.sub_layer_count = sub_layer_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VideoEncodeH265QpKHR { pub qp_i: i32, pub qp_p: i32, pub qp_b: i32, } impl VideoEncodeH265QpKHR { #[inline] pub fn qp_i(mut self, qp_i: i32) -> Self { 
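// Illustrative usage sketch (not part of the generated bindings): composing the
// `VideoEncodeH265QpKHR` triple with the H.265 rate-control layer builders defined
// below; the QP values chosen here are hypothetical.
//
//     let min_qp = VideoEncodeH265QpKHR::default().qp_i(18).qp_p(20).qp_b(22);
//     let max_qp = VideoEncodeH265QpKHR::default().qp_i(38).qp_p(40).qp_b(42);
//     let layer = VideoEncodeH265RateControlLayerInfoKHR::default()
//         .use_min_qp(true)
//         .min_qp(min_qp)
//         .use_max_qp(true)
//         .max_qp(max_qp);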
self.qp_i = qp_i; self } #[inline] pub fn qp_p(mut self, qp_p: i32) -> Self { self.qp_p = qp_p; self } #[inline] pub fn qp_b(mut self, qp_b: i32) -> Self { self.qp_b = qp_b; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct VideoEncodeH265FrameSizeKHR { pub frame_i_size: u32, pub frame_p_size: u32, pub frame_b_size: u32, } impl VideoEncodeH265FrameSizeKHR { #[inline] pub fn frame_i_size(mut self, frame_i_size: u32) -> Self { self.frame_i_size = frame_i_size; self } #[inline] pub fn frame_p_size(mut self, frame_p_size: u32) -> Self { self.frame_p_size = frame_p_size; self } #[inline] pub fn frame_b_size(mut self, frame_b_size: u32) -> Self { self.frame_b_size = frame_b_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265GopRemainingFrameInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub use_gop_remaining_frames: Bool32, pub gop_remaining_i: u32, pub gop_remaining_p: u32, pub gop_remaining_b: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265GopRemainingFrameInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265GopRemainingFrameInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265GopRemainingFrameInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), use_gop_remaining_frames: Bool32::default(), gop_remaining_i: u32::default(), gop_remaining_p: u32::default(), gop_remaining_b: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265GopRemainingFrameInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR; } unsafe impl ExtendsVideoBeginCodingInfoKHR for VideoEncodeH265GopRemainingFrameInfoKHR<'_> {} impl<'a> VideoEncodeH265GopRemainingFrameInfoKHR<'a> { #[inline] pub fn use_gop_remaining_frames(mut self, use_gop_remaining_frames: bool) -> Self { self.use_gop_remaining_frames = use_gop_remaining_frames.into(); self } #[inline] pub fn gop_remaining_i(mut self, gop_remaining_i: u32) -> Self { self.gop_remaining_i = gop_remaining_i; self } #[inline] pub fn gop_remaining_p(mut self, gop_remaining_p: u32) -> Self { self.gop_remaining_p = gop_remaining_p; self } #[inline] pub fn gop_remaining_b(mut self, gop_remaining_b: u32) -> Self { self.gop_remaining_b = gop_remaining_b; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265RateControlLayerInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub use_min_qp: Bool32, pub min_qp: VideoEncodeH265QpKHR, pub use_max_qp: Bool32, pub max_qp: VideoEncodeH265QpKHR, pub use_max_frame_size: Bool32, pub max_frame_size: VideoEncodeH265FrameSizeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265RateControlLayerInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265RateControlLayerInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265RateControlLayerInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), use_min_qp: Bool32::default(), min_qp: VideoEncodeH265QpKHR::default(), use_max_qp: Bool32::default(), max_qp: VideoEncodeH265QpKHR::default(), use_max_frame_size: Bool32::default(), max_frame_size: VideoEncodeH265FrameSizeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure 
for VideoEncodeH265RateControlLayerInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR; } unsafe impl ExtendsVideoEncodeRateControlLayerInfoKHR for VideoEncodeH265RateControlLayerInfoKHR<'_> { } impl<'a> VideoEncodeH265RateControlLayerInfoKHR<'a> { #[inline] pub fn use_min_qp(mut self, use_min_qp: bool) -> Self { self.use_min_qp = use_min_qp.into(); self } #[inline] pub fn min_qp(mut self, min_qp: VideoEncodeH265QpKHR) -> Self { self.min_qp = min_qp; self } #[inline] pub fn use_max_qp(mut self, use_max_qp: bool) -> Self { self.use_max_qp = use_max_qp.into(); self } #[inline] pub fn max_qp(mut self, max_qp: VideoEncodeH265QpKHR) -> Self { self.max_qp = max_qp; self } #[inline] pub fn use_max_frame_size(mut self, use_max_frame_size: bool) -> Self { self.use_max_frame_size = use_max_frame_size.into(); self } #[inline] pub fn max_frame_size(mut self, max_frame_size: VideoEncodeH265FrameSizeKHR) -> Self { self.max_frame_size = max_frame_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265ProfileInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub std_profile_idc: StdVideoH265ProfileIdc, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265ProfileInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265ProfileInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265ProfileInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), std_profile_idc: StdVideoH265ProfileIdc::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265ProfileInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_PROFILE_INFO_KHR; } unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeH265ProfileInfoKHR<'_> {} unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeH265ProfileInfoKHR<'_> {} impl<'a> VideoEncodeH265ProfileInfoKHR<'a> { #[inline] pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH265ProfileIdc) -> Self { self.std_profile_idc = std_profile_idc; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct VideoEncodeH265DpbSlotInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_std_reference_info: *const StdVideoEncodeH265ReferenceInfo, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for VideoEncodeH265DpbSlotInfoKHR<'_> {} unsafe impl Sync for VideoEncodeH265DpbSlotInfoKHR<'_> {} impl ::core::default::Default for VideoEncodeH265DpbSlotInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_std_reference_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for VideoEncodeH265DpbSlotInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR; } unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoEncodeH265DpbSlotInfoKHR<'_> {} impl<'a> VideoEncodeH265DpbSlotInfoKHR<'a> { #[inline] pub fn std_reference_info( mut self, std_reference_info: &'a StdVideoEncodeH265ReferenceInfo, ) -> Self { self.p_std_reference_info = std_reference_info; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceInheritedViewportScissorFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub 
inherited_viewport_scissor2_d: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceInheritedViewportScissorFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceInheritedViewportScissorFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceInheritedViewportScissorFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), inherited_viewport_scissor2_d: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceInheritedViewportScissorFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInheritedViewportScissorFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInheritedViewportScissorFeaturesNV<'_> {} impl<'a> PhysicalDeviceInheritedViewportScissorFeaturesNV<'a> { #[inline] pub fn inherited_viewport_scissor2_d(mut self, inherited_viewport_scissor2_d: bool) -> Self { self.inherited_viewport_scissor2_d = inherited_viewport_scissor2_d.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferInheritanceViewportScissorInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub viewport_scissor2_d: Bool32, pub viewport_depth_count: u32, pub p_viewport_depths: *const Viewport, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferInheritanceViewportScissorInfoNV<'_> {} unsafe impl Sync for CommandBufferInheritanceViewportScissorInfoNV<'_> {} impl ::core::default::Default for CommandBufferInheritanceViewportScissorInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), viewport_scissor2_d: Bool32::default(), viewport_depth_count: u32::default(), p_viewport_depths: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferInheritanceViewportScissorInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV; } unsafe impl ExtendsCommandBufferInheritanceInfo for CommandBufferInheritanceViewportScissorInfoNV<'_> { } impl<'a> CommandBufferInheritanceViewportScissorInfoNV<'a> { #[inline] pub fn viewport_scissor2_d(mut self, viewport_scissor2_d: bool) -> Self { self.viewport_scissor2_d = viewport_scissor2_d.into(); self } #[inline] pub fn viewport_depth_count(mut self, viewport_depth_count: u32) -> Self { self.viewport_depth_count = viewport_depth_count; self } #[inline] pub fn viewport_depths(mut self, viewport_depths: &'a Viewport) -> Self { self.p_viewport_depths = viewport_depths; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ycbcr2plane444_formats: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ycbcr2plane444_formats: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for 
PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'_> {} impl<'a> PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT<'a> { #[inline] pub fn ycbcr2plane444_formats(mut self, ycbcr2plane444_formats: bool) -> Self { self.ycbcr2plane444_formats = ycbcr2plane444_formats.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceProvokingVertexFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub provoking_vertex_last: Bool32, pub transform_feedback_preserves_provoking_vertex: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceProvokingVertexFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceProvokingVertexFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceProvokingVertexFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), provoking_vertex_last: Bool32::default(), transform_feedback_preserves_provoking_vertex: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceProvokingVertexFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceProvokingVertexFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProvokingVertexFeaturesEXT<'_> {} impl<'a> PhysicalDeviceProvokingVertexFeaturesEXT<'a> { #[inline] pub fn provoking_vertex_last(mut self, provoking_vertex_last: bool) -> Self { self.provoking_vertex_last = provoking_vertex_last.into(); self } #[inline] pub fn transform_feedback_preserves_provoking_vertex( mut self, transform_feedback_preserves_provoking_vertex: bool, ) -> Self { self.transform_feedback_preserves_provoking_vertex = transform_feedback_preserves_provoking_vertex.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceProvokingVertexPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub provoking_vertex_mode_per_pipeline: Bool32, pub transform_feedback_preserves_triangle_fan_provoking_vertex: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceProvokingVertexPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceProvokingVertexPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceProvokingVertexPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), provoking_vertex_mode_per_pipeline: Bool32::default(), transform_feedback_preserves_triangle_fan_provoking_vertex: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceProvokingVertexPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceProvokingVertexPropertiesEXT<'_> {} impl<'a> PhysicalDeviceProvokingVertexPropertiesEXT<'a> { #[inline] pub fn provoking_vertex_mode_per_pipeline( mut self, provoking_vertex_mode_per_pipeline: bool, ) -> Self { 
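// Illustrative usage sketch (not part of the generated bindings): because
// `PhysicalDeviceProvokingVertexFeaturesEXT` implements `ExtendsDeviceCreateInfo` (see above),
// it can be chained into `DeviceCreateInfo` via the `push_next` method defined elsewhere in
// this module for that struct.
//
//     let mut provoking_vertex = PhysicalDeviceProvokingVertexFeaturesEXT::default()
//         .provoking_vertex_last(true);
//     let device_create_info = DeviceCreateInfo::default()
//         .push_next(&mut provoking_vertex);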
self.provoking_vertex_mode_per_pipeline = provoking_vertex_mode_per_pipeline.into(); self } #[inline] pub fn transform_feedback_preserves_triangle_fan_provoking_vertex( mut self, transform_feedback_preserves_triangle_fan_provoking_vertex: bool, ) -> Self { self.transform_feedback_preserves_triangle_fan_provoking_vertex = transform_feedback_preserves_triangle_fan_provoking_vertex.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRasterizationProvokingVertexStateCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub provoking_vertex_mode: ProvokingVertexModeEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRasterizationProvokingVertexStateCreateInfoEXT<'_> {} unsafe impl Sync for PipelineRasterizationProvokingVertexStateCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineRasterizationProvokingVertexStateCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), provoking_vertex_mode: ProvokingVertexModeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRasterizationProvokingVertexStateCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineRasterizationStateCreateInfo for PipelineRasterizationProvokingVertexStateCreateInfoEXT<'_> { } impl<'a> PipelineRasterizationProvokingVertexStateCreateInfoEXT<'a> { #[inline] pub fn provoking_vertex_mode(mut self, provoking_vertex_mode: ProvokingVertexModeEXT) -> Self { self.provoking_vertex_mode = provoking_vertex_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CuModuleCreateInfoNVX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub data_size: usize, pub p_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CuModuleCreateInfoNVX<'_> {} unsafe impl Sync for CuModuleCreateInfoNVX<'_> {} impl ::core::default::Default for CuModuleCreateInfoNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), data_size: usize::default(), p_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CuModuleCreateInfoNVX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CU_MODULE_CREATE_INFO_NVX; } impl<'a> CuModuleCreateInfoNVX<'a> { #[inline] pub fn data(mut self, data: &'a [u8]) -> Self { self.data_size = data.len(); self.p_data = data.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CuFunctionCreateInfoNVX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub module: CuModuleNVX, pub p_name: *const c_char, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CuFunctionCreateInfoNVX<'_> {} unsafe impl Sync for CuFunctionCreateInfoNVX<'_> {} impl ::core::default::Default for CuFunctionCreateInfoNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), module: CuModuleNVX::default(), p_name: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CuFunctionCreateInfoNVX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CU_FUNCTION_CREATE_INFO_NVX; } impl<'a> CuFunctionCreateInfoNVX<'a> { #[inline] pub fn module(mut self, module: 
CuModuleNVX) -> Self { self.module = module; self } #[inline] pub fn name(mut self, name: &'a CStr) -> Self { self.p_name = name.as_ptr(); self } #[inline] pub unsafe fn name_as_c_str(&self) -> Option<&CStr> { if self.p_name.is_null() { None } else { Some(CStr::from_ptr(self.p_name)) } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CuLaunchInfoNVX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub function: CuFunctionNVX, pub grid_dim_x: u32, pub grid_dim_y: u32, pub grid_dim_z: u32, pub block_dim_x: u32, pub block_dim_y: u32, pub block_dim_z: u32, pub shared_mem_bytes: u32, pub param_count: usize, pub p_params: *const *const c_void, pub extra_count: usize, pub p_extras: *const *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CuLaunchInfoNVX<'_> {} unsafe impl Sync for CuLaunchInfoNVX<'_> {} impl ::core::default::Default for CuLaunchInfoNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), function: CuFunctionNVX::default(), grid_dim_x: u32::default(), grid_dim_y: u32::default(), grid_dim_z: u32::default(), block_dim_x: u32::default(), block_dim_y: u32::default(), block_dim_z: u32::default(), shared_mem_bytes: u32::default(), param_count: usize::default(), p_params: ::core::ptr::null(), extra_count: usize::default(), p_extras: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CuLaunchInfoNVX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CU_LAUNCH_INFO_NVX; } impl<'a> CuLaunchInfoNVX<'a> { #[inline] pub fn function(mut self, function: CuFunctionNVX) -> Self { self.function = function; self } #[inline] pub fn grid_dim_x(mut self, grid_dim_x: u32) -> Self { self.grid_dim_x = grid_dim_x; self } #[inline] pub fn grid_dim_y(mut self, grid_dim_y: u32) -> Self { self.grid_dim_y = grid_dim_y; self } #[inline] pub fn grid_dim_z(mut self, grid_dim_z: u32) -> Self { self.grid_dim_z = grid_dim_z; self } #[inline] pub fn block_dim_x(mut self, block_dim_x: u32) -> Self { self.block_dim_x = block_dim_x; self } #[inline] pub fn block_dim_y(mut self, block_dim_y: u32) -> Self { self.block_dim_y = block_dim_y; self } #[inline] pub fn block_dim_z(mut self, block_dim_z: u32) -> Self { self.block_dim_z = block_dim_z; self } #[inline] pub fn shared_mem_bytes(mut self, shared_mem_bytes: u32) -> Self { self.shared_mem_bytes = shared_mem_bytes; self } #[inline] pub fn params(mut self, params: &'a [*const c_void]) -> Self { self.param_count = params.len(); self.p_params = params.as_ptr(); self } #[inline] pub fn extras(mut self, extras: &'a [*const c_void]) -> Self { self.extra_count = extras.len(); self.p_extras = extras.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorBufferFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub descriptor_buffer: Bool32, pub descriptor_buffer_capture_replay: Bool32, pub descriptor_buffer_image_layout_ignored: Bool32, pub descriptor_buffer_push_descriptors: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorBufferFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorBufferFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDescriptorBufferFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), descriptor_buffer: 
Bool32::default(), descriptor_buffer_capture_replay: Bool32::default(), descriptor_buffer_image_layout_ignored: Bool32::default(), descriptor_buffer_push_descriptors: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorBufferFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorBufferFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorBufferFeaturesEXT<'_> {} impl<'a> PhysicalDeviceDescriptorBufferFeaturesEXT<'a> { #[inline] pub fn descriptor_buffer(mut self, descriptor_buffer: bool) -> Self { self.descriptor_buffer = descriptor_buffer.into(); self } #[inline] pub fn descriptor_buffer_capture_replay( mut self, descriptor_buffer_capture_replay: bool, ) -> Self { self.descriptor_buffer_capture_replay = descriptor_buffer_capture_replay.into(); self } #[inline] pub fn descriptor_buffer_image_layout_ignored( mut self, descriptor_buffer_image_layout_ignored: bool, ) -> Self { self.descriptor_buffer_image_layout_ignored = descriptor_buffer_image_layout_ignored.into(); self } #[inline] pub fn descriptor_buffer_push_descriptors( mut self, descriptor_buffer_push_descriptors: bool, ) -> Self { self.descriptor_buffer_push_descriptors = descriptor_buffer_push_descriptors.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorBufferPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub combined_image_sampler_descriptor_single_array: Bool32, pub bufferless_push_descriptors: Bool32, pub allow_sampler_image_view_post_submit_creation: Bool32, pub descriptor_buffer_offset_alignment: DeviceSize, pub max_descriptor_buffer_bindings: u32, pub max_resource_descriptor_buffer_bindings: u32, pub max_sampler_descriptor_buffer_bindings: u32, pub max_embedded_immutable_sampler_bindings: u32, pub max_embedded_immutable_samplers: u32, pub buffer_capture_replay_descriptor_data_size: usize, pub image_capture_replay_descriptor_data_size: usize, pub image_view_capture_replay_descriptor_data_size: usize, pub sampler_capture_replay_descriptor_data_size: usize, pub acceleration_structure_capture_replay_descriptor_data_size: usize, pub sampler_descriptor_size: usize, pub combined_image_sampler_descriptor_size: usize, pub sampled_image_descriptor_size: usize, pub storage_image_descriptor_size: usize, pub uniform_texel_buffer_descriptor_size: usize, pub robust_uniform_texel_buffer_descriptor_size: usize, pub storage_texel_buffer_descriptor_size: usize, pub robust_storage_texel_buffer_descriptor_size: usize, pub uniform_buffer_descriptor_size: usize, pub robust_uniform_buffer_descriptor_size: usize, pub storage_buffer_descriptor_size: usize, pub robust_storage_buffer_descriptor_size: usize, pub input_attachment_descriptor_size: usize, pub acceleration_structure_descriptor_size: usize, pub max_sampler_descriptor_buffer_range: DeviceSize, pub max_resource_descriptor_buffer_range: DeviceSize, pub sampler_descriptor_buffer_address_space_size: DeviceSize, pub resource_descriptor_buffer_address_space_size: DeviceSize, pub descriptor_buffer_address_space_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorBufferPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorBufferPropertiesEXT<'_> {} impl ::core::default::Default for 
PhysicalDeviceDescriptorBufferPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), combined_image_sampler_descriptor_single_array: Bool32::default(), bufferless_push_descriptors: Bool32::default(), allow_sampler_image_view_post_submit_creation: Bool32::default(), descriptor_buffer_offset_alignment: DeviceSize::default(), max_descriptor_buffer_bindings: u32::default(), max_resource_descriptor_buffer_bindings: u32::default(), max_sampler_descriptor_buffer_bindings: u32::default(), max_embedded_immutable_sampler_bindings: u32::default(), max_embedded_immutable_samplers: u32::default(), buffer_capture_replay_descriptor_data_size: usize::default(), image_capture_replay_descriptor_data_size: usize::default(), image_view_capture_replay_descriptor_data_size: usize::default(), sampler_capture_replay_descriptor_data_size: usize::default(), acceleration_structure_capture_replay_descriptor_data_size: usize::default(), sampler_descriptor_size: usize::default(), combined_image_sampler_descriptor_size: usize::default(), sampled_image_descriptor_size: usize::default(), storage_image_descriptor_size: usize::default(), uniform_texel_buffer_descriptor_size: usize::default(), robust_uniform_texel_buffer_descriptor_size: usize::default(), storage_texel_buffer_descriptor_size: usize::default(), robust_storage_texel_buffer_descriptor_size: usize::default(), uniform_buffer_descriptor_size: usize::default(), robust_uniform_buffer_descriptor_size: usize::default(), storage_buffer_descriptor_size: usize::default(), robust_storage_buffer_descriptor_size: usize::default(), input_attachment_descriptor_size: usize::default(), acceleration_structure_descriptor_size: usize::default(), max_sampler_descriptor_buffer_range: DeviceSize::default(), max_resource_descriptor_buffer_range: DeviceSize::default(), sampler_descriptor_buffer_address_space_size: DeviceSize::default(), resource_descriptor_buffer_address_space_size: DeviceSize::default(), descriptor_buffer_address_space_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorBufferPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDescriptorBufferPropertiesEXT<'_> {} impl<'a> PhysicalDeviceDescriptorBufferPropertiesEXT<'a> { #[inline] pub fn combined_image_sampler_descriptor_single_array( mut self, combined_image_sampler_descriptor_single_array: bool, ) -> Self { self.combined_image_sampler_descriptor_single_array = combined_image_sampler_descriptor_single_array.into(); self } #[inline] pub fn bufferless_push_descriptors(mut self, bufferless_push_descriptors: bool) -> Self { self.bufferless_push_descriptors = bufferless_push_descriptors.into(); self } #[inline] pub fn allow_sampler_image_view_post_submit_creation( mut self, allow_sampler_image_view_post_submit_creation: bool, ) -> Self { self.allow_sampler_image_view_post_submit_creation = allow_sampler_image_view_post_submit_creation.into(); self } #[inline] pub fn descriptor_buffer_offset_alignment( mut self, descriptor_buffer_offset_alignment: DeviceSize, ) -> Self { self.descriptor_buffer_offset_alignment = descriptor_buffer_offset_alignment; self } #[inline] pub fn max_descriptor_buffer_bindings(mut self, max_descriptor_buffer_bindings: u32) -> Self { self.max_descriptor_buffer_bindings = max_descriptor_buffer_bindings; self } #[inline] pub fn 
max_resource_descriptor_buffer_bindings( mut self, max_resource_descriptor_buffer_bindings: u32, ) -> Self { self.max_resource_descriptor_buffer_bindings = max_resource_descriptor_buffer_bindings; self } #[inline] pub fn max_sampler_descriptor_buffer_bindings( mut self, max_sampler_descriptor_buffer_bindings: u32, ) -> Self { self.max_sampler_descriptor_buffer_bindings = max_sampler_descriptor_buffer_bindings; self } #[inline] pub fn max_embedded_immutable_sampler_bindings( mut self, max_embedded_immutable_sampler_bindings: u32, ) -> Self { self.max_embedded_immutable_sampler_bindings = max_embedded_immutable_sampler_bindings; self } #[inline] pub fn max_embedded_immutable_samplers(mut self, max_embedded_immutable_samplers: u32) -> Self { self.max_embedded_immutable_samplers = max_embedded_immutable_samplers; self } #[inline] pub fn buffer_capture_replay_descriptor_data_size( mut self, buffer_capture_replay_descriptor_data_size: usize, ) -> Self { self.buffer_capture_replay_descriptor_data_size = buffer_capture_replay_descriptor_data_size; self } #[inline] pub fn image_capture_replay_descriptor_data_size( mut self, image_capture_replay_descriptor_data_size: usize, ) -> Self { self.image_capture_replay_descriptor_data_size = image_capture_replay_descriptor_data_size; self } #[inline] pub fn image_view_capture_replay_descriptor_data_size( mut self, image_view_capture_replay_descriptor_data_size: usize, ) -> Self { self.image_view_capture_replay_descriptor_data_size = image_view_capture_replay_descriptor_data_size; self } #[inline] pub fn sampler_capture_replay_descriptor_data_size( mut self, sampler_capture_replay_descriptor_data_size: usize, ) -> Self { self.sampler_capture_replay_descriptor_data_size = sampler_capture_replay_descriptor_data_size; self } #[inline] pub fn acceleration_structure_capture_replay_descriptor_data_size( mut self, acceleration_structure_capture_replay_descriptor_data_size: usize, ) -> Self { self.acceleration_structure_capture_replay_descriptor_data_size = acceleration_structure_capture_replay_descriptor_data_size; self } #[inline] pub fn sampler_descriptor_size(mut self, sampler_descriptor_size: usize) -> Self { self.sampler_descriptor_size = sampler_descriptor_size; self } #[inline] pub fn combined_image_sampler_descriptor_size( mut self, combined_image_sampler_descriptor_size: usize, ) -> Self { self.combined_image_sampler_descriptor_size = combined_image_sampler_descriptor_size; self } #[inline] pub fn sampled_image_descriptor_size(mut self, sampled_image_descriptor_size: usize) -> Self { self.sampled_image_descriptor_size = sampled_image_descriptor_size; self } #[inline] pub fn storage_image_descriptor_size(mut self, storage_image_descriptor_size: usize) -> Self { self.storage_image_descriptor_size = storage_image_descriptor_size; self } #[inline] pub fn uniform_texel_buffer_descriptor_size( mut self, uniform_texel_buffer_descriptor_size: usize, ) -> Self { self.uniform_texel_buffer_descriptor_size = uniform_texel_buffer_descriptor_size; self } #[inline] pub fn robust_uniform_texel_buffer_descriptor_size( mut self, robust_uniform_texel_buffer_descriptor_size: usize, ) -> Self { self.robust_uniform_texel_buffer_descriptor_size = robust_uniform_texel_buffer_descriptor_size; self } #[inline] pub fn storage_texel_buffer_descriptor_size( mut self, storage_texel_buffer_descriptor_size: usize, ) -> Self { self.storage_texel_buffer_descriptor_size = storage_texel_buffer_descriptor_size; self } #[inline] pub fn robust_storage_texel_buffer_descriptor_size( mut self, 
robust_storage_texel_buffer_descriptor_size: usize, ) -> Self { self.robust_storage_texel_buffer_descriptor_size = robust_storage_texel_buffer_descriptor_size; self } #[inline] pub fn uniform_buffer_descriptor_size(mut self, uniform_buffer_descriptor_size: usize) -> Self { self.uniform_buffer_descriptor_size = uniform_buffer_descriptor_size; self } #[inline] pub fn robust_uniform_buffer_descriptor_size( mut self, robust_uniform_buffer_descriptor_size: usize, ) -> Self { self.robust_uniform_buffer_descriptor_size = robust_uniform_buffer_descriptor_size; self } #[inline] pub fn storage_buffer_descriptor_size(mut self, storage_buffer_descriptor_size: usize) -> Self { self.storage_buffer_descriptor_size = storage_buffer_descriptor_size; self } #[inline] pub fn robust_storage_buffer_descriptor_size( mut self, robust_storage_buffer_descriptor_size: usize, ) -> Self { self.robust_storage_buffer_descriptor_size = robust_storage_buffer_descriptor_size; self } #[inline] pub fn input_attachment_descriptor_size( mut self, input_attachment_descriptor_size: usize, ) -> Self { self.input_attachment_descriptor_size = input_attachment_descriptor_size; self } #[inline] pub fn acceleration_structure_descriptor_size( mut self, acceleration_structure_descriptor_size: usize, ) -> Self { self.acceleration_structure_descriptor_size = acceleration_structure_descriptor_size; self } #[inline] pub fn max_sampler_descriptor_buffer_range( mut self, max_sampler_descriptor_buffer_range: DeviceSize, ) -> Self { self.max_sampler_descriptor_buffer_range = max_sampler_descriptor_buffer_range; self } #[inline] pub fn max_resource_descriptor_buffer_range( mut self, max_resource_descriptor_buffer_range: DeviceSize, ) -> Self { self.max_resource_descriptor_buffer_range = max_resource_descriptor_buffer_range; self } #[inline] pub fn sampler_descriptor_buffer_address_space_size( mut self, sampler_descriptor_buffer_address_space_size: DeviceSize, ) -> Self { self.sampler_descriptor_buffer_address_space_size = sampler_descriptor_buffer_address_space_size; self } #[inline] pub fn resource_descriptor_buffer_address_space_size( mut self, resource_descriptor_buffer_address_space_size: DeviceSize, ) -> Self { self.resource_descriptor_buffer_address_space_size = resource_descriptor_buffer_address_space_size; self } #[inline] pub fn descriptor_buffer_address_space_size( mut self, descriptor_buffer_address_space_size: DeviceSize, ) -> Self { self.descriptor_buffer_address_space_size = descriptor_buffer_address_space_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub combined_image_sampler_density_map_descriptor_size: usize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), combined_image_sampler_density_map_descriptor_size: usize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT; } unsafe impl 
ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'_> { } impl<'a> PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT<'a> { #[inline] pub fn combined_image_sampler_density_map_descriptor_size( mut self, combined_image_sampler_density_map_descriptor_size: usize, ) -> Self { self.combined_image_sampler_density_map_descriptor_size = combined_image_sampler_density_map_descriptor_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorAddressInfoEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub address: DeviceAddress, pub range: DeviceSize, pub format: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorAddressInfoEXT<'_> {} unsafe impl Sync for DescriptorAddressInfoEXT<'_> {} impl ::core::default::Default for DescriptorAddressInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), address: DeviceAddress::default(), range: DeviceSize::default(), format: Format::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorAddressInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_ADDRESS_INFO_EXT; } impl<'a> DescriptorAddressInfoEXT<'a> { #[inline] pub fn address(mut self, address: DeviceAddress) -> Self { self.address = address; self } #[inline] pub fn range(mut self, range: DeviceSize) -> Self { self.range = range; self } #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorBufferBindingInfoEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub address: DeviceAddress, pub usage: BufferUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorBufferBindingInfoEXT<'_> {} unsafe impl Sync for DescriptorBufferBindingInfoEXT<'_> {} impl ::core::default::Default for DescriptorBufferBindingInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), address: DeviceAddress::default(), usage: BufferUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorBufferBindingInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_BUFFER_BINDING_INFO_EXT; } pub unsafe trait ExtendsDescriptorBufferBindingInfoEXT {} impl<'a> DescriptorBufferBindingInfoEXT<'a> { #[inline] pub fn address(mut self, address: DeviceAddress) -> Self { self.address = address; self } #[inline] pub fn usage(mut self, usage: BufferUsageFlags) -> Self { self.usage = usage; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorBufferBindingPushDescriptorBufferHandleEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub buffer: Buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorBufferBindingPushDescriptorBufferHandleEXT<'_> {} unsafe impl Sync for DescriptorBufferBindingPushDescriptorBufferHandleEXT<'_> {} impl ::core::default::Default for DescriptorBufferBindingPushDescriptorBufferHandleEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), buffer: Buffer::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorBufferBindingPushDescriptorBufferHandleEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT; } unsafe impl ExtendsDescriptorBufferBindingInfoEXT for DescriptorBufferBindingPushDescriptorBufferHandleEXT<'_> { } impl<'a> DescriptorBufferBindingPushDescriptorBufferHandleEXT<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union DescriptorDataEXT<'a> { pub p_sampler: *const Sampler, pub p_combined_image_sampler: *const DescriptorImageInfo, pub p_input_attachment_image: *const DescriptorImageInfo, pub p_sampled_image: *const DescriptorImageInfo, pub p_storage_image: *const DescriptorImageInfo, pub p_uniform_texel_buffer: *const DescriptorAddressInfoEXT<'a>, pub p_storage_texel_buffer: *const DescriptorAddressInfoEXT<'a>, pub p_uniform_buffer: *const DescriptorAddressInfoEXT<'a>, pub p_storage_buffer: *const DescriptorAddressInfoEXT<'a>, pub acceleration_structure: DeviceAddress, } impl<'a> ::core::default::Default for DescriptorDataEXT<'a> { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorGetInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: DescriptorType, pub data: DescriptorDataEXT<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorGetInfoEXT<'_> {} unsafe impl Sync for DescriptorGetInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for DescriptorGetInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DescriptorGetInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("ty", &self.ty) .field("data", &"union") .finish() } } impl ::core::default::Default for DescriptorGetInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: DescriptorType::default(), data: DescriptorDataEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorGetInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_GET_INFO_EXT; } impl<'a> DescriptorGetInfoEXT<'a> { #[inline] pub fn ty(mut self, ty: DescriptorType) -> Self { self.ty = ty; self } #[inline] 
pub fn data(mut self, data: DescriptorDataEXT<'a>) -> Self { self.data = data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCaptureDescriptorDataInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: Buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCaptureDescriptorDataInfoEXT<'_> {} unsafe impl Sync for BufferCaptureDescriptorDataInfoEXT<'_> {} impl ::core::default::Default for BufferCaptureDescriptorDataInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: Buffer::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCaptureDescriptorDataInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT; } impl<'a> BufferCaptureDescriptorDataInfoEXT<'a> { #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageCaptureDescriptorDataInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageCaptureDescriptorDataInfoEXT<'_> {} unsafe impl Sync for ImageCaptureDescriptorDataInfoEXT<'_> {} impl ::core::default::Default for ImageCaptureDescriptorDataInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageCaptureDescriptorDataInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT; } impl<'a> ImageCaptureDescriptorDataInfoEXT<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewCaptureDescriptorDataInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_view: ImageView, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewCaptureDescriptorDataInfoEXT<'_> {} unsafe impl Sync for ImageViewCaptureDescriptorDataInfoEXT<'_> {} impl ::core::default::Default for ImageViewCaptureDescriptorDataInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_view: ImageView::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewCaptureDescriptorDataInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT; } impl<'a> ImageViewCaptureDescriptorDataInfoEXT<'a> { #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerCaptureDescriptorDataInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub sampler: Sampler, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerCaptureDescriptorDataInfoEXT<'_> {} unsafe impl Sync for SamplerCaptureDescriptorDataInfoEXT<'_> {} impl ::core::default::Default for SamplerCaptureDescriptorDataInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), sampler: Sampler::default(), _marker: PhantomData, 
} } } unsafe impl<'a> TaggedStructure for SamplerCaptureDescriptorDataInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT; } impl<'a> SamplerCaptureDescriptorDataInfoEXT<'a> { #[inline] pub fn sampler(mut self, sampler: Sampler) -> Self { self.sampler = sampler; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureCaptureDescriptorDataInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acceleration_structure: AccelerationStructureKHR, pub acceleration_structure_nv: AccelerationStructureNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureCaptureDescriptorDataInfoEXT<'_> {} unsafe impl Sync for AccelerationStructureCaptureDescriptorDataInfoEXT<'_> {} impl ::core::default::Default for AccelerationStructureCaptureDescriptorDataInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acceleration_structure: AccelerationStructureKHR::default(), acceleration_structure_nv: AccelerationStructureNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureCaptureDescriptorDataInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT; } impl<'a> AccelerationStructureCaptureDescriptorDataInfoEXT<'a> { #[inline] pub fn acceleration_structure( mut self, acceleration_structure: AccelerationStructureKHR, ) -> Self { self.acceleration_structure = acceleration_structure; self } #[inline] pub fn acceleration_structure_nv( mut self, acceleration_structure_nv: AccelerationStructureNV, ) -> Self { self.acceleration_structure_nv = acceleration_structure_nv; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OpaqueCaptureDescriptorDataCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub opaque_capture_descriptor_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> {} unsafe impl Sync for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> {} impl ::core::default::Default for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), opaque_capture_descriptor_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OpaqueCaptureDescriptorDataCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT; } unsafe impl ExtendsBufferCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> {} unsafe impl ExtendsImageCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> {} unsafe impl ExtendsImageViewCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> {} unsafe impl ExtendsSamplerCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> {} unsafe impl ExtendsAccelerationStructureCreateInfoKHR for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> { } unsafe impl ExtendsAccelerationStructureCreateInfoNV for OpaqueCaptureDescriptorDataCreateInfoEXT<'_> { } impl<'a> OpaqueCaptureDescriptorDataCreateInfoEXT<'a> { #[inline] pub fn opaque_capture_descriptor_data( mut self, opaque_capture_descriptor_data: *const c_void, ) -> Self { self.opaque_capture_descriptor_data = opaque_capture_descriptor_data; self } } #[repr(C)] 
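// Editor's note (illustrative sketch, not part of the generated bindings): feature structs such as
// the one below implement both `ExtendsPhysicalDeviceFeatures2` and `ExtendsDeviceCreateInfo`, so
// the same value can first be used to query support and then be chained into device creation.
// A minimal sketch, assuming `instance`, `pdev` and `queue_info` already exist:
//
//     let mut dot_product = PhysicalDeviceShaderIntegerDotProductFeatures::default();
//     let mut features2 = PhysicalDeviceFeatures2::default().push_next(&mut dot_product);
//     unsafe { instance.get_physical_device_features2(pdev, &mut features2) };
//     if dot_product.shader_integer_dot_product != 0 {
//         let device_info = DeviceCreateInfo::default()
//             .queue_create_infos(core::slice::from_ref(&queue_info))
//             .push_next(&mut dot_product);
//         // pass `device_info` to `create_device` to enable the feature
//     }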
#[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderIntegerDotProductFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_integer_dot_product: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderIntegerDotProductFeatures<'_> {} unsafe impl Sync for PhysicalDeviceShaderIntegerDotProductFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceShaderIntegerDotProductFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_integer_dot_product: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderIntegerDotProductFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderIntegerDotProductFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderIntegerDotProductFeatures<'_> {} impl<'a> PhysicalDeviceShaderIntegerDotProductFeatures<'a> { #[inline] pub fn shader_integer_dot_product(mut self, shader_integer_dot_product: bool) -> Self { self.shader_integer_dot_product = shader_integer_dot_product.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderIntegerDotProductProperties<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub integer_dot_product8_bit_unsigned_accelerated: Bool32, pub integer_dot_product8_bit_signed_accelerated: Bool32, pub integer_dot_product8_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product4x8_bit_packed_unsigned_accelerated: Bool32, pub integer_dot_product4x8_bit_packed_signed_accelerated: Bool32, pub integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: Bool32, pub integer_dot_product16_bit_unsigned_accelerated: Bool32, pub integer_dot_product16_bit_signed_accelerated: Bool32, pub integer_dot_product16_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product32_bit_unsigned_accelerated: Bool32, pub integer_dot_product32_bit_signed_accelerated: Bool32, pub integer_dot_product32_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product64_bit_unsigned_accelerated: Bool32, pub integer_dot_product64_bit_signed_accelerated: Bool32, pub integer_dot_product64_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating8_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating16_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: Bool32, pub integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating32_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: Bool32, pub 
integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: Bool32, pub integer_dot_product_accumulating_saturating64_bit_signed_accelerated: Bool32, pub integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderIntegerDotProductProperties<'_> {} unsafe impl Sync for PhysicalDeviceShaderIntegerDotProductProperties<'_> {} impl ::core::default::Default for PhysicalDeviceShaderIntegerDotProductProperties<'_> { #[inline] fn default() -> Self { Self { s_type : Self :: STRUCTURE_TYPE , p_next : :: core :: ptr :: null_mut () , integer_dot_product8_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product8_bit_signed_accelerated : Bool32 :: default () , integer_dot_product8_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_unsigned_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_signed_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product16_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product16_bit_signed_accelerated : Bool32 :: default () , integer_dot_product16_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product32_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product32_bit_signed_accelerated : Bool32 :: default () , integer_dot_product32_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product64_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product64_bit_signed_accelerated : Bool32 :: default () , integer_dot_product64_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated : Bool32 :: default () , _marker : PhantomData , } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderIntegerDotProductProperties<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES; } unsafe impl ExtendsPhysicalDeviceProperties2 for 
PhysicalDeviceShaderIntegerDotProductProperties<'_> { } impl<'a> PhysicalDeviceShaderIntegerDotProductProperties<'a> { #[inline] pub fn integer_dot_product8_bit_unsigned_accelerated( mut self, integer_dot_product8_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product8_bit_unsigned_accelerated = integer_dot_product8_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product8_bit_signed_accelerated( mut self, integer_dot_product8_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product8_bit_signed_accelerated = integer_dot_product8_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product8_bit_mixed_signedness_accelerated( mut self, integer_dot_product8_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product8_bit_mixed_signedness_accelerated = integer_dot_product8_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product4x8_bit_packed_unsigned_accelerated( mut self, integer_dot_product4x8_bit_packed_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product4x8_bit_packed_unsigned_accelerated = integer_dot_product4x8_bit_packed_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product4x8_bit_packed_signed_accelerated( mut self, integer_dot_product4x8_bit_packed_signed_accelerated: bool, ) -> Self { self.integer_dot_product4x8_bit_packed_signed_accelerated = integer_dot_product4x8_bit_packed_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product4x8_bit_packed_mixed_signedness_accelerated( mut self, integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product4x8_bit_packed_mixed_signedness_accelerated = integer_dot_product4x8_bit_packed_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product16_bit_unsigned_accelerated( mut self, integer_dot_product16_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product16_bit_unsigned_accelerated = integer_dot_product16_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product16_bit_signed_accelerated( mut self, integer_dot_product16_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product16_bit_signed_accelerated = integer_dot_product16_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product16_bit_mixed_signedness_accelerated( mut self, integer_dot_product16_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product16_bit_mixed_signedness_accelerated = integer_dot_product16_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product32_bit_unsigned_accelerated( mut self, integer_dot_product32_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product32_bit_unsigned_accelerated = integer_dot_product32_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product32_bit_signed_accelerated( mut self, integer_dot_product32_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product32_bit_signed_accelerated = integer_dot_product32_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product32_bit_mixed_signedness_accelerated( mut self, integer_dot_product32_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product32_bit_mixed_signedness_accelerated = integer_dot_product32_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product64_bit_unsigned_accelerated( mut self, integer_dot_product64_bit_unsigned_accelerated: bool, ) -> Self { 
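// Editor's note (illustrative, not part of the generated bindings): the `*_accelerated` fields of
// this properties struct are outputs filled in by the implementation when the struct is chained
// into `PhysicalDeviceProperties2`; the builder setters in this impl are mainly useful for
// constructing fixtures. A minimal query sketch, assuming `instance` and `pdev` are valid:
//
//     let mut dot_props = PhysicalDeviceShaderIntegerDotProductProperties::default();
//     let mut props2 = PhysicalDeviceProperties2::default().push_next(&mut dot_props);
//     unsafe { instance.get_physical_device_properties2(pdev, &mut props2) };
//     let packed_ok = dot_props.integer_dot_product4x8_bit_packed_signed_accelerated != 0;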
self.integer_dot_product64_bit_unsigned_accelerated = integer_dot_product64_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product64_bit_signed_accelerated( mut self, integer_dot_product64_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product64_bit_signed_accelerated = integer_dot_product64_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product64_bit_mixed_signedness_accelerated( mut self, integer_dot_product64_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product64_bit_mixed_signedness_accelerated = integer_dot_product64_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating8_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating8_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating8_bit_signed_accelerated = integer_dot_product_accumulating_saturating8_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated( mut self, integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : bool, ) -> Self { self . integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated . 
into () ; self } #[inline] pub fn integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating16_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating16_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating16_bit_signed_accelerated = integer_dot_product_accumulating_saturating16_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating32_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating32_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating32_bit_signed_accelerated = integer_dot_product_accumulating_saturating32_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated( mut self, integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated = integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating64_bit_signed_accelerated( mut self, integer_dot_product_accumulating_saturating64_bit_signed_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating64_bit_signed_accelerated = integer_dot_product_accumulating_saturating64_bit_signed_accelerated.into(); self } #[inline] pub fn integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated( mut self, integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: bool, ) -> Self { self.integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDrmPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub has_primary: Bool32, pub has_render: Bool32, pub primary_major: i64, pub 
primary_minor: i64, pub render_major: i64, pub render_minor: i64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDrmPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDrmPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDrmPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), has_primary: Bool32::default(), has_render: Bool32::default(), primary_major: i64::default(), primary_minor: i64::default(), render_major: i64::default(), render_minor: i64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDrmPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DRM_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDrmPropertiesEXT<'_> {} impl<'a> PhysicalDeviceDrmPropertiesEXT<'a> { #[inline] pub fn has_primary(mut self, has_primary: bool) -> Self { self.has_primary = has_primary.into(); self } #[inline] pub fn has_render(mut self, has_render: bool) -> Self { self.has_render = has_render.into(); self } #[inline] pub fn primary_major(mut self, primary_major: i64) -> Self { self.primary_major = primary_major; self } #[inline] pub fn primary_minor(mut self, primary_minor: i64) -> Self { self.primary_minor = primary_minor; self } #[inline] pub fn render_major(mut self, render_major: i64) -> Self { self.render_major = render_major; self } #[inline] pub fn render_minor(mut self, render_minor: i64) -> Self { self.render_minor = render_minor; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub fragment_shader_barycentric: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), fragment_shader_barycentric: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'_> {} impl<'a> PhysicalDeviceFragmentShaderBarycentricFeaturesKHR<'a> { #[inline] pub fn fragment_shader_barycentric(mut self, fragment_shader_barycentric: bool) -> Self { self.fragment_shader_barycentric = fragment_shader_barycentric.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub tri_strip_vertex_order_independent_of_provoking_vertex: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'_> { 
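// Editor's note (illustrative, not part of the generated bindings): as everywhere in this module,
// `default()` pre-populates `s_type` from the `TaggedStructure` impl and null-initialises
// `p_next`, so callers never have to set `sType` by hand as they would in C. For example:
//
//     let props = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR::default();
//     assert!(props.s_type == StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR);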
#[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), tri_strip_vertex_order_independent_of_provoking_vertex: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'_> { } impl<'a> PhysicalDeviceFragmentShaderBarycentricPropertiesKHR<'a> { #[inline] pub fn tri_strip_vertex_order_independent_of_provoking_vertex( mut self, tri_strip_vertex_order_independent_of_provoking_vertex: bool, ) -> Self { self.tri_strip_vertex_order_independent_of_provoking_vertex = tri_strip_vertex_order_independent_of_provoking_vertex.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingMotionBlurFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_motion_blur: Bool32, pub ray_tracing_motion_blur_pipeline_trace_rays_indirect: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingMotionBlurFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingMotionBlurFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingMotionBlurFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_tracing_motion_blur: Bool32::default(), ray_tracing_motion_blur_pipeline_trace_rays_indirect: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingMotionBlurFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingMotionBlurFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingMotionBlurFeaturesNV<'_> {} impl<'a> PhysicalDeviceRayTracingMotionBlurFeaturesNV<'a> { #[inline] pub fn ray_tracing_motion_blur(mut self, ray_tracing_motion_blur: bool) -> Self { self.ray_tracing_motion_blur = ray_tracing_motion_blur.into(); self } #[inline] pub fn ray_tracing_motion_blur_pipeline_trace_rays_indirect( mut self, ray_tracing_motion_blur_pipeline_trace_rays_indirect: bool, ) -> Self { self.ray_tracing_motion_blur_pipeline_trace_rays_indirect = ray_tracing_motion_blur_pipeline_trace_rays_indirect.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingValidationFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_validation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingValidationFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingValidationFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingValidationFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_tracing_validation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingValidationFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for 
PhysicalDeviceRayTracingValidationFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingValidationFeaturesNV<'_> {} impl<'a> PhysicalDeviceRayTracingValidationFeaturesNV<'a> { #[inline] pub fn ray_tracing_validation(mut self, ray_tracing_validation: bool) -> Self { self.ray_tracing_validation = ray_tracing_validation.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureGeometryMotionTrianglesDataNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub vertex_data: DeviceOrHostAddressConstKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureGeometryMotionTrianglesDataNV<'_> {} unsafe impl Sync for AccelerationStructureGeometryMotionTrianglesDataNV<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureGeometryMotionTrianglesDataNV<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureGeometryMotionTrianglesDataNV") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("vertex_data", &"union") .finish() } } impl ::core::default::Default for AccelerationStructureGeometryMotionTrianglesDataNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), vertex_data: DeviceOrHostAddressConstKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureGeometryMotionTrianglesDataNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV; } unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR for AccelerationStructureGeometryMotionTrianglesDataNV<'_> { } impl<'a> AccelerationStructureGeometryMotionTrianglesDataNV<'a> { #[inline] pub fn vertex_data(mut self, vertex_data: DeviceOrHostAddressConstKHR) -> Self { self.vertex_data = vertex_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureMotionInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub max_instances: u32, pub flags: AccelerationStructureMotionInfoFlagsNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureMotionInfoNV<'_> {} unsafe impl Sync for AccelerationStructureMotionInfoNV<'_> {} impl ::core::default::Default for AccelerationStructureMotionInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), max_instances: u32::default(), flags: AccelerationStructureMotionInfoFlagsNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureMotionInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_MOTION_INFO_NV; } unsafe impl ExtendsAccelerationStructureCreateInfoKHR for AccelerationStructureMotionInfoNV<'_> {} impl<'a> AccelerationStructureMotionInfoNV<'a> { #[inline] pub fn max_instances(mut self, max_instances: u32) -> Self { self.max_instances = max_instances; self } #[inline] pub fn flags(mut self, flags: AccelerationStructureMotionInfoFlagsNV) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct SRTDataNV { pub sx: f32, pub a: f32, pub b: f32, pub pvx: f32, pub sy: f32, pub c: f32, pub pvy: f32, pub sz: f32, pub pvz: f32, pub qx: f32, pub qy: f32, pub qz: f32, pub qw: f32, pub tx: f32, pub ty: f32, pub tz: 
f32, } impl SRTDataNV { #[inline] pub fn sx(mut self, sx: f32) -> Self { self.sx = sx; self } #[inline] pub fn a(mut self, a: f32) -> Self { self.a = a; self } #[inline] pub fn b(mut self, b: f32) -> Self { self.b = b; self } #[inline] pub fn pvx(mut self, pvx: f32) -> Self { self.pvx = pvx; self } #[inline] pub fn sy(mut self, sy: f32) -> Self { self.sy = sy; self } #[inline] pub fn c(mut self, c: f32) -> Self { self.c = c; self } #[inline] pub fn pvy(mut self, pvy: f32) -> Self { self.pvy = pvy; self } #[inline] pub fn sz(mut self, sz: f32) -> Self { self.sz = sz; self } #[inline] pub fn pvz(mut self, pvz: f32) -> Self { self.pvz = pvz; self } #[inline] pub fn qx(mut self, qx: f32) -> Self { self.qx = qx; self } #[inline] pub fn qy(mut self, qy: f32) -> Self { self.qy = qy; self } #[inline] pub fn qz(mut self, qz: f32) -> Self { self.qz = qz; self } #[inline] pub fn qw(mut self, qw: f32) -> Self { self.qw = qw; self } #[inline] pub fn tx(mut self, tx: f32) -> Self { self.tx = tx; self } #[inline] pub fn ty(mut self, ty: f32) -> Self { self.ty = ty; self } #[inline] pub fn tz(mut self, tz: f32) -> Self { self.tz = tz; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub struct AccelerationStructureSRTMotionInstanceNV { pub transform_t0: SRTDataNV, pub transform_t1: SRTDataNV, #[doc = r" Use [`Packed24_8::new(instance_custom_index, mask)`][Packed24_8::new()] to construct this field"] pub instance_custom_index_and_mask: Packed24_8, #[doc = r" Use [`Packed24_8::new(instance_shader_binding_table_record_offset, flags)`][Packed24_8::new()] to construct this field"] pub instance_shader_binding_table_record_offset_and_flags: Packed24_8, pub acceleration_structure_reference: AccelerationStructureReferenceKHR, } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub struct AccelerationStructureMatrixMotionInstanceNV { pub transform_t0: TransformMatrixKHR, pub transform_t1: TransformMatrixKHR, #[doc = r" Use [`Packed24_8::new(instance_custom_index, mask)`][Packed24_8::new()] to construct this field"] pub instance_custom_index_and_mask: Packed24_8, #[doc = r" Use [`Packed24_8::new(instance_shader_binding_table_record_offset, flags)`][Packed24_8::new()] to construct this field"] pub instance_shader_binding_table_record_offset_and_flags: Packed24_8, pub acceleration_structure_reference: AccelerationStructureReferenceKHR, } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] pub union AccelerationStructureMotionInstanceDataNV { pub static_instance: AccelerationStructureInstanceKHR, pub matrix_motion_instance: AccelerationStructureMatrixMotionInstanceNV, pub srt_motion_instance: AccelerationStructureSRTMotionInstanceNV, } impl ::core::default::Default for AccelerationStructureMotionInstanceDataNV { #[inline] fn default() -> Self { unsafe { ::core::mem::zeroed() } } } #[repr(C)] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct AccelerationStructureMotionInstanceNV { pub ty: AccelerationStructureMotionInstanceTypeNV, pub flags: AccelerationStructureMotionInstanceFlagsNV, pub data: AccelerationStructureMotionInstanceDataNV, } #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureMotionInstanceNV { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureMotionInstanceNV") .field("ty", &self.ty) .field("flags", &self.flags) .field("data", &"union") .finish() } } impl AccelerationStructureMotionInstanceNV { #[inline] pub fn ty(mut self, ty: AccelerationStructureMotionInstanceTypeNV) -> Self { self.ty = ty; self } #[inline] pub fn flags(mut 
self, flags: AccelerationStructureMotionInstanceFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn data(mut self, data: AccelerationStructureMotionInstanceDataNV) -> Self { self.data = data; self } } #[doc = ""] pub type RemoteAddressNV = c_void; #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryGetRemoteAddressInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub handle_type: ExternalMemoryHandleTypeFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryGetRemoteAddressInfoNV<'_> {} unsafe impl Sync for MemoryGetRemoteAddressInfoNV<'_> {} impl ::core::default::Default for MemoryGetRemoteAddressInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), handle_type: ExternalMemoryHandleTypeFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryGetRemoteAddressInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_REMOTE_ADDRESS_INFO_NV; } impl<'a> MemoryGetRemoteAddressInfoNV<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self { self.handle_type = handle_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMemoryBufferCollectionFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub collection: BufferCollectionFUCHSIA, pub index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMemoryBufferCollectionFUCHSIA<'_> {} unsafe impl Sync for ImportMemoryBufferCollectionFUCHSIA<'_> {} impl ::core::default::Default for ImportMemoryBufferCollectionFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), collection: BufferCollectionFUCHSIA::default(), index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMemoryBufferCollectionFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA; } unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryBufferCollectionFUCHSIA<'_> {} impl<'a> ImportMemoryBufferCollectionFUCHSIA<'a> { #[inline] pub fn collection(mut self, collection: BufferCollectionFUCHSIA) -> Self { self.collection = collection; self } #[inline] pub fn index(mut self, index: u32) -> Self { self.index = index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCollectionImageCreateInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub collection: BufferCollectionFUCHSIA, pub index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCollectionImageCreateInfoFUCHSIA<'_> {} unsafe impl Sync for BufferCollectionImageCreateInfoFUCHSIA<'_> {} impl ::core::default::Default for BufferCollectionImageCreateInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), collection: BufferCollectionFUCHSIA::default(), index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCollectionImageCreateInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA; } unsafe impl 
ExtendsImageCreateInfo for BufferCollectionImageCreateInfoFUCHSIA<'_> {} impl<'a> BufferCollectionImageCreateInfoFUCHSIA<'a> { #[inline] pub fn collection(mut self, collection: BufferCollectionFUCHSIA) -> Self { self.collection = collection; self } #[inline] pub fn index(mut self, index: u32) -> Self { self.index = index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCollectionBufferCreateInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub collection: BufferCollectionFUCHSIA, pub index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCollectionBufferCreateInfoFUCHSIA<'_> {} unsafe impl Sync for BufferCollectionBufferCreateInfoFUCHSIA<'_> {} impl ::core::default::Default for BufferCollectionBufferCreateInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), collection: BufferCollectionFUCHSIA::default(), index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCollectionBufferCreateInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA; } unsafe impl ExtendsBufferCreateInfo for BufferCollectionBufferCreateInfoFUCHSIA<'_> {} impl<'a> BufferCollectionBufferCreateInfoFUCHSIA<'a> { #[inline] pub fn collection(mut self, collection: BufferCollectionFUCHSIA) -> Self { self.collection = collection; self } #[inline] pub fn index(mut self, index: u32) -> Self { self.index = index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCollectionCreateInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub collection_token: zx_handle_t, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCollectionCreateInfoFUCHSIA<'_> {} unsafe impl Sync for BufferCollectionCreateInfoFUCHSIA<'_> {} impl ::core::default::Default for BufferCollectionCreateInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), collection_token: zx_handle_t::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCollectionCreateInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_CREATE_INFO_FUCHSIA; } impl<'a> BufferCollectionCreateInfoFUCHSIA<'a> { #[inline] pub fn collection_token(mut self, collection_token: zx_handle_t) -> Self { self.collection_token = collection_token; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCollectionPropertiesFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_type_bits: u32, pub buffer_count: u32, pub create_info_index: u32, pub sysmem_pixel_format: u64, pub format_features: FormatFeatureFlags, pub sysmem_color_space_index: SysmemColorSpaceFUCHSIA<'a>, pub sampler_ycbcr_conversion_components: ComponentMapping, pub suggested_ycbcr_model: SamplerYcbcrModelConversion, pub suggested_ycbcr_range: SamplerYcbcrRange, pub suggested_x_chroma_offset: ChromaLocation, pub suggested_y_chroma_offset: ChromaLocation, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCollectionPropertiesFUCHSIA<'_> {} unsafe impl Sync for BufferCollectionPropertiesFUCHSIA<'_> {} impl ::core::default::Default for BufferCollectionPropertiesFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_type_bits: u32::default(), buffer_count: u32::default(), create_info_index: u32::default(), sysmem_pixel_format: u64::default(), format_features: FormatFeatureFlags::default(), sysmem_color_space_index: SysmemColorSpaceFUCHSIA::default(), sampler_ycbcr_conversion_components: ComponentMapping::default(), suggested_ycbcr_model: SamplerYcbcrModelConversion::default(), suggested_ycbcr_range: SamplerYcbcrRange::default(), suggested_x_chroma_offset: ChromaLocation::default(), suggested_y_chroma_offset: ChromaLocation::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCollectionPropertiesFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_PROPERTIES_FUCHSIA; } impl<'a> BufferCollectionPropertiesFUCHSIA<'a> { #[inline] pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } #[inline] pub fn buffer_count(mut self, buffer_count: u32) -> Self { self.buffer_count = buffer_count; self } #[inline] pub fn create_info_index(mut self, create_info_index: u32) -> Self { self.create_info_index = create_info_index; self } #[inline] pub fn sysmem_pixel_format(mut self, sysmem_pixel_format: u64) -> Self { self.sysmem_pixel_format = sysmem_pixel_format; self } #[inline] pub fn format_features(mut self, format_features: FormatFeatureFlags) -> Self { self.format_features = format_features; self } #[inline] pub fn sysmem_color_space_index( mut self, sysmem_color_space_index: SysmemColorSpaceFUCHSIA<'a>, ) -> Self { self.sysmem_color_space_index = sysmem_color_space_index; self } #[inline] pub fn sampler_ycbcr_conversion_components( mut self, sampler_ycbcr_conversion_components: ComponentMapping, ) -> Self { self.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components; self } #[inline] pub fn suggested_ycbcr_model( mut self, suggested_ycbcr_model: SamplerYcbcrModelConversion, ) -> Self { self.suggested_ycbcr_model = suggested_ycbcr_model; self } #[inline] pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self { self.suggested_ycbcr_range = suggested_ycbcr_range; self } #[inline] pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self { self.suggested_x_chroma_offset = suggested_x_chroma_offset; self } #[inline] pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self { self.suggested_y_chroma_offset = suggested_y_chroma_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferConstraintsInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub create_info: BufferCreateInfo<'a>, pub required_format_features: FormatFeatureFlags, pub buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferConstraintsInfoFUCHSIA<'_> {} unsafe impl Sync for BufferConstraintsInfoFUCHSIA<'_> {} impl ::core::default::Default for BufferConstraintsInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), create_info: BufferCreateInfo::default(), required_format_features: FormatFeatureFlags::default(), buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferConstraintsInfoFUCHSIA<'a> { const STRUCTURE_TYPE: 
StructureType = StructureType::BUFFER_CONSTRAINTS_INFO_FUCHSIA; } impl<'a> BufferConstraintsInfoFUCHSIA<'a> { #[inline] pub fn create_info(mut self, create_info: BufferCreateInfo<'a>) -> Self { self.create_info = create_info; self } #[inline] pub fn required_format_features( mut self, required_format_features: FormatFeatureFlags, ) -> Self { self.required_format_features = required_format_features; self } #[inline] pub fn buffer_collection_constraints( mut self, buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA<'a>, ) -> Self { self.buffer_collection_constraints = buffer_collection_constraints; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SysmemColorSpaceFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub color_space: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SysmemColorSpaceFUCHSIA<'_> {} unsafe impl Sync for SysmemColorSpaceFUCHSIA<'_> {} impl ::core::default::Default for SysmemColorSpaceFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), color_space: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SysmemColorSpaceFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SYSMEM_COLOR_SPACE_FUCHSIA; } impl<'a> SysmemColorSpaceFUCHSIA<'a> { #[inline] pub fn color_space(mut self, color_space: u32) -> Self { self.color_space = color_space; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageFormatConstraintsInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_create_info: ImageCreateInfo<'a>, pub required_format_features: FormatFeatureFlags, pub flags: ImageFormatConstraintsFlagsFUCHSIA, pub sysmem_pixel_format: u64, pub color_space_count: u32, pub p_color_spaces: *const SysmemColorSpaceFUCHSIA<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageFormatConstraintsInfoFUCHSIA<'_> {} unsafe impl Sync for ImageFormatConstraintsInfoFUCHSIA<'_> {} impl ::core::default::Default for ImageFormatConstraintsInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_create_info: ImageCreateInfo::default(), required_format_features: FormatFeatureFlags::default(), flags: ImageFormatConstraintsFlagsFUCHSIA::default(), sysmem_pixel_format: u64::default(), color_space_count: u32::default(), p_color_spaces: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageFormatConstraintsInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA; } impl<'a> ImageFormatConstraintsInfoFUCHSIA<'a> { #[inline] pub fn image_create_info(mut self, image_create_info: ImageCreateInfo<'a>) -> Self { self.image_create_info = image_create_info; self } #[inline] pub fn required_format_features( mut self, required_format_features: FormatFeatureFlags, ) -> Self { self.required_format_features = required_format_features; self } #[inline] pub fn flags(mut self, flags: ImageFormatConstraintsFlagsFUCHSIA) -> Self { self.flags = flags; self } #[inline] pub fn sysmem_pixel_format(mut self, sysmem_pixel_format: u64) -> Self { self.sysmem_pixel_format = sysmem_pixel_format; self } #[inline] pub fn color_spaces(mut self, color_spaces: &'a [SysmemColorSpaceFUCHSIA<'a>]) -> Self { self.color_space_count = color_spaces.len() as _; 
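// Editor's note (illustrative): slice setters such as `color_spaces` follow the crate-wide
// convention of deriving the Vulkan `*Count`/`p*` pair from one borrowed slice, with the `'a`
// lifetime keeping that slice borrowed for as long as the struct is in use. A hypothetical call
// site, assuming `image_info` is a filled-out `ImageCreateInfo` and `srgb` a sysmem colour-space
// code:
//
//     let spaces = [SysmemColorSpaceFUCHSIA::default().color_space(srgb)];
//     let constraints = ImageFormatConstraintsInfoFUCHSIA::default()
//         .image_create_info(image_info)
//         .required_format_features(FormatFeatureFlags::SAMPLED_IMAGE)
//         .color_spaces(&spaces);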
self.p_color_spaces = color_spaces.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageConstraintsInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub format_constraints_count: u32, pub p_format_constraints: *const ImageFormatConstraintsInfoFUCHSIA<'a>, pub buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA<'a>, pub flags: ImageConstraintsInfoFlagsFUCHSIA, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageConstraintsInfoFUCHSIA<'_> {} unsafe impl Sync for ImageConstraintsInfoFUCHSIA<'_> {} impl ::core::default::Default for ImageConstraintsInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), format_constraints_count: u32::default(), p_format_constraints: ::core::ptr::null(), buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA::default(), flags: ImageConstraintsInfoFlagsFUCHSIA::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageConstraintsInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_CONSTRAINTS_INFO_FUCHSIA; } impl<'a> ImageConstraintsInfoFUCHSIA<'a> { #[inline] pub fn format_constraints( mut self, format_constraints: &'a [ImageFormatConstraintsInfoFUCHSIA<'a>], ) -> Self { self.format_constraints_count = format_constraints.len() as _; self.p_format_constraints = format_constraints.as_ptr(); self } #[inline] pub fn buffer_collection_constraints( mut self, buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA<'a>, ) -> Self { self.buffer_collection_constraints = buffer_collection_constraints; self } #[inline] pub fn flags(mut self, flags: ImageConstraintsInfoFlagsFUCHSIA) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BufferCollectionConstraintsInfoFUCHSIA<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub min_buffer_count: u32, pub max_buffer_count: u32, pub min_buffer_count_for_camping: u32, pub min_buffer_count_for_dedicated_slack: u32, pub min_buffer_count_for_shared_slack: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BufferCollectionConstraintsInfoFUCHSIA<'_> {} unsafe impl Sync for BufferCollectionConstraintsInfoFUCHSIA<'_> {} impl ::core::default::Default for BufferCollectionConstraintsInfoFUCHSIA<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), min_buffer_count: u32::default(), max_buffer_count: u32::default(), min_buffer_count_for_camping: u32::default(), min_buffer_count_for_dedicated_slack: u32::default(), min_buffer_count_for_shared_slack: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BufferCollectionConstraintsInfoFUCHSIA<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA; } impl<'a> BufferCollectionConstraintsInfoFUCHSIA<'a> { #[inline] pub fn min_buffer_count(mut self, min_buffer_count: u32) -> Self { self.min_buffer_count = min_buffer_count; self } #[inline] pub fn max_buffer_count(mut self, max_buffer_count: u32) -> Self { self.max_buffer_count = max_buffer_count; self } #[inline] pub fn min_buffer_count_for_camping(mut self, min_buffer_count_for_camping: u32) -> Self { self.min_buffer_count_for_camping = min_buffer_count_for_camping; self } #[inline] pub fn min_buffer_count_for_dedicated_slack( mut self, 
min_buffer_count_for_dedicated_slack: u32, ) -> Self { self.min_buffer_count_for_dedicated_slack = min_buffer_count_for_dedicated_slack; self } #[inline] pub fn min_buffer_count_for_shared_slack( mut self, min_buffer_count_for_shared_slack: u32, ) -> Self { self.min_buffer_count_for_shared_slack = min_buffer_count_for_shared_slack; self } } handle_nondispatchable!( CudaModuleNV, CUDA_MODULE_NV, doc = "" ); handle_nondispatchable!( CudaFunctionNV, CUDA_FUNCTION_NV, doc = "" ); #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CudaModuleCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub data_size: usize, pub p_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CudaModuleCreateInfoNV<'_> {} unsafe impl Sync for CudaModuleCreateInfoNV<'_> {} impl ::core::default::Default for CudaModuleCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), data_size: usize::default(), p_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CudaModuleCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CUDA_MODULE_CREATE_INFO_NV; } impl<'a> CudaModuleCreateInfoNV<'a> { #[inline] pub fn data(mut self, data: &'a [u8]) -> Self { self.data_size = data.len(); self.p_data = data.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CudaFunctionCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub module: CudaModuleNV, pub p_name: *const c_char, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CudaFunctionCreateInfoNV<'_> {} unsafe impl Sync for CudaFunctionCreateInfoNV<'_> {} impl ::core::default::Default for CudaFunctionCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), module: CudaModuleNV::default(), p_name: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CudaFunctionCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CUDA_FUNCTION_CREATE_INFO_NV; } impl<'a> CudaFunctionCreateInfoNV<'a> { #[inline] pub fn module(mut self, module: CudaModuleNV) -> Self { self.module = module; self } #[inline] pub fn name(mut self, name: &'a CStr) -> Self { self.p_name = name.as_ptr(); self } #[inline] pub unsafe fn name_as_c_str(&self) -> Option<&CStr> { if self.p_name.is_null() { None } else { Some(CStr::from_ptr(self.p_name)) } } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CudaLaunchInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub function: CudaFunctionNV, pub grid_dim_x: u32, pub grid_dim_y: u32, pub grid_dim_z: u32, pub block_dim_x: u32, pub block_dim_y: u32, pub block_dim_z: u32, pub shared_mem_bytes: u32, pub param_count: usize, pub p_params: *const *const c_void, pub extra_count: usize, pub p_extras: *const *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CudaLaunchInfoNV<'_> {} unsafe impl Sync for CudaLaunchInfoNV<'_> {} impl ::core::default::Default for CudaLaunchInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), function: CudaFunctionNV::default(), grid_dim_x: u32::default(), grid_dim_y: u32::default(), grid_dim_z: u32::default(), block_dim_x: u32::default(), block_dim_y: 
u32::default(), block_dim_z: u32::default(), shared_mem_bytes: u32::default(), param_count: usize::default(), p_params: ::core::ptr::null(), extra_count: usize::default(), p_extras: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CudaLaunchInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::CUDA_LAUNCH_INFO_NV; } impl<'a> CudaLaunchInfoNV<'a> { #[inline] pub fn function(mut self, function: CudaFunctionNV) -> Self { self.function = function; self } #[inline] pub fn grid_dim_x(mut self, grid_dim_x: u32) -> Self { self.grid_dim_x = grid_dim_x; self } #[inline] pub fn grid_dim_y(mut self, grid_dim_y: u32) -> Self { self.grid_dim_y = grid_dim_y; self } #[inline] pub fn grid_dim_z(mut self, grid_dim_z: u32) -> Self { self.grid_dim_z = grid_dim_z; self } #[inline] pub fn block_dim_x(mut self, block_dim_x: u32) -> Self { self.block_dim_x = block_dim_x; self } #[inline] pub fn block_dim_y(mut self, block_dim_y: u32) -> Self { self.block_dim_y = block_dim_y; self } #[inline] pub fn block_dim_z(mut self, block_dim_z: u32) -> Self { self.block_dim_z = block_dim_z; self } #[inline] pub fn shared_mem_bytes(mut self, shared_mem_bytes: u32) -> Self { self.shared_mem_bytes = shared_mem_bytes; self } #[inline] pub fn params(mut self, params: &'a [*const c_void]) -> Self { self.param_count = params.len(); self.p_params = params.as_ptr(); self } #[inline] pub fn extras(mut self, extras: &'a [*const c_void]) -> Self { self.extra_count = extras.len(); self.p_extras = extras.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format_rgba10x6_without_y_cb_cr_sampler: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format_rgba10x6_without_y_cb_cr_sampler: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'_> {} impl<'a> PhysicalDeviceRGBA10X6FormatsFeaturesEXT<'a> { #[inline] pub fn format_rgba10x6_without_y_cb_cr_sampler( mut self, format_rgba10x6_without_y_cb_cr_sampler: bool, ) -> Self { self.format_rgba10x6_without_y_cb_cr_sampler = format_rgba10x6_without_y_cb_cr_sampler.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FormatProperties3<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub linear_tiling_features: FormatFeatureFlags2, pub optimal_tiling_features: FormatFeatureFlags2, pub buffer_features: FormatFeatureFlags2, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FormatProperties3<'_> {} unsafe impl Sync for FormatProperties3<'_> {} impl ::core::default::Default for FormatProperties3<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), 
linear_tiling_features: FormatFeatureFlags2::default(), optimal_tiling_features: FormatFeatureFlags2::default(), buffer_features: FormatFeatureFlags2::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FormatProperties3<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FORMAT_PROPERTIES_3; } unsafe impl ExtendsFormatProperties2 for FormatProperties3<'_> {} impl<'a> FormatProperties3<'a> { #[inline] pub fn linear_tiling_features(mut self, linear_tiling_features: FormatFeatureFlags2) -> Self { self.linear_tiling_features = linear_tiling_features; self } #[inline] pub fn optimal_tiling_features(mut self, optimal_tiling_features: FormatFeatureFlags2) -> Self { self.optimal_tiling_features = optimal_tiling_features; self } #[inline] pub fn buffer_features(mut self, buffer_features: FormatFeatureFlags2) -> Self { self.buffer_features = buffer_features; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DrmFormatModifierPropertiesList2EXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub drm_format_modifier_count: u32, pub p_drm_format_modifier_properties: *mut DrmFormatModifierProperties2EXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DrmFormatModifierPropertiesList2EXT<'_> {} unsafe impl Sync for DrmFormatModifierPropertiesList2EXT<'_> {} impl ::core::default::Default for DrmFormatModifierPropertiesList2EXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), drm_format_modifier_count: u32::default(), p_drm_format_modifier_properties: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DrmFormatModifierPropertiesList2EXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT; } unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesList2EXT<'_> {} impl<'a> DrmFormatModifierPropertiesList2EXT<'a> { #[inline] pub fn drm_format_modifier_properties( mut self, drm_format_modifier_properties: &'a mut [DrmFormatModifierProperties2EXT], ) -> Self { self.drm_format_modifier_count = drm_format_modifier_properties.len() as _; self.p_drm_format_modifier_properties = drm_format_modifier_properties.as_mut_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DrmFormatModifierProperties2EXT { pub drm_format_modifier: u64, pub drm_format_modifier_plane_count: u32, pub drm_format_modifier_tiling_features: FormatFeatureFlags2, } impl DrmFormatModifierProperties2EXT { #[inline] pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self { self.drm_format_modifier = drm_format_modifier; self } #[inline] pub fn drm_format_modifier_plane_count(mut self, drm_format_modifier_plane_count: u32) -> Self { self.drm_format_modifier_plane_count = drm_format_modifier_plane_count; self } #[inline] pub fn drm_format_modifier_tiling_features( mut self, drm_format_modifier_tiling_features: FormatFeatureFlags2, ) -> Self { self.drm_format_modifier_tiling_features = drm_format_modifier_tiling_features; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AndroidHardwareBufferFormatProperties2ANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format: Format, pub external_format: u64, pub format_features: FormatFeatureFlags2, pub sampler_ycbcr_conversion_components: 
ComponentMapping, pub suggested_ycbcr_model: SamplerYcbcrModelConversion, pub suggested_ycbcr_range: SamplerYcbcrRange, pub suggested_x_chroma_offset: ChromaLocation, pub suggested_y_chroma_offset: ChromaLocation, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AndroidHardwareBufferFormatProperties2ANDROID<'_> {} unsafe impl Sync for AndroidHardwareBufferFormatProperties2ANDROID<'_> {} impl ::core::default::Default for AndroidHardwareBufferFormatProperties2ANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format: Format::default(), external_format: u64::default(), format_features: FormatFeatureFlags2::default(), sampler_ycbcr_conversion_components: ComponentMapping::default(), suggested_ycbcr_model: SamplerYcbcrModelConversion::default(), suggested_ycbcr_range: SamplerYcbcrRange::default(), suggested_x_chroma_offset: ChromaLocation::default(), suggested_y_chroma_offset: ChromaLocation::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AndroidHardwareBufferFormatProperties2ANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID; } unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID for AndroidHardwareBufferFormatProperties2ANDROID<'_> { } impl<'a> AndroidHardwareBufferFormatProperties2ANDROID<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn external_format(mut self, external_format: u64) -> Self { self.external_format = external_format; self } #[inline] pub fn format_features(mut self, format_features: FormatFeatureFlags2) -> Self { self.format_features = format_features; self } #[inline] pub fn sampler_ycbcr_conversion_components( mut self, sampler_ycbcr_conversion_components: ComponentMapping, ) -> Self { self.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components; self } #[inline] pub fn suggested_ycbcr_model( mut self, suggested_ycbcr_model: SamplerYcbcrModelConversion, ) -> Self { self.suggested_ycbcr_model = suggested_ycbcr_model; self } #[inline] pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self { self.suggested_ycbcr_range = suggested_ycbcr_range; self } #[inline] pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self { self.suggested_x_chroma_offset = suggested_x_chroma_offset; self } #[inline] pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self { self.suggested_y_chroma_offset = suggested_y_chroma_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRenderingCreateInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub view_mask: u32, pub color_attachment_count: u32, pub p_color_attachment_formats: *const Format, pub depth_attachment_format: Format, pub stencil_attachment_format: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRenderingCreateInfo<'_> {} unsafe impl Sync for PipelineRenderingCreateInfo<'_> {} impl ::core::default::Default for PipelineRenderingCreateInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), view_mask: u32::default(), color_attachment_count: u32::default(), p_color_attachment_formats: ::core::ptr::null(), depth_attachment_format: Format::default(), stencil_attachment_format: Format::default(), _marker: 
PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRenderingCreateInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RENDERING_CREATE_INFO; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRenderingCreateInfo<'_> {} impl<'a> PipelineRenderingCreateInfo<'a> { #[inline] pub fn view_mask(mut self, view_mask: u32) -> Self { self.view_mask = view_mask; self } #[inline] pub fn color_attachment_formats(mut self, color_attachment_formats: &'a [Format]) -> Self { self.color_attachment_count = color_attachment_formats.len() as _; self.p_color_attachment_formats = color_attachment_formats.as_ptr(); self } #[inline] pub fn depth_attachment_format(mut self, depth_attachment_format: Format) -> Self { self.depth_attachment_format = depth_attachment_format; self } #[inline] pub fn stencil_attachment_format(mut self, stencil_attachment_format: Format) -> Self { self.stencil_attachment_format = stencil_attachment_format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: RenderingFlags, pub render_area: Rect2D, pub layer_count: u32, pub view_mask: u32, pub color_attachment_count: u32, pub p_color_attachments: *const RenderingAttachmentInfo<'a>, pub p_depth_attachment: *const RenderingAttachmentInfo<'a>, pub p_stencil_attachment: *const RenderingAttachmentInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingInfo<'_> {} unsafe impl Sync for RenderingInfo<'_> {} impl ::core::default::Default for RenderingInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: RenderingFlags::default(), render_area: Rect2D::default(), layer_count: u32::default(), view_mask: u32::default(), color_attachment_count: u32::default(), p_color_attachments: ::core::ptr::null(), p_depth_attachment: ::core::ptr::null(), p_stencil_attachment: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_INFO; } pub unsafe trait ExtendsRenderingInfo {} impl<'a> RenderingInfo<'a> { #[inline] pub fn flags(mut self, flags: RenderingFlags) -> Self { self.flags = flags; self } #[inline] pub fn render_area(mut self, render_area: Rect2D) -> Self { self.render_area = render_area; self } #[inline] pub fn layer_count(mut self, layer_count: u32) -> Self { self.layer_count = layer_count; self } #[inline] pub fn view_mask(mut self, view_mask: u32) -> Self { self.view_mask = view_mask; self } #[inline] pub fn color_attachments( mut self, color_attachments: &'a [RenderingAttachmentInfo<'a>], ) -> Self { self.color_attachment_count = color_attachments.len() as _; self.p_color_attachments = color_attachments.as_ptr(); self } #[inline] pub fn depth_attachment(mut self, depth_attachment: &'a RenderingAttachmentInfo<'a>) -> Self { self.p_depth_attachment = depth_attachment; self } #[inline] pub fn stencil_attachment( mut self, stencil_attachment: &'a RenderingAttachmentInfo<'a>, ) -> Self { self.p_stencil_attachment = stencil_attachment; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingAttachmentInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_view: ImageView, pub image_layout: ImageLayout, pub resolve_mode: ResolveModeFlags, pub resolve_image_view: ImageView, pub resolve_image_layout: ImageLayout, pub load_op: AttachmentLoadOp, pub store_op: AttachmentStoreOp, pub clear_value: ClearValue, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingAttachmentInfo<'_> {} unsafe impl Sync for RenderingAttachmentInfo<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for RenderingAttachmentInfo<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("RenderingAttachmentInfo") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("image_view", &self.image_view) .field("image_layout", &self.image_layout) .field("resolve_mode", &self.resolve_mode) .field("resolve_image_view", &self.resolve_image_view) .field("resolve_image_layout", &self.resolve_image_layout) .field("load_op", &self.load_op) .field("store_op", &self.store_op) .field("clear_value", &"union") .finish() } } impl ::core::default::Default for RenderingAttachmentInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_view: ImageView::default(), image_layout: ImageLayout::default(), resolve_mode: ResolveModeFlags::default(), resolve_image_view: ImageView::default(), resolve_image_layout: ImageLayout::default(), load_op: AttachmentLoadOp::default(), store_op: AttachmentStoreOp::default(), clear_value: ClearValue::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingAttachmentInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_ATTACHMENT_INFO; } impl<'a> RenderingAttachmentInfo<'a> { #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } #[inline] pub fn image_layout(mut self, image_layout: ImageLayout) -> Self { self.image_layout = image_layout; self } #[inline] pub fn resolve_mode(mut self, resolve_mode: ResolveModeFlags) -> Self { self.resolve_mode = resolve_mode; self } #[inline] pub fn resolve_image_view(mut self, resolve_image_view: ImageView) -> Self { self.resolve_image_view = resolve_image_view; self } #[inline] pub fn resolve_image_layout(mut self, resolve_image_layout: ImageLayout) -> Self { self.resolve_image_layout = resolve_image_layout; self } #[inline] pub fn load_op(mut self, load_op: AttachmentLoadOp) -> Self { self.load_op = load_op; self } #[inline] pub fn store_op(mut self, store_op: AttachmentStoreOp) -> Self { self.store_op = store_op; self } #[inline] pub fn clear_value(mut self, clear_value: ClearValue) -> Self { self.clear_value = clear_value; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingFragmentShadingRateAttachmentInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_view: ImageView, pub image_layout: ImageLayout, pub 
shading_rate_attachment_texel_size: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingFragmentShadingRateAttachmentInfoKHR<'_> {} unsafe impl Sync for RenderingFragmentShadingRateAttachmentInfoKHR<'_> {} impl ::core::default::Default for RenderingFragmentShadingRateAttachmentInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_view: ImageView::default(), image_layout: ImageLayout::default(), shading_rate_attachment_texel_size: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingFragmentShadingRateAttachmentInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR; } unsafe impl ExtendsRenderingInfo for RenderingFragmentShadingRateAttachmentInfoKHR<'_> {} impl<'a> RenderingFragmentShadingRateAttachmentInfoKHR<'a> { #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } #[inline] pub fn image_layout(mut self, image_layout: ImageLayout) -> Self { self.image_layout = image_layout; self } #[inline] pub fn shading_rate_attachment_texel_size( mut self, shading_rate_attachment_texel_size: Extent2D, ) -> Self { self.shading_rate_attachment_texel_size = shading_rate_attachment_texel_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingFragmentDensityMapAttachmentInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image_view: ImageView, pub image_layout: ImageLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingFragmentDensityMapAttachmentInfoEXT<'_> {} unsafe impl Sync for RenderingFragmentDensityMapAttachmentInfoEXT<'_> {} impl ::core::default::Default for RenderingFragmentDensityMapAttachmentInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image_view: ImageView::default(), image_layout: ImageLayout::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingFragmentDensityMapAttachmentInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT; } unsafe impl ExtendsRenderingInfo for RenderingFragmentDensityMapAttachmentInfoEXT<'_> {} impl<'a> RenderingFragmentDensityMapAttachmentInfoEXT<'a> { #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } #[inline] pub fn image_layout(mut self, image_layout: ImageLayout) -> Self { self.image_layout = image_layout; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDynamicRenderingFeatures<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub dynamic_rendering: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDynamicRenderingFeatures<'_> {} unsafe impl Sync for PhysicalDeviceDynamicRenderingFeatures<'_> {} impl ::core::default::Default for PhysicalDeviceDynamicRenderingFeatures<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), dynamic_rendering: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDynamicRenderingFeatures<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES; } unsafe impl ExtendsPhysicalDeviceFeatures2 
for PhysicalDeviceDynamicRenderingFeatures<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDynamicRenderingFeatures<'_> {} impl<'a> PhysicalDeviceDynamicRenderingFeatures<'a> { #[inline] pub fn dynamic_rendering(mut self, dynamic_rendering: bool) -> Self { self.dynamic_rendering = dynamic_rendering.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CommandBufferInheritanceRenderingInfo<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: RenderingFlags, pub view_mask: u32, pub color_attachment_count: u32, pub p_color_attachment_formats: *const Format, pub depth_attachment_format: Format, pub stencil_attachment_format: Format, pub rasterization_samples: SampleCountFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CommandBufferInheritanceRenderingInfo<'_> {} unsafe impl Sync for CommandBufferInheritanceRenderingInfo<'_> {} impl ::core::default::Default for CommandBufferInheritanceRenderingInfo<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: RenderingFlags::default(), view_mask: u32::default(), color_attachment_count: u32::default(), p_color_attachment_formats: ::core::ptr::null(), depth_attachment_format: Format::default(), stencil_attachment_format: Format::default(), rasterization_samples: SampleCountFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CommandBufferInheritanceRenderingInfo<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_RENDERING_INFO; } unsafe impl ExtendsCommandBufferInheritanceInfo for CommandBufferInheritanceRenderingInfo<'_> {} impl<'a> CommandBufferInheritanceRenderingInfo<'a> { #[inline] pub fn flags(mut self, flags: RenderingFlags) -> Self { self.flags = flags; self } #[inline] pub fn view_mask(mut self, view_mask: u32) -> Self { self.view_mask = view_mask; self } #[inline] pub fn color_attachment_formats(mut self, color_attachment_formats: &'a [Format]) -> Self { self.color_attachment_count = color_attachment_formats.len() as _; self.p_color_attachment_formats = color_attachment_formats.as_ptr(); self } #[inline] pub fn depth_attachment_format(mut self, depth_attachment_format: Format) -> Self { self.depth_attachment_format = depth_attachment_format; self } #[inline] pub fn stencil_attachment_format(mut self, stencil_attachment_format: Format) -> Self { self.stencil_attachment_format = stencil_attachment_format; self } #[inline] pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self { self.rasterization_samples = rasterization_samples; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AttachmentSampleCountInfoAMD<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub color_attachment_count: u32, pub p_color_attachment_samples: *const SampleCountFlags, pub depth_stencil_attachment_samples: SampleCountFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AttachmentSampleCountInfoAMD<'_> {} unsafe impl Sync for AttachmentSampleCountInfoAMD<'_> {} impl ::core::default::Default for AttachmentSampleCountInfoAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), color_attachment_count: u32::default(), p_color_attachment_samples: ::core::ptr::null(), depth_stencil_attachment_samples: SampleCountFlags::default(), _marker: PhantomData, } } } 
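// Editor's illustrative sketch (not part of the generated bindings): the dynamic-rendering
// structs above follow ash's builder pattern, where slice setters such as
// `RenderingInfo::color_attachments` record both the element count and the pointer, and
// `push_next` links an `ExtendsRenderingInfo` struct into the `p_next` chain. The helper
// below only shows how those builders are typically combined; the extent, attachment slice
// and shading-rate struct are assumed to be supplied by the caller, and the result would
// typically be handed to `vkCmdBeginRendering` (e.g. via ash's `Device::cmd_begin_rendering`).
#[allow(dead_code)]
fn _example_dynamic_rendering_info<'a>(
    render_extent: Extent2D,
    color_attachments: &'a [RenderingAttachmentInfo<'a>],
    fragment_shading_rate: &'a mut RenderingFragmentShadingRateAttachmentInfoKHR<'a>,
) -> RenderingInfo<'a> {
    RenderingInfo::default()
        // Full-frame render area; offsets are zero, extent comes from the caller.
        .render_area(Rect2D {
            offset: Offset2D { x: 0, y: 0 },
            extent: render_extent,
        })
        .layer_count(1)
        .view_mask(0)
        // Sets both `color_attachment_count` and `p_color_attachments` from the slice.
        .color_attachments(color_attachments)
        // Valid because RenderingFragmentShadingRateAttachmentInfoKHR implements
        // ExtendsRenderingInfo (see its impl below).
        .push_next(fragment_shading_rate)
}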
unsafe impl<'a> TaggedStructure for AttachmentSampleCountInfoAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_SAMPLE_COUNT_INFO_AMD; } unsafe impl ExtendsCommandBufferInheritanceInfo for AttachmentSampleCountInfoAMD<'_> {} unsafe impl ExtendsGraphicsPipelineCreateInfo for AttachmentSampleCountInfoAMD<'_> {} impl<'a> AttachmentSampleCountInfoAMD<'a> { #[inline] pub fn color_attachment_samples( mut self, color_attachment_samples: &'a [SampleCountFlags], ) -> Self { self.color_attachment_count = color_attachment_samples.len() as _; self.p_color_attachment_samples = color_attachment_samples.as_ptr(); self } #[inline] pub fn depth_stencil_attachment_samples( mut self, depth_stencil_attachment_samples: SampleCountFlags, ) -> Self { self.depth_stencil_attachment_samples = depth_stencil_attachment_samples; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MultiviewPerViewAttributesInfoNVX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub per_view_attributes: Bool32, pub per_view_attributes_position_x_only: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MultiviewPerViewAttributesInfoNVX<'_> {} unsafe impl Sync for MultiviewPerViewAttributesInfoNVX<'_> {} impl ::core::default::Default for MultiviewPerViewAttributesInfoNVX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), per_view_attributes: Bool32::default(), per_view_attributes_position_x_only: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MultiviewPerViewAttributesInfoNVX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX; } unsafe impl ExtendsCommandBufferInheritanceInfo for MultiviewPerViewAttributesInfoNVX<'_> {} unsafe impl ExtendsGraphicsPipelineCreateInfo for MultiviewPerViewAttributesInfoNVX<'_> {} unsafe impl ExtendsRenderingInfo for MultiviewPerViewAttributesInfoNVX<'_> {} impl<'a> MultiviewPerViewAttributesInfoNVX<'a> { #[inline] pub fn per_view_attributes(mut self, per_view_attributes: bool) -> Self { self.per_view_attributes = per_view_attributes.into(); self } #[inline] pub fn per_view_attributes_position_x_only( mut self, per_view_attributes_position_x_only: bool, ) -> Self { self.per_view_attributes_position_x_only = per_view_attributes_position_x_only.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageViewMinLodFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_lod: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageViewMinLodFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceImageViewMinLodFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImageViewMinLodFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_lod: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageViewMinLodFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageViewMinLodFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageViewMinLodFeaturesEXT<'_> {} impl<'a> PhysicalDeviceImageViewMinLodFeaturesEXT<'a> { #[inline] pub fn min_lod(mut self, min_lod: 
bool) -> Self { self.min_lod = min_lod.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewMinLodCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub min_lod: f32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewMinLodCreateInfoEXT<'_> {} unsafe impl Sync for ImageViewMinLodCreateInfoEXT<'_> {} impl ::core::default::Default for ImageViewMinLodCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), min_lod: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewMinLodCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT; } unsafe impl ExtendsImageViewCreateInfo for ImageViewMinLodCreateInfoEXT<'_> {} impl<'a> ImageViewMinLodCreateInfoEXT<'a> { #[inline] pub fn min_lod(mut self, min_lod: f32) -> Self { self.min_lod = min_lod; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub rasterization_order_color_attachment_access: Bool32, pub rasterization_order_depth_attachment_access: Bool32, pub rasterization_order_stencil_attachment_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), rasterization_order_color_attachment_access: Bool32::default(), rasterization_order_depth_attachment_access: Bool32::default(), rasterization_order_stencil_attachment_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'_> { } impl<'a> PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT<'a> { #[inline] pub fn rasterization_order_color_attachment_access( mut self, rasterization_order_color_attachment_access: bool, ) -> Self { self.rasterization_order_color_attachment_access = rasterization_order_color_attachment_access.into(); self } #[inline] pub fn rasterization_order_depth_attachment_access( mut self, rasterization_order_depth_attachment_access: bool, ) -> Self { self.rasterization_order_depth_attachment_access = rasterization_order_depth_attachment_access.into(); self } #[inline] pub fn rasterization_order_stencil_attachment_access( mut self, rasterization_order_stencil_attachment_access: bool, ) -> Self { self.rasterization_order_stencil_attachment_access = rasterization_order_stencil_attachment_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceLinearColorAttachmentFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut 
c_void, pub linear_color_attachment: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceLinearColorAttachmentFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceLinearColorAttachmentFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceLinearColorAttachmentFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), linear_color_attachment: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceLinearColorAttachmentFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLinearColorAttachmentFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLinearColorAttachmentFeaturesNV<'_> {} impl<'a> PhysicalDeviceLinearColorAttachmentFeaturesNV<'a> { #[inline] pub fn linear_color_attachment(mut self, linear_color_attachment: bool) -> Self { self.linear_color_attachment = linear_color_attachment.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub graphics_pipeline_library: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), graphics_pipeline_library: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'_> {} impl<'a> PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT<'a> { #[inline] pub fn graphics_pipeline_library(mut self, graphics_pipeline_library: bool) -> Self { self.graphics_pipeline_library = graphics_pipeline_library.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub graphics_pipeline_library_fast_linking: Bool32, pub graphics_pipeline_library_independent_interpolation_decoration: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), graphics_pipeline_library_fast_linking: Bool32::default(), graphics_pipeline_library_independent_interpolation_decoration: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'_> { } impl<'a> PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT<'a> { #[inline] pub fn graphics_pipeline_library_fast_linking( mut self, graphics_pipeline_library_fast_linking: bool, ) -> Self { self.graphics_pipeline_library_fast_linking = graphics_pipeline_library_fast_linking.into(); self } #[inline] pub fn graphics_pipeline_library_independent_interpolation_decoration( mut self, graphics_pipeline_library_independent_interpolation_decoration: bool, ) -> Self { self.graphics_pipeline_library_independent_interpolation_decoration = graphics_pipeline_library_independent_interpolation_decoration.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GraphicsPipelineLibraryCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: GraphicsPipelineLibraryFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GraphicsPipelineLibraryCreateInfoEXT<'_> {} unsafe impl Sync for GraphicsPipelineLibraryCreateInfoEXT<'_> {} impl ::core::default::Default for GraphicsPipelineLibraryCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: GraphicsPipelineLibraryFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GraphicsPipelineLibraryCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT; } unsafe impl ExtendsGraphicsPipelineCreateInfo for GraphicsPipelineLibraryCreateInfoEXT<'_> {} impl<'a> GraphicsPipelineLibraryCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: GraphicsPipelineLibraryFlagsEXT) -> Self { self.flags = flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub descriptor_set_host_mapping: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'_> {} impl ::core::default::Default for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), descriptor_set_host_mapping: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'_> {} impl<'a> PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE<'a> { #[inline] pub fn descriptor_set_host_mapping(mut self, descriptor_set_host_mapping: bool) -> Self { self.descriptor_set_host_mapping = descriptor_set_host_mapping.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetBindingReferenceVALVE<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub descriptor_set_layout: 
DescriptorSetLayout, pub binding: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetBindingReferenceVALVE<'_> {} unsafe impl Sync for DescriptorSetBindingReferenceVALVE<'_> {} impl ::core::default::Default for DescriptorSetBindingReferenceVALVE<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), descriptor_set_layout: DescriptorSetLayout::default(), binding: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetBindingReferenceVALVE<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_BINDING_REFERENCE_VALVE; } impl<'a> DescriptorSetBindingReferenceVALVE<'a> { #[inline] pub fn descriptor_set_layout(mut self, descriptor_set_layout: DescriptorSetLayout) -> Self { self.descriptor_set_layout = descriptor_set_layout; self } #[inline] pub fn binding(mut self, binding: u32) -> Self { self.binding = binding; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DescriptorSetLayoutHostMappingInfoVALVE<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub descriptor_offset: usize, pub descriptor_size: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DescriptorSetLayoutHostMappingInfoVALVE<'_> {} unsafe impl Sync for DescriptorSetLayoutHostMappingInfoVALVE<'_> {} impl ::core::default::Default for DescriptorSetLayoutHostMappingInfoVALVE<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), descriptor_offset: usize::default(), descriptor_size: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DescriptorSetLayoutHostMappingInfoVALVE<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE; } impl<'a> DescriptorSetLayoutHostMappingInfoVALVE<'a> { #[inline] pub fn descriptor_offset(mut self, descriptor_offset: usize) -> Self { self.descriptor_offset = descriptor_offset; self } #[inline] pub fn descriptor_size(mut self, descriptor_size: u32) -> Self { self.descriptor_size = descriptor_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceNestedCommandBufferFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub nested_command_buffer: Bool32, pub nested_command_buffer_rendering: Bool32, pub nested_command_buffer_simultaneous_use: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceNestedCommandBufferFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceNestedCommandBufferFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceNestedCommandBufferFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), nested_command_buffer: Bool32::default(), nested_command_buffer_rendering: Bool32::default(), nested_command_buffer_simultaneous_use: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceNestedCommandBufferFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceNestedCommandBufferFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceNestedCommandBufferFeaturesEXT<'_> {} impl<'a> PhysicalDeviceNestedCommandBufferFeaturesEXT<'a> { #[inline] pub fn 
nested_command_buffer(mut self, nested_command_buffer: bool) -> Self { self.nested_command_buffer = nested_command_buffer.into(); self } #[inline] pub fn nested_command_buffer_rendering( mut self, nested_command_buffer_rendering: bool, ) -> Self { self.nested_command_buffer_rendering = nested_command_buffer_rendering.into(); self } #[inline] pub fn nested_command_buffer_simultaneous_use( mut self, nested_command_buffer_simultaneous_use: bool, ) -> Self { self.nested_command_buffer_simultaneous_use = nested_command_buffer_simultaneous_use.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceNestedCommandBufferPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_command_buffer_nesting_level: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceNestedCommandBufferPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceNestedCommandBufferPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceNestedCommandBufferPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_command_buffer_nesting_level: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceNestedCommandBufferPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceNestedCommandBufferPropertiesEXT<'_> { } impl<'a> PhysicalDeviceNestedCommandBufferPropertiesEXT<'a> { #[inline] pub fn max_command_buffer_nesting_level( mut self, max_command_buffer_nesting_level: u32, ) -> Self { self.max_command_buffer_nesting_level = max_command_buffer_nesting_level; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_module_identifier: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_module_identifier: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'_> {} impl<'a> PhysicalDeviceShaderModuleIdentifierFeaturesEXT<'a> { #[inline] pub fn shader_module_identifier(mut self, shader_module_identifier: bool) -> Self { self.shader_module_identifier = shader_module_identifier.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_module_identifier_algorithm_uuid: [u8; UUID_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_module_identifier_algorithm_uuid: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'_> { } impl<'a> PhysicalDeviceShaderModuleIdentifierPropertiesEXT<'a> { #[inline] pub fn shader_module_identifier_algorithm_uuid( mut self, shader_module_identifier_algorithm_uuid: [u8; UUID_SIZE], ) -> Self { self.shader_module_identifier_algorithm_uuid = shader_module_identifier_algorithm_uuid; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineShaderStageModuleIdentifierCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub identifier_size: u32, pub p_identifier: *const u8, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineShaderStageModuleIdentifierCreateInfoEXT<'_> {} unsafe impl Sync for PipelineShaderStageModuleIdentifierCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineShaderStageModuleIdentifierCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), identifier_size: u32::default(), p_identifier: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineShaderStageModuleIdentifierCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT; } unsafe impl ExtendsPipelineShaderStageCreateInfo for PipelineShaderStageModuleIdentifierCreateInfoEXT<'_> { } impl<'a> PipelineShaderStageModuleIdentifierCreateInfoEXT<'a> { #[inline] pub fn identifier(mut self, identifier: &'a [u8]) -> Self { self.identifier_size = identifier.len() as _; self.p_identifier = identifier.as_ptr(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ShaderModuleIdentifierEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub identifier_size: u32, pub identifier: [u8; MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ShaderModuleIdentifierEXT<'_> {} unsafe impl Sync for ShaderModuleIdentifierEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for ShaderModuleIdentifierEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("ShaderModuleIdentifierEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("identifier_size", &self.identifier_size) .field("identifier", &self.identifier_as_slice()) .finish() } } impl ::core::default::Default for ShaderModuleIdentifierEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), identifier_size: u32::default(), identifier: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ShaderModuleIdentifierEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SHADER_MODULE_IDENTIFIER_EXT; } impl<'a> ShaderModuleIdentifierEXT<'a> { 
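    // Editor's note (not generated code): `identifier` below is a fixed-size array paired with
    // `identifier_size`, so only the first `identifier_size` bytes are meaningful. The struct is
    // typically filled by the VK_EXT_shader_module_identifier query
    // (vkGetShaderModuleIdentifierEXT); the valid bytes can then be fed back through
    // `PipelineShaderStageModuleIdentifierCreateInfoEXT::identifier(..)` (defined above) to build
    // a pipeline stage from a cached identifier instead of full SPIR-V. Sketch, assuming a
    // hypothetical `ident: ShaderModuleIdentifierEXT` obtained from that query:
    //     let bytes = ident.identifier_as_slice();
    //     let stage_id = PipelineShaderStageModuleIdentifierCreateInfoEXT::default().identifier(bytes);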
#[inline] pub fn identifier(mut self, identifier: &'_ [u8]) -> Self { self.identifier_size = identifier.len() as _; self.identifier[..identifier.len()].copy_from_slice(identifier); self } #[inline] pub fn identifier_as_slice(&self) -> &[u8] { &self.identifier[..self.identifier_size as _] } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageCompressionControlEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ImageCompressionFlagsEXT, pub compression_control_plane_count: u32, pub p_fixed_rate_flags: *mut ImageCompressionFixedRateFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageCompressionControlEXT<'_> {} unsafe impl Sync for ImageCompressionControlEXT<'_> {} impl ::core::default::Default for ImageCompressionControlEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ImageCompressionFlagsEXT::default(), compression_control_plane_count: u32::default(), p_fixed_rate_flags: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageCompressionControlEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_COMPRESSION_CONTROL_EXT; } unsafe impl ExtendsImageCreateInfo for ImageCompressionControlEXT<'_> {} unsafe impl ExtendsSwapchainCreateInfoKHR for ImageCompressionControlEXT<'_> {} unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageCompressionControlEXT<'_> {} impl<'a> ImageCompressionControlEXT<'a> { #[inline] pub fn flags(mut self, flags: ImageCompressionFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn fixed_rate_flags( mut self, fixed_rate_flags: &'a mut [ImageCompressionFixedRateFlagsEXT], ) -> Self { self.compression_control_plane_count = fixed_rate_flags.len() as _; self.p_fixed_rate_flags = fixed_rate_flags.as_mut_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageCompressionControlFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_compression_control: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageCompressionControlFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceImageCompressionControlFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImageCompressionControlFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_compression_control: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageCompressionControlFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageCompressionControlFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageCompressionControlFeaturesEXT<'_> {} impl<'a> PhysicalDeviceImageCompressionControlFeaturesEXT<'a> { #[inline] pub fn image_compression_control(mut self, image_compression_control: bool) -> Self { self.image_compression_control = image_compression_control.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageCompressionPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_compression_flags: ImageCompressionFlagsEXT, pub 
image_compression_fixed_rate_flags: ImageCompressionFixedRateFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageCompressionPropertiesEXT<'_> {} unsafe impl Sync for ImageCompressionPropertiesEXT<'_> {} impl ::core::default::Default for ImageCompressionPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_compression_flags: ImageCompressionFlagsEXT::default(), image_compression_fixed_rate_flags: ImageCompressionFixedRateFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageCompressionPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_COMPRESSION_PROPERTIES_EXT; } unsafe impl ExtendsImageFormatProperties2 for ImageCompressionPropertiesEXT<'_> {} unsafe impl ExtendsSurfaceFormat2KHR for ImageCompressionPropertiesEXT<'_> {} unsafe impl ExtendsSubresourceLayout2KHR for ImageCompressionPropertiesEXT<'_> {} impl<'a> ImageCompressionPropertiesEXT<'a> { #[inline] pub fn image_compression_flags( mut self, image_compression_flags: ImageCompressionFlagsEXT, ) -> Self { self.image_compression_flags = image_compression_flags; self } #[inline] pub fn image_compression_fixed_rate_flags( mut self, image_compression_fixed_rate_flags: ImageCompressionFixedRateFlagsEXT, ) -> Self { self.image_compression_fixed_rate_flags = image_compression_fixed_rate_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_compression_control_swapchain: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_compression_control_swapchain: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'_> { } impl<'a> PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT<'a> { #[inline] pub fn image_compression_control_swapchain( mut self, image_compression_control_swapchain: bool, ) -> Self { self.image_compression_control_swapchain = image_compression_control_swapchain.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageSubresource2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub image_subresource: ImageSubresource, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageSubresource2KHR<'_> {} unsafe impl Sync for ImageSubresource2KHR<'_> {} impl ::core::default::Default for ImageSubresource2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), image_subresource: ImageSubresource::default(), _marker: PhantomData, } } } unsafe impl<'a> 
TaggedStructure for ImageSubresource2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_SUBRESOURCE_2_KHR; } impl<'a> ImageSubresource2KHR<'a> { #[inline] pub fn image_subresource(mut self, image_subresource: ImageSubresource) -> Self { self.image_subresource = image_subresource; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SubresourceLayout2KHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub subresource_layout: SubresourceLayout, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SubresourceLayout2KHR<'_> {} unsafe impl Sync for SubresourceLayout2KHR<'_> {} impl ::core::default::Default for SubresourceLayout2KHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), subresource_layout: SubresourceLayout::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SubresourceLayout2KHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SUBRESOURCE_LAYOUT_2_KHR; } pub unsafe trait ExtendsSubresourceLayout2KHR {} impl<'a> SubresourceLayout2KHR<'a> { #[inline] pub fn subresource_layout(mut self, subresource_layout: SubresourceLayout) -> Self { self.subresource_layout = subresource_layout; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsSubresourceLayout2KHR + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassCreationControlEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub disallow_merging: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassCreationControlEXT<'_> {} unsafe impl Sync for RenderPassCreationControlEXT<'_> {} impl ::core::default::Default for RenderPassCreationControlEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), disallow_merging: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassCreationControlEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATION_CONTROL_EXT; } unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassCreationControlEXT<'_> {} unsafe impl ExtendsSubpassDescription2 for RenderPassCreationControlEXT<'_> {} impl<'a> RenderPassCreationControlEXT<'a> { #[inline] pub fn disallow_merging(mut self, disallow_merging: bool) -> Self { self.disallow_merging = disallow_merging.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct RenderPassCreationFeedbackInfoEXT { pub post_merge_subpass_count: u32, } impl RenderPassCreationFeedbackInfoEXT { #[inline] pub fn post_merge_subpass_count(mut self, post_merge_subpass_count: u32) -> Self { self.post_merge_subpass_count = post_merge_subpass_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use]
pub struct RenderPassCreationFeedbackCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_render_pass_feedback: *mut RenderPassCreationFeedbackInfoEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassCreationFeedbackCreateInfoEXT<'_> {} unsafe impl Sync for RenderPassCreationFeedbackCreateInfoEXT<'_> {} impl ::core::default::Default for RenderPassCreationFeedbackCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_render_pass_feedback: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassCreationFeedbackCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT; } unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassCreationFeedbackCreateInfoEXT<'_> {} impl<'a> RenderPassCreationFeedbackCreateInfoEXT<'a> { #[inline] pub fn render_pass_feedback( mut self, render_pass_feedback: &'a mut RenderPassCreationFeedbackInfoEXT, ) -> Self { self.p_render_pass_feedback = render_pass_feedback; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassSubpassFeedbackInfoEXT { pub subpass_merge_status: SubpassMergeStatusEXT, pub description: [c_char; MAX_DESCRIPTION_SIZE], pub post_merge_index: u32, } #[cfg(feature = "debug")] impl fmt::Debug for RenderPassSubpassFeedbackInfoEXT { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("RenderPassSubpassFeedbackInfoEXT") .field("subpass_merge_status", &self.subpass_merge_status) .field("description", &self.description_as_c_str()) .field("post_merge_index", &self.post_merge_index) .finish() } } impl ::core::default::Default for RenderPassSubpassFeedbackInfoEXT { #[inline] fn default() -> Self { Self { subpass_merge_status: SubpassMergeStatusEXT::default(), description: unsafe { ::core::mem::zeroed() }, post_merge_index: u32::default(), } } } impl RenderPassSubpassFeedbackInfoEXT { #[inline] pub fn subpass_merge_status(mut self, subpass_merge_status: SubpassMergeStatusEXT) -> Self { self.subpass_merge_status = subpass_merge_status; self } #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } #[inline] pub fn post_merge_index(mut self, post_merge_index: u32) -> Self { self.post_merge_index = post_merge_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassSubpassFeedbackCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_subpass_feedback: *mut RenderPassSubpassFeedbackInfoEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassSubpassFeedbackCreateInfoEXT<'_> {} unsafe impl Sync for RenderPassSubpassFeedbackCreateInfoEXT<'_> {} impl ::core::default::Default for RenderPassSubpassFeedbackCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_subpass_feedback: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassSubpassFeedbackCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT; } unsafe impl ExtendsSubpassDescription2 for 
RenderPassSubpassFeedbackCreateInfoEXT<'_> {} impl<'a> RenderPassSubpassFeedbackCreateInfoEXT<'a> { #[inline] pub fn subpass_feedback( mut self, subpass_feedback: &'a mut RenderPassSubpassFeedbackInfoEXT, ) -> Self { self.p_subpass_feedback = subpass_feedback; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub subpass_merge_feedback: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), subpass_merge_feedback: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'_> {} impl<'a> PhysicalDeviceSubpassMergeFeedbackFeaturesEXT<'a> { #[inline] pub fn subpass_merge_feedback(mut self, subpass_merge_feedback: bool) -> Self { self.subpass_merge_feedback = subpass_merge_feedback.into(); self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MicromapBuildInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub ty: MicromapTypeEXT, pub flags: BuildMicromapFlagsEXT, pub mode: BuildMicromapModeEXT, pub dst_micromap: MicromapEXT, pub usage_counts_count: u32, pub p_usage_counts: *const MicromapUsageEXT, pub pp_usage_counts: *const *const MicromapUsageEXT, pub data: DeviceOrHostAddressConstKHR, pub scratch_data: DeviceOrHostAddressKHR, pub triangle_array: DeviceOrHostAddressConstKHR, pub triangle_array_stride: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MicromapBuildInfoEXT<'_> {} unsafe impl Sync for MicromapBuildInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for MicromapBuildInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("MicromapBuildInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("ty", &self.ty) .field("flags", &self.flags) .field("mode", &self.mode) .field("dst_micromap", &self.dst_micromap) .field("usage_counts_count", &self.usage_counts_count) .field("p_usage_counts", &self.p_usage_counts) .field("pp_usage_counts", &self.pp_usage_counts) .field("data", &"union") .field("scratch_data", &"union") .field("triangle_array", &"union") .field("triangle_array_stride", &self.triangle_array_stride) .finish() } } impl ::core::default::Default for MicromapBuildInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), ty: MicromapTypeEXT::default(), flags: BuildMicromapFlagsEXT::default(), mode: BuildMicromapModeEXT::default(), dst_micromap: MicromapEXT::default(), usage_counts_count: u32::default(), p_usage_counts: ::core::ptr::null(), pp_usage_counts: ::core::ptr::null(), data: DeviceOrHostAddressConstKHR::default(), scratch_data: DeviceOrHostAddressKHR::default(), triangle_array: DeviceOrHostAddressConstKHR::default(), triangle_array_stride: 
DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MicromapBuildInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_BUILD_INFO_EXT; } impl<'a> MicromapBuildInfoEXT<'a> { #[inline] pub fn ty(mut self, ty: MicromapTypeEXT) -> Self { self.ty = ty; self } #[inline] pub fn flags(mut self, flags: BuildMicromapFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn mode(mut self, mode: BuildMicromapModeEXT) -> Self { self.mode = mode; self } #[inline] pub fn dst_micromap(mut self, dst_micromap: MicromapEXT) -> Self { self.dst_micromap = dst_micromap; self } #[inline] pub fn usage_counts(mut self, usage_counts: &'a [MicromapUsageEXT]) -> Self { self.usage_counts_count = usage_counts.len() as _; self.p_usage_counts = usage_counts.as_ptr(); self } #[inline] pub fn usage_counts_ptrs(mut self, usage_counts_ptrs: &'a [&'a MicromapUsageEXT]) -> Self { self.usage_counts_count = usage_counts_ptrs.len() as _; self.pp_usage_counts = usage_counts_ptrs.as_ptr().cast(); self } #[inline] pub fn data(mut self, data: DeviceOrHostAddressConstKHR) -> Self { self.data = data; self } #[inline] pub fn scratch_data(mut self, scratch_data: DeviceOrHostAddressKHR) -> Self { self.scratch_data = scratch_data; self } #[inline] pub fn triangle_array(mut self, triangle_array: DeviceOrHostAddressConstKHR) -> Self { self.triangle_array = triangle_array; self } #[inline] pub fn triangle_array_stride(mut self, triangle_array_stride: DeviceSize) -> Self { self.triangle_array_stride = triangle_array_stride; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MicromapCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub create_flags: MicromapCreateFlagsEXT, pub buffer: Buffer, pub offset: DeviceSize, pub size: DeviceSize, pub ty: MicromapTypeEXT, pub device_address: DeviceAddress, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MicromapCreateInfoEXT<'_> {} unsafe impl Sync for MicromapCreateInfoEXT<'_> {} impl ::core::default::Default for MicromapCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), create_flags: MicromapCreateFlagsEXT::default(), buffer: Buffer::default(), offset: DeviceSize::default(), size: DeviceSize::default(), ty: MicromapTypeEXT::default(), device_address: DeviceAddress::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MicromapCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_CREATE_INFO_EXT; } impl<'a> MicromapCreateInfoEXT<'a> { #[inline] pub fn create_flags(mut self, create_flags: MicromapCreateFlagsEXT) -> Self { self.create_flags = create_flags; self } #[inline] pub fn buffer(mut self, buffer: Buffer) -> Self { self.buffer = buffer; self } #[inline] pub fn offset(mut self, offset: DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn ty(mut self, ty: MicromapTypeEXT) -> Self { self.ty = ty; self } #[inline] pub fn device_address(mut self, device_address: DeviceAddress) -> Self { self.device_address = device_address; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MicromapVersionInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_version_data: *const [u8; 2 * UUID_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl 
Send for MicromapVersionInfoEXT<'_> {} unsafe impl Sync for MicromapVersionInfoEXT<'_> {} impl ::core::default::Default for MicromapVersionInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_version_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MicromapVersionInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_VERSION_INFO_EXT; } impl<'a> MicromapVersionInfoEXT<'a> { #[inline] pub fn version_data(mut self, version_data: &'a [u8; 2 * UUID_SIZE]) -> Self { self.p_version_data = version_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyMicromapInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src: MicromapEXT, pub dst: MicromapEXT, pub mode: CopyMicromapModeEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyMicromapInfoEXT<'_> {} unsafe impl Sync for CopyMicromapInfoEXT<'_> {} impl ::core::default::Default for CopyMicromapInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src: MicromapEXT::default(), dst: MicromapEXT::default(), mode: CopyMicromapModeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyMicromapInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_MICROMAP_INFO_EXT; } impl<'a> CopyMicromapInfoEXT<'a> { #[inline] pub fn src(mut self, src: MicromapEXT) -> Self { self.src = src; self } #[inline] pub fn dst(mut self, dst: MicromapEXT) -> Self { self.dst = dst; self } #[inline] pub fn mode(mut self, mode: CopyMicromapModeEXT) -> Self { self.mode = mode; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyMicromapToMemoryInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src: MicromapEXT, pub dst: DeviceOrHostAddressKHR, pub mode: CopyMicromapModeEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CopyMicromapToMemoryInfoEXT<'_> {} unsafe impl Sync for CopyMicromapToMemoryInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for CopyMicromapToMemoryInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("CopyMicromapToMemoryInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("src", &self.src) .field("dst", &"union") .field("mode", &self.mode) .finish() } } impl ::core::default::Default for CopyMicromapToMemoryInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src: MicromapEXT::default(), dst: DeviceOrHostAddressKHR::default(), mode: CopyMicromapModeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyMicromapToMemoryInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_MICROMAP_TO_MEMORY_INFO_EXT; } impl<'a> CopyMicromapToMemoryInfoEXT<'a> { #[inline] pub fn src(mut self, src: MicromapEXT) -> Self { self.src = src; self } #[inline] pub fn dst(mut self, dst: DeviceOrHostAddressKHR) -> Self { self.dst = dst; self } #[inline] pub fn mode(mut self, mode: CopyMicromapModeEXT) -> Self { self.mode = mode; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CopyMemoryToMicromapInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub src: DeviceOrHostAddressConstKHR, pub dst: MicromapEXT, pub mode: CopyMicromapModeEXT, pub _marker: PhantomData<&'a ()>, } unsafe 
impl Send for CopyMemoryToMicromapInfoEXT<'_> {} unsafe impl Sync for CopyMemoryToMicromapInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for CopyMemoryToMicromapInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("CopyMemoryToMicromapInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("src", &"union") .field("dst", &self.dst) .field("mode", &self.mode) .finish() } } impl ::core::default::Default for CopyMemoryToMicromapInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), src: DeviceOrHostAddressConstKHR::default(), dst: MicromapEXT::default(), mode: CopyMicromapModeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CopyMemoryToMicromapInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COPY_MEMORY_TO_MICROMAP_INFO_EXT; } impl<'a> CopyMemoryToMicromapInfoEXT<'a> { #[inline] pub fn src(mut self, src: DeviceOrHostAddressConstKHR) -> Self { self.src = src; self } #[inline] pub fn dst(mut self, dst: MicromapEXT) -> Self { self.dst = dst; self } #[inline] pub fn mode(mut self, mode: CopyMicromapModeEXT) -> Self { self.mode = mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MicromapBuildSizesInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub micromap_size: DeviceSize, pub build_scratch_size: DeviceSize, pub discardable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MicromapBuildSizesInfoEXT<'_> {} unsafe impl Sync for MicromapBuildSizesInfoEXT<'_> {} impl ::core::default::Default for MicromapBuildSizesInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), micromap_size: DeviceSize::default(), build_scratch_size: DeviceSize::default(), discardable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MicromapBuildSizesInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_BUILD_SIZES_INFO_EXT; } impl<'a> MicromapBuildSizesInfoEXT<'a> { #[inline] pub fn micromap_size(mut self, micromap_size: DeviceSize) -> Self { self.micromap_size = micromap_size; self } #[inline] pub fn build_scratch_size(mut self, build_scratch_size: DeviceSize) -> Self { self.build_scratch_size = build_scratch_size; self } #[inline] pub fn discardable(mut self, discardable: bool) -> Self { self.discardable = discardable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MicromapUsageEXT { pub count: u32, pub subdivision_level: u32, pub format: u32, } impl MicromapUsageEXT { #[inline] pub fn count(mut self, count: u32) -> Self { self.count = count; self } #[inline] pub fn subdivision_level(mut self, subdivision_level: u32) -> Self { self.subdivision_level = subdivision_level; self } #[inline] pub fn format(mut self, format: u32) -> Self { self.format = format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct MicromapTriangleEXT { pub data_offset: u32, pub subdivision_level: u16, pub format: u16, } impl MicromapTriangleEXT { #[inline] pub fn data_offset(mut self, data_offset: u32) -> Self { self.data_offset = data_offset; self } #[inline] pub fn subdivision_level(mut self, subdivision_level: u16) -> Self { self.subdivision_level = subdivision_level; self } 
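}

// Illustrative sketch (not part of the generated bindings): the micromap
// builders follow Vulkan's count-plus-pointer convention, so passing a slice
// to `usage_counts` fills both `usage_counts_count` and `p_usage_counts`
// (while `usage_counts_ptrs` fills `pp_usage_counts` from a slice of
// references instead). The function name and values below are hypothetical.
#[allow(dead_code)]
fn example_micromap_usage_counts() {
    // One usage entry: a single triangle at subdivision level 3; the format
    // value is a placeholder.
    let usage = [MicromapUsageEXT::default()
        .count(1)
        .subdivision_level(3)
        .format(1)];
    // The slice both sets the count and provides the pointer.
    let _build_info = MicromapBuildInfoEXT::default().usage_counts(&usage);
}

impl MicromapTriangleEXT {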
#[inline] pub fn format(mut self, format: u16) -> Self { self.format = format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceOpacityMicromapFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub micromap: Bool32, pub micromap_capture_replay: Bool32, pub micromap_host_commands: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceOpacityMicromapFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceOpacityMicromapFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceOpacityMicromapFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), micromap: Bool32::default(), micromap_capture_replay: Bool32::default(), micromap_host_commands: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceOpacityMicromapFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceOpacityMicromapFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceOpacityMicromapFeaturesEXT<'_> {} impl<'a> PhysicalDeviceOpacityMicromapFeaturesEXT<'a> { #[inline] pub fn micromap(mut self, micromap: bool) -> Self { self.micromap = micromap.into(); self } #[inline] pub fn micromap_capture_replay(mut self, micromap_capture_replay: bool) -> Self { self.micromap_capture_replay = micromap_capture_replay.into(); self } #[inline] pub fn micromap_host_commands(mut self, micromap_host_commands: bool) -> Self { self.micromap_host_commands = micromap_host_commands.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceOpacityMicromapPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_opacity2_state_subdivision_level: u32, pub max_opacity4_state_subdivision_level: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceOpacityMicromapPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceOpacityMicromapPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceOpacityMicromapPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_opacity2_state_subdivision_level: u32::default(), max_opacity4_state_subdivision_level: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceOpacityMicromapPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceOpacityMicromapPropertiesEXT<'_> {} impl<'a> PhysicalDeviceOpacityMicromapPropertiesEXT<'a> { #[inline] pub fn max_opacity2_state_subdivision_level( mut self, max_opacity2_state_subdivision_level: u32, ) -> Self { self.max_opacity2_state_subdivision_level = max_opacity2_state_subdivision_level; self } #[inline] pub fn max_opacity4_state_subdivision_level( mut self, max_opacity4_state_subdivision_level: u32, ) -> Self { self.max_opacity4_state_subdivision_level = max_opacity4_state_subdivision_level; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureTrianglesOpacityMicromapEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub index_type: IndexType, pub index_buffer: 
DeviceOrHostAddressConstKHR, pub index_stride: DeviceSize, pub base_triangle: u32, pub usage_counts_count: u32, pub p_usage_counts: *const MicromapUsageEXT, pub pp_usage_counts: *const *const MicromapUsageEXT, pub micromap: MicromapEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureTrianglesOpacityMicromapEXT<'_> {} unsafe impl Sync for AccelerationStructureTrianglesOpacityMicromapEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureTrianglesOpacityMicromapEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureTrianglesOpacityMicromapEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("index_type", &self.index_type) .field("index_buffer", &"union") .field("index_stride", &self.index_stride) .field("base_triangle", &self.base_triangle) .field("usage_counts_count", &self.usage_counts_count) .field("p_usage_counts", &self.p_usage_counts) .field("pp_usage_counts", &self.pp_usage_counts) .field("micromap", &self.micromap) .finish() } } impl ::core::default::Default for AccelerationStructureTrianglesOpacityMicromapEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), index_type: IndexType::default(), index_buffer: DeviceOrHostAddressConstKHR::default(), index_stride: DeviceSize::default(), base_triangle: u32::default(), usage_counts_count: u32::default(), p_usage_counts: ::core::ptr::null(), pp_usage_counts: ::core::ptr::null(), micromap: MicromapEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureTrianglesOpacityMicromapEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT; } unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR for AccelerationStructureTrianglesOpacityMicromapEXT<'_> { } impl<'a> AccelerationStructureTrianglesOpacityMicromapEXT<'a> { #[inline] pub fn index_type(mut self, index_type: IndexType) -> Self { self.index_type = index_type; self } #[inline] pub fn index_buffer(mut self, index_buffer: DeviceOrHostAddressConstKHR) -> Self { self.index_buffer = index_buffer; self } #[inline] pub fn index_stride(mut self, index_stride: DeviceSize) -> Self { self.index_stride = index_stride; self } #[inline] pub fn base_triangle(mut self, base_triangle: u32) -> Self { self.base_triangle = base_triangle; self } #[inline] pub fn usage_counts(mut self, usage_counts: &'a [MicromapUsageEXT]) -> Self { self.usage_counts_count = usage_counts.len() as _; self.p_usage_counts = usage_counts.as_ptr(); self } #[inline] pub fn usage_counts_ptrs(mut self, usage_counts_ptrs: &'a [&'a MicromapUsageEXT]) -> Self { self.usage_counts_count = usage_counts_ptrs.len() as _; self.pp_usage_counts = usage_counts_ptrs.as_ptr().cast(); self } #[inline] pub fn micromap(mut self, micromap: MicromapEXT) -> Self { self.micromap = micromap; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDisplacementMicromapFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub displacement_micromap: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDisplacementMicromapFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceDisplacementMicromapFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDisplacementMicromapFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), displacement_micromap: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDisplacementMicromapFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDisplacementMicromapFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDisplacementMicromapFeaturesNV<'_> {} impl<'a> PhysicalDeviceDisplacementMicromapFeaturesNV<'a> { #[inline] pub fn displacement_micromap(mut self, displacement_micromap: bool) -> Self { self.displacement_micromap = displacement_micromap.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDisplacementMicromapPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_displacement_micromap_subdivision_level: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDisplacementMicromapPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceDisplacementMicromapPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDisplacementMicromapPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_displacement_micromap_subdivision_level: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDisplacementMicromapPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDisplacementMicromapPropertiesNV<'_> { } impl<'a> PhysicalDeviceDisplacementMicromapPropertiesNV<'a> { #[inline] pub fn max_displacement_micromap_subdivision_level( mut self, max_displacement_micromap_subdivision_level: u32, ) -> Self { self.max_displacement_micromap_subdivision_level = max_displacement_micromap_subdivision_level; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AccelerationStructureTrianglesDisplacementMicromapNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub displacement_bias_and_scale_format: Format, pub displacement_vector_format: Format, pub displacement_bias_and_scale_buffer: DeviceOrHostAddressConstKHR, pub displacement_bias_and_scale_stride: DeviceSize, pub displacement_vector_buffer: DeviceOrHostAddressConstKHR, pub displacement_vector_stride: DeviceSize, pub displaced_micromap_primitive_flags: DeviceOrHostAddressConstKHR, pub displaced_micromap_primitive_flags_stride: DeviceSize, pub index_type: IndexType, pub index_buffer: DeviceOrHostAddressConstKHR, pub index_stride: DeviceSize, pub base_triangle: u32, pub usage_counts_count: u32, pub p_usage_counts: *const MicromapUsageEXT, pub pp_usage_counts: *const *const MicromapUsageEXT, pub micromap: MicromapEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AccelerationStructureTrianglesDisplacementMicromapNV<'_> {} unsafe impl Sync for AccelerationStructureTrianglesDisplacementMicromapNV<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for AccelerationStructureTrianglesDisplacementMicromapNV<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("AccelerationStructureTrianglesDisplacementMicromapNV") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field( "displacement_bias_and_scale_format", 
&self.displacement_bias_and_scale_format, ) .field( "displacement_vector_format", &self.displacement_vector_format, ) .field("displacement_bias_and_scale_buffer", &"union") .field( "displacement_bias_and_scale_stride", &self.displacement_bias_and_scale_stride, ) .field("displacement_vector_buffer", &"union") .field( "displacement_vector_stride", &self.displacement_vector_stride, ) .field("displaced_micromap_primitive_flags", &"union") .field( "displaced_micromap_primitive_flags_stride", &self.displaced_micromap_primitive_flags_stride, ) .field("index_type", &self.index_type) .field("index_buffer", &"union") .field("index_stride", &self.index_stride) .field("base_triangle", &self.base_triangle) .field("usage_counts_count", &self.usage_counts_count) .field("p_usage_counts", &self.p_usage_counts) .field("pp_usage_counts", &self.pp_usage_counts) .field("micromap", &self.micromap) .finish() } } impl ::core::default::Default for AccelerationStructureTrianglesDisplacementMicromapNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), displacement_bias_and_scale_format: Format::default(), displacement_vector_format: Format::default(), displacement_bias_and_scale_buffer: DeviceOrHostAddressConstKHR::default(), displacement_bias_and_scale_stride: DeviceSize::default(), displacement_vector_buffer: DeviceOrHostAddressConstKHR::default(), displacement_vector_stride: DeviceSize::default(), displaced_micromap_primitive_flags: DeviceOrHostAddressConstKHR::default(), displaced_micromap_primitive_flags_stride: DeviceSize::default(), index_type: IndexType::default(), index_buffer: DeviceOrHostAddressConstKHR::default(), index_stride: DeviceSize::default(), base_triangle: u32::default(), usage_counts_count: u32::default(), p_usage_counts: ::core::ptr::null(), pp_usage_counts: ::core::ptr::null(), micromap: MicromapEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AccelerationStructureTrianglesDisplacementMicromapNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV; } unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR for AccelerationStructureTrianglesDisplacementMicromapNV<'_> { } impl<'a> AccelerationStructureTrianglesDisplacementMicromapNV<'a> { #[inline] pub fn displacement_bias_and_scale_format( mut self, displacement_bias_and_scale_format: Format, ) -> Self { self.displacement_bias_and_scale_format = displacement_bias_and_scale_format; self } #[inline] pub fn displacement_vector_format(mut self, displacement_vector_format: Format) -> Self { self.displacement_vector_format = displacement_vector_format; self } #[inline] pub fn displacement_bias_and_scale_buffer( mut self, displacement_bias_and_scale_buffer: DeviceOrHostAddressConstKHR, ) -> Self { self.displacement_bias_and_scale_buffer = displacement_bias_and_scale_buffer; self } #[inline] pub fn displacement_bias_and_scale_stride( mut self, displacement_bias_and_scale_stride: DeviceSize, ) -> Self { self.displacement_bias_and_scale_stride = displacement_bias_and_scale_stride; self } #[inline] pub fn displacement_vector_buffer( mut self, displacement_vector_buffer: DeviceOrHostAddressConstKHR, ) -> Self { self.displacement_vector_buffer = displacement_vector_buffer; self } #[inline] pub fn displacement_vector_stride(mut self, displacement_vector_stride: DeviceSize) -> Self { self.displacement_vector_stride = displacement_vector_stride; self } #[inline] pub fn 
displaced_micromap_primitive_flags( mut self, displaced_micromap_primitive_flags: DeviceOrHostAddressConstKHR, ) -> Self { self.displaced_micromap_primitive_flags = displaced_micromap_primitive_flags; self } #[inline] pub fn displaced_micromap_primitive_flags_stride( mut self, displaced_micromap_primitive_flags_stride: DeviceSize, ) -> Self { self.displaced_micromap_primitive_flags_stride = displaced_micromap_primitive_flags_stride; self } #[inline] pub fn index_type(mut self, index_type: IndexType) -> Self { self.index_type = index_type; self } #[inline] pub fn index_buffer(mut self, index_buffer: DeviceOrHostAddressConstKHR) -> Self { self.index_buffer = index_buffer; self } #[inline] pub fn index_stride(mut self, index_stride: DeviceSize) -> Self { self.index_stride = index_stride; self } #[inline] pub fn base_triangle(mut self, base_triangle: u32) -> Self { self.base_triangle = base_triangle; self } #[inline] pub fn usage_counts(mut self, usage_counts: &'a [MicromapUsageEXT]) -> Self { self.usage_counts_count = usage_counts.len() as _; self.p_usage_counts = usage_counts.as_ptr(); self } #[inline] pub fn usage_counts_ptrs(mut self, usage_counts_ptrs: &'a [&'a MicromapUsageEXT]) -> Self { self.usage_counts_count = usage_counts_ptrs.len() as _; self.pp_usage_counts = usage_counts_ptrs.as_ptr().cast(); self } #[inline] pub fn micromap(mut self, micromap: MicromapEXT) -> Self { self.micromap = micromap; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelinePropertiesIdentifierEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_identifier: [u8; UUID_SIZE], pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelinePropertiesIdentifierEXT<'_> {} unsafe impl Sync for PipelinePropertiesIdentifierEXT<'_> {} impl ::core::default::Default for PipelinePropertiesIdentifierEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_identifier: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelinePropertiesIdentifierEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_PROPERTIES_IDENTIFIER_EXT; } impl<'a> PipelinePropertiesIdentifierEXT<'a> { #[inline] pub fn pipeline_identifier(mut self, pipeline_identifier: [u8; UUID_SIZE]) -> Self { self.pipeline_identifier = pipeline_identifier; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelinePropertiesFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_properties_identifier: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelinePropertiesFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePipelinePropertiesFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePipelinePropertiesFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_properties_identifier: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelinePropertiesFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelinePropertiesFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelinePropertiesFeaturesEXT<'_> {} 
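// Illustrative sketch (not generated code): since this feature struct
// implements `ExtendsPhysicalDeviceFeatures2` and `ExtendsDeviceCreateInfo`,
// it is chained onto `PhysicalDeviceFeatures2` when querying support (or onto
// `DeviceCreateInfo` when enabling it). The function name is hypothetical and
// only demonstrates the builder pattern.
#[allow(dead_code)]
fn example_chain_pipeline_properties_feature() {
    let mut pipeline_props = PhysicalDevicePipelinePropertiesFeaturesEXT::default();
    // A call such as `vkGetPhysicalDeviceFeatures2` would fill this chain and
    // set `pipeline_props.pipeline_properties_identifier`.
    let _features2 = PhysicalDeviceFeatures2::default().push_next(&mut pipeline_props);
}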
impl<'a> PhysicalDevicePipelinePropertiesFeaturesEXT<'a> { #[inline] pub fn pipeline_properties_identifier(mut self, pipeline_properties_identifier: bool) -> Self { self.pipeline_properties_identifier = pipeline_properties_identifier.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_early_and_late_fragment_tests: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'_> {} unsafe impl Sync for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'_> {} impl ::core::default::Default for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_early_and_late_fragment_tests: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'_> { } impl<'a> PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD<'a> { #[inline] pub fn shader_early_and_late_fragment_tests( mut self, shader_early_and_late_fragment_tests: bool, ) -> Self { self.shader_early_and_late_fragment_tests = shader_early_and_late_fragment_tests.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalMemoryAcquireUnmodifiedEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub acquire_unmodified_memory: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalMemoryAcquireUnmodifiedEXT<'_> {} unsafe impl Sync for ExternalMemoryAcquireUnmodifiedEXT<'_> {} impl ::core::default::Default for ExternalMemoryAcquireUnmodifiedEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), acquire_unmodified_memory: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalMemoryAcquireUnmodifiedEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT; } unsafe impl ExtendsBufferMemoryBarrier for ExternalMemoryAcquireUnmodifiedEXT<'_> {} unsafe impl ExtendsBufferMemoryBarrier2 for ExternalMemoryAcquireUnmodifiedEXT<'_> {} unsafe impl ExtendsImageMemoryBarrier for ExternalMemoryAcquireUnmodifiedEXT<'_> {} unsafe impl ExtendsImageMemoryBarrier2 for ExternalMemoryAcquireUnmodifiedEXT<'_> {} impl<'a> ExternalMemoryAcquireUnmodifiedEXT<'a> { #[inline] pub fn acquire_unmodified_memory(mut self, acquire_unmodified_memory: bool) -> Self { self.acquire_unmodified_memory = acquire_unmodified_memory.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalObjectCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub export_object_type: ExportMetalObjectTypeFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl Sync for 
ExportMetalObjectCreateInfoEXT<'_> {} impl ::core::default::Default for ExportMetalObjectCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), export_object_type: ExportMetalObjectTypeFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalObjectCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_OBJECT_CREATE_INFO_EXT; } unsafe impl ExtendsInstanceCreateInfo for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl ExtendsMemoryAllocateInfo for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl ExtendsImageCreateInfo for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl ExtendsImageViewCreateInfo for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl ExtendsBufferViewCreateInfo for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl ExtendsSemaphoreCreateInfo for ExportMetalObjectCreateInfoEXT<'_> {} unsafe impl ExtendsEventCreateInfo for ExportMetalObjectCreateInfoEXT<'_> {} impl<'a> ExportMetalObjectCreateInfoEXT<'a> { #[inline] pub fn export_object_type(mut self, export_object_type: ExportMetalObjectTypeFlagsEXT) -> Self { self.export_object_type = export_object_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalObjectsInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalObjectsInfoEXT<'_> {} unsafe impl Sync for ExportMetalObjectsInfoEXT<'_> {} impl ::core::default::Default for ExportMetalObjectsInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalObjectsInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_OBJECTS_INFO_EXT; } pub unsafe trait ExtendsExportMetalObjectsInfoEXT {} impl<'a> ExportMetalObjectsInfoEXT<'a> { #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalDeviceInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub mtl_device: MTLDevice_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalDeviceInfoEXT<'_> {} unsafe impl Sync for ExportMetalDeviceInfoEXT<'_> {} impl ::core::default::Default for ExportMetalDeviceInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), mtl_device: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalDeviceInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_DEVICE_INFO_EXT; } unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalDeviceInfoEXT<'_> {} impl<'a> ExportMetalDeviceInfoEXT<'a> { #[inline] pub fn mtl_device(mut self, mtl_device: MTLDevice_id) -> Self { self.mtl_device = mtl_device; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalCommandQueueInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub queue: Queue, pub mtl_command_queue: MTLCommandQueue_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalCommandQueueInfoEXT<'_> {} unsafe impl Sync for ExportMetalCommandQueueInfoEXT<'_> {} impl ::core::default::Default for ExportMetalCommandQueueInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), queue: Queue::default(), mtl_command_queue: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalCommandQueueInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_COMMAND_QUEUE_INFO_EXT; } unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalCommandQueueInfoEXT<'_> {} impl<'a> ExportMetalCommandQueueInfoEXT<'a> { #[inline] pub fn queue(mut self, queue: Queue) -> Self { self.queue = queue; self } #[inline] pub fn mtl_command_queue(mut self, mtl_command_queue: MTLCommandQueue_id) -> Self { self.mtl_command_queue = mtl_command_queue; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalBufferInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub memory: DeviceMemory, pub mtl_buffer: MTLBuffer_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalBufferInfoEXT<'_> {} unsafe impl Sync for ExportMetalBufferInfoEXT<'_> {} impl ::core::default::Default for ExportMetalBufferInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), memory: DeviceMemory::default(), mtl_buffer: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalBufferInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_BUFFER_INFO_EXT; } unsafe impl ExtendsExportMetalObjectsInfoEXT for 
ExportMetalBufferInfoEXT<'_> {} impl<'a> ExportMetalBufferInfoEXT<'a> { #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn mtl_buffer(mut self, mtl_buffer: MTLBuffer_id) -> Self { self.mtl_buffer = mtl_buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMetalBufferInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub mtl_buffer: MTLBuffer_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMetalBufferInfoEXT<'_> {} unsafe impl Sync for ImportMetalBufferInfoEXT<'_> {} impl ::core::default::Default for ImportMetalBufferInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), mtl_buffer: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMetalBufferInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_BUFFER_INFO_EXT; } unsafe impl ExtendsMemoryAllocateInfo for ImportMetalBufferInfoEXT<'_> {} impl<'a> ImportMetalBufferInfoEXT<'a> { #[inline] pub fn mtl_buffer(mut self, mtl_buffer: MTLBuffer_id) -> Self { self.mtl_buffer = mtl_buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalTextureInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub image_view: ImageView, pub buffer_view: BufferView, pub plane: ImageAspectFlags, pub mtl_texture: MTLTexture_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalTextureInfoEXT<'_> {} unsafe impl Sync for ExportMetalTextureInfoEXT<'_> {} impl ::core::default::Default for ExportMetalTextureInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), image_view: ImageView::default(), buffer_view: BufferView::default(), plane: ImageAspectFlags::default(), mtl_texture: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalTextureInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_TEXTURE_INFO_EXT; } unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalTextureInfoEXT<'_> {} impl<'a> ExportMetalTextureInfoEXT<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn image_view(mut self, image_view: ImageView) -> Self { self.image_view = image_view; self } #[inline] pub fn buffer_view(mut self, buffer_view: BufferView) -> Self { self.buffer_view = buffer_view; self } #[inline] pub fn plane(mut self, plane: ImageAspectFlags) -> Self { self.plane = plane; self } #[inline] pub fn mtl_texture(mut self, mtl_texture: MTLTexture_id) -> Self { self.mtl_texture = mtl_texture; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMetalTextureInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub plane: ImageAspectFlags, pub mtl_texture: MTLTexture_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMetalTextureInfoEXT<'_> {} unsafe impl Sync for ImportMetalTextureInfoEXT<'_> {} impl ::core::default::Default for ImportMetalTextureInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), plane: ImageAspectFlags::default(), mtl_texture: unsafe { 
::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMetalTextureInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_TEXTURE_INFO_EXT; } unsafe impl ExtendsImageCreateInfo for ImportMetalTextureInfoEXT<'_> {} impl<'a> ImportMetalTextureInfoEXT<'a> { #[inline] pub fn plane(mut self, plane: ImageAspectFlags) -> Self { self.plane = plane; self } #[inline] pub fn mtl_texture(mut self, mtl_texture: MTLTexture_id) -> Self { self.mtl_texture = mtl_texture; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalIOSurfaceInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub image: Image, pub io_surface: IOSurfaceRef, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalIOSurfaceInfoEXT<'_> {} unsafe impl Sync for ExportMetalIOSurfaceInfoEXT<'_> {} impl ::core::default::Default for ExportMetalIOSurfaceInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), image: Image::default(), io_surface: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalIOSurfaceInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_IO_SURFACE_INFO_EXT; } unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalIOSurfaceInfoEXT<'_> {} impl<'a> ExportMetalIOSurfaceInfoEXT<'a> { #[inline] pub fn image(mut self, image: Image) -> Self { self.image = image; self } #[inline] pub fn io_surface(mut self, io_surface: IOSurfaceRef) -> Self { self.io_surface = io_surface; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMetalIOSurfaceInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub io_surface: IOSurfaceRef, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMetalIOSurfaceInfoEXT<'_> {} unsafe impl Sync for ImportMetalIOSurfaceInfoEXT<'_> {} impl ::core::default::Default for ImportMetalIOSurfaceInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), io_surface: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMetalIOSurfaceInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_IO_SURFACE_INFO_EXT; } unsafe impl ExtendsImageCreateInfo for ImportMetalIOSurfaceInfoEXT<'_> {} impl<'a> ImportMetalIOSurfaceInfoEXT<'a> { #[inline] pub fn io_surface(mut self, io_surface: IOSurfaceRef) -> Self { self.io_surface = io_surface; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExportMetalSharedEventInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub semaphore: Semaphore, pub event: Event, pub mtl_shared_event: MTLSharedEvent_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExportMetalSharedEventInfoEXT<'_> {} unsafe impl Sync for ExportMetalSharedEventInfoEXT<'_> {} impl ::core::default::Default for ExportMetalSharedEventInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), semaphore: Semaphore::default(), event: Event::default(), mtl_shared_event: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExportMetalSharedEventInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = 
StructureType::EXPORT_METAL_SHARED_EVENT_INFO_EXT; } unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalSharedEventInfoEXT<'_> {} impl<'a> ExportMetalSharedEventInfoEXT<'a> { #[inline] pub fn semaphore(mut self, semaphore: Semaphore) -> Self { self.semaphore = semaphore; self } #[inline] pub fn event(mut self, event: Event) -> Self { self.event = event; self } #[inline] pub fn mtl_shared_event(mut self, mtl_shared_event: MTLSharedEvent_id) -> Self { self.mtl_shared_event = mtl_shared_event; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportMetalSharedEventInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub mtl_shared_event: MTLSharedEvent_id, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportMetalSharedEventInfoEXT<'_> {} unsafe impl Sync for ImportMetalSharedEventInfoEXT<'_> {} impl ::core::default::Default for ImportMetalSharedEventInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), mtl_shared_event: unsafe { ::core::mem::zeroed() }, _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportMetalSharedEventInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_SHARED_EVENT_INFO_EXT; } unsafe impl ExtendsSemaphoreCreateInfo for ImportMetalSharedEventInfoEXT<'_> {} unsafe impl ExtendsEventCreateInfo for ImportMetalSharedEventInfoEXT<'_> {} impl<'a> ImportMetalSharedEventInfoEXT<'a> { #[inline] pub fn mtl_shared_event(mut self, mtl_shared_event: MTLSharedEvent_id) -> Self { self.mtl_shared_event = mtl_shared_event; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub non_seamless_cube_map: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), non_seamless_cube_map: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'_> {} impl<'a> PhysicalDeviceNonSeamlessCubeMapFeaturesEXT<'a> { #[inline] pub fn non_seamless_cube_map(mut self, non_seamless_cube_map: bool) -> Self { self.non_seamless_cube_map = non_seamless_cube_map.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelineRobustnessFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_robustness: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelineRobustnessFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePipelineRobustnessFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePipelineRobustnessFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: 
::core::ptr::null_mut(), pipeline_robustness: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelineRobustnessFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineRobustnessFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineRobustnessFeaturesEXT<'_> {} impl<'a> PhysicalDevicePipelineRobustnessFeaturesEXT<'a> { #[inline] pub fn pipeline_robustness(mut self, pipeline_robustness: bool) -> Self { self.pipeline_robustness = pipeline_robustness.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineRobustnessCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub storage_buffers: PipelineRobustnessBufferBehaviorEXT, pub uniform_buffers: PipelineRobustnessBufferBehaviorEXT, pub vertex_inputs: PipelineRobustnessBufferBehaviorEXT, pub images: PipelineRobustnessImageBehaviorEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineRobustnessCreateInfoEXT<'_> {} unsafe impl Sync for PipelineRobustnessCreateInfoEXT<'_> {} impl ::core::default::Default for PipelineRobustnessCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), storage_buffers: PipelineRobustnessBufferBehaviorEXT::default(), uniform_buffers: PipelineRobustnessBufferBehaviorEXT::default(), vertex_inputs: PipelineRobustnessBufferBehaviorEXT::default(), images: PipelineRobustnessImageBehaviorEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineRobustnessCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_ROBUSTNESS_CREATE_INFO_EXT; } unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRobustnessCreateInfoEXT<'_> {} unsafe impl ExtendsComputePipelineCreateInfo for PipelineRobustnessCreateInfoEXT<'_> {} unsafe impl ExtendsPipelineShaderStageCreateInfo for PipelineRobustnessCreateInfoEXT<'_> {} unsafe impl ExtendsRayTracingPipelineCreateInfoKHR for PipelineRobustnessCreateInfoEXT<'_> {} impl<'a> PipelineRobustnessCreateInfoEXT<'a> { #[inline] pub fn storage_buffers(mut self, storage_buffers: PipelineRobustnessBufferBehaviorEXT) -> Self { self.storage_buffers = storage_buffers; self } #[inline] pub fn uniform_buffers(mut self, uniform_buffers: PipelineRobustnessBufferBehaviorEXT) -> Self { self.uniform_buffers = uniform_buffers; self } #[inline] pub fn vertex_inputs(mut self, vertex_inputs: PipelineRobustnessBufferBehaviorEXT) -> Self { self.vertex_inputs = vertex_inputs; self } #[inline] pub fn images(mut self, images: PipelineRobustnessImageBehaviorEXT) -> Self { self.images = images; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelineRobustnessPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub default_robustness_storage_buffers: PipelineRobustnessBufferBehaviorEXT, pub default_robustness_uniform_buffers: PipelineRobustnessBufferBehaviorEXT, pub default_robustness_vertex_inputs: PipelineRobustnessBufferBehaviorEXT, pub default_robustness_images: PipelineRobustnessImageBehaviorEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelineRobustnessPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDevicePipelineRobustnessPropertiesEXT<'_> 
{} impl ::core::default::Default for PhysicalDevicePipelineRobustnessPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), default_robustness_storage_buffers: PipelineRobustnessBufferBehaviorEXT::default(), default_robustness_uniform_buffers: PipelineRobustnessBufferBehaviorEXT::default(), default_robustness_vertex_inputs: PipelineRobustnessBufferBehaviorEXT::default(), default_robustness_images: PipelineRobustnessImageBehaviorEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelineRobustnessPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePipelineRobustnessPropertiesEXT<'_> {} impl<'a> PhysicalDevicePipelineRobustnessPropertiesEXT<'a> { #[inline] pub fn default_robustness_storage_buffers( mut self, default_robustness_storage_buffers: PipelineRobustnessBufferBehaviorEXT, ) -> Self { self.default_robustness_storage_buffers = default_robustness_storage_buffers; self } #[inline] pub fn default_robustness_uniform_buffers( mut self, default_robustness_uniform_buffers: PipelineRobustnessBufferBehaviorEXT, ) -> Self { self.default_robustness_uniform_buffers = default_robustness_uniform_buffers; self } #[inline] pub fn default_robustness_vertex_inputs( mut self, default_robustness_vertex_inputs: PipelineRobustnessBufferBehaviorEXT, ) -> Self { self.default_robustness_vertex_inputs = default_robustness_vertex_inputs; self } #[inline] pub fn default_robustness_images( mut self, default_robustness_images: PipelineRobustnessImageBehaviorEXT, ) -> Self { self.default_robustness_images = default_robustness_images; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImageViewSampleWeightCreateInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub filter_center: Offset2D, pub filter_size: Extent2D, pub num_phases: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImageViewSampleWeightCreateInfoQCOM<'_> {} unsafe impl Sync for ImageViewSampleWeightCreateInfoQCOM<'_> {} impl ::core::default::Default for ImageViewSampleWeightCreateInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), filter_center: Offset2D::default(), filter_size: Extent2D::default(), num_phases: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImageViewSampleWeightCreateInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM; } unsafe impl ExtendsImageViewCreateInfo for ImageViewSampleWeightCreateInfoQCOM<'_> {} impl<'a> ImageViewSampleWeightCreateInfoQCOM<'a> { #[inline] pub fn filter_center(mut self, filter_center: Offset2D) -> Self { self.filter_center = filter_center; self } #[inline] pub fn filter_size(mut self, filter_size: Extent2D) -> Self { self.filter_size = filter_size; self } #[inline] pub fn num_phases(mut self, num_phases: u32) -> Self { self.num_phases = num_phases; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageProcessingFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub texture_sample_weighted: Bool32, pub texture_box_filter: Bool32, pub texture_block_match: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe 
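// Usage sketch (not part of the generated bindings): PhysicalDevicePipelineRobustnessPropertiesEXT above is filled in by the driver; chain it into PhysicalDeviceProperties2 to read the default robustness behaviour. `instance` and `physical_device` are hypothetical caller-provided values.
//
//     let mut robustness_props = vk::PhysicalDevicePipelineRobustnessPropertiesEXT::default();
//     let mut props2 = vk::PhysicalDeviceProperties2::default().push_next(&mut robustness_props);
//     unsafe { instance.get_physical_device_properties2(physical_device, &mut props2) };
//     let _default_storage_buffers = robustness_props.default_robustness_storage_buffers;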
impl Send for PhysicalDeviceImageProcessingFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceImageProcessingFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceImageProcessingFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), texture_sample_weighted: Bool32::default(), texture_box_filter: Bool32::default(), texture_block_match: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageProcessingFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageProcessingFeaturesQCOM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageProcessingFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceImageProcessingFeaturesQCOM<'a> { #[inline] pub fn texture_sample_weighted(mut self, texture_sample_weighted: bool) -> Self { self.texture_sample_weighted = texture_sample_weighted.into(); self } #[inline] pub fn texture_box_filter(mut self, texture_box_filter: bool) -> Self { self.texture_box_filter = texture_box_filter.into(); self } #[inline] pub fn texture_block_match(mut self, texture_block_match: bool) -> Self { self.texture_block_match = texture_block_match.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageProcessingPropertiesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_weight_filter_phases: u32, pub max_weight_filter_dimension: Extent2D, pub max_block_match_region: Extent2D, pub max_box_filter_block_size: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageProcessingPropertiesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceImageProcessingPropertiesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceImageProcessingPropertiesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_weight_filter_phases: u32::default(), max_weight_filter_dimension: Extent2D::default(), max_block_match_region: Extent2D::default(), max_box_filter_block_size: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageProcessingPropertiesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceImageProcessingPropertiesQCOM<'_> {} impl<'a> PhysicalDeviceImageProcessingPropertiesQCOM<'a> { #[inline] pub fn max_weight_filter_phases(mut self, max_weight_filter_phases: u32) -> Self { self.max_weight_filter_phases = max_weight_filter_phases; self } #[inline] pub fn max_weight_filter_dimension(mut self, max_weight_filter_dimension: Extent2D) -> Self { self.max_weight_filter_dimension = max_weight_filter_dimension; self } #[inline] pub fn max_block_match_region(mut self, max_block_match_region: Extent2D) -> Self { self.max_block_match_region = max_block_match_region; self } #[inline] pub fn max_box_filter_block_size(mut self, max_box_filter_block_size: Extent2D) -> Self { self.max_box_filter_block_size = max_box_filter_block_size; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceTilePropertiesFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub tile_properties: Bool32, 
pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceTilePropertiesFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceTilePropertiesFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceTilePropertiesFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), tile_properties: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceTilePropertiesFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTilePropertiesFeaturesQCOM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTilePropertiesFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceTilePropertiesFeaturesQCOM<'a> { #[inline] pub fn tile_properties(mut self, tile_properties: bool) -> Self { self.tile_properties = tile_properties.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct TilePropertiesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub tile_size: Extent3D, pub apron_size: Extent2D, pub origin: Offset2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for TilePropertiesQCOM<'_> {} unsafe impl Sync for TilePropertiesQCOM<'_> {} impl ::core::default::Default for TilePropertiesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), tile_size: Extent3D::default(), apron_size: Extent2D::default(), origin: Offset2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for TilePropertiesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::TILE_PROPERTIES_QCOM; } impl<'a> TilePropertiesQCOM<'a> { #[inline] pub fn tile_size(mut self, tile_size: Extent3D) -> Self { self.tile_size = tile_size; self } #[inline] pub fn apron_size(mut self, apron_size: Extent2D) -> Self { self.apron_size = apron_size; self } #[inline] pub fn origin(mut self, origin: Offset2D) -> Self { self.origin = origin; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceAmigoProfilingFeaturesSEC<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub amigo_profiling: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceAmigoProfilingFeaturesSEC<'_> {} unsafe impl Sync for PhysicalDeviceAmigoProfilingFeaturesSEC<'_> {} impl ::core::default::Default for PhysicalDeviceAmigoProfilingFeaturesSEC<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), amigo_profiling: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceAmigoProfilingFeaturesSEC<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAmigoProfilingFeaturesSEC<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAmigoProfilingFeaturesSEC<'_> {} impl<'a> PhysicalDeviceAmigoProfilingFeaturesSEC<'a> { #[inline] pub fn amigo_profiling(mut self, amigo_profiling: bool) -> Self { self.amigo_profiling = amigo_profiling.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AmigoProfilingSubmitInfoSEC<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub 
first_draw_timestamp: u64, pub swap_buffer_timestamp: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AmigoProfilingSubmitInfoSEC<'_> {} unsafe impl Sync for AmigoProfilingSubmitInfoSEC<'_> {} impl ::core::default::Default for AmigoProfilingSubmitInfoSEC<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), first_draw_timestamp: u64::default(), swap_buffer_timestamp: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AmigoProfilingSubmitInfoSEC<'a> { const STRUCTURE_TYPE: StructureType = StructureType::AMIGO_PROFILING_SUBMIT_INFO_SEC; } unsafe impl ExtendsSubmitInfo for AmigoProfilingSubmitInfoSEC<'_> {} impl<'a> AmigoProfilingSubmitInfoSEC<'a> { #[inline] pub fn first_draw_timestamp(mut self, first_draw_timestamp: u64) -> Self { self.first_draw_timestamp = first_draw_timestamp; self } #[inline] pub fn swap_buffer_timestamp(mut self, swap_buffer_timestamp: u64) -> Self { self.swap_buffer_timestamp = swap_buffer_timestamp; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub attachment_feedback_loop_layout: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), attachment_feedback_loop_layout: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'_> {} impl<'a> PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT<'a> { #[inline] pub fn attachment_feedback_loop_layout( mut self, attachment_feedback_loop_layout: bool, ) -> Self { self.attachment_feedback_loop_layout = attachment_feedback_loop_layout.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDepthClampZeroOneFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub depth_clamp_zero_one: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDepthClampZeroOneFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDepthClampZeroOneFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDepthClampZeroOneFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), depth_clamp_zero_one: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDepthClampZeroOneFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClampZeroOneFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClampZeroOneFeaturesEXT<'_> {} impl<'a> 
PhysicalDeviceDepthClampZeroOneFeaturesEXT<'a> { #[inline] pub fn depth_clamp_zero_one(mut self, depth_clamp_zero_one: bool) -> Self { self.depth_clamp_zero_one = depth_clamp_zero_one.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceAddressBindingReportFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub report_address_binding: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceAddressBindingReportFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceAddressBindingReportFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceAddressBindingReportFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), report_address_binding: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceAddressBindingReportFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAddressBindingReportFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAddressBindingReportFeaturesEXT<'_> {} impl<'a> PhysicalDeviceAddressBindingReportFeaturesEXT<'a> { #[inline] pub fn report_address_binding(mut self, report_address_binding: bool) -> Self { self.report_address_binding = report_address_binding.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceAddressBindingCallbackDataEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: DeviceAddressBindingFlagsEXT, pub base_address: DeviceAddress, pub size: DeviceSize, pub binding_type: DeviceAddressBindingTypeEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceAddressBindingCallbackDataEXT<'_> {} unsafe impl Sync for DeviceAddressBindingCallbackDataEXT<'_> {} impl ::core::default::Default for DeviceAddressBindingCallbackDataEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: DeviceAddressBindingFlagsEXT::default(), base_address: DeviceAddress::default(), size: DeviceSize::default(), binding_type: DeviceAddressBindingTypeEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceAddressBindingCallbackDataEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT; } unsafe impl ExtendsDebugUtilsMessengerCallbackDataEXT for DeviceAddressBindingCallbackDataEXT<'_> {} impl<'a> DeviceAddressBindingCallbackDataEXT<'a> { #[inline] pub fn flags(mut self, flags: DeviceAddressBindingFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn base_address(mut self, base_address: DeviceAddress) -> Self { self.base_address = base_address; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[inline] pub fn binding_type(mut self, binding_type: DeviceAddressBindingTypeEXT) -> Self { self.binding_type = binding_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceOpticalFlowFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub optical_flow: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceOpticalFlowFeaturesNV<'_> {} unsafe impl Sync for 
PhysicalDeviceOpticalFlowFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceOpticalFlowFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), optical_flow: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceOpticalFlowFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceOpticalFlowFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceOpticalFlowFeaturesNV<'_> {} impl<'a> PhysicalDeviceOpticalFlowFeaturesNV<'a> { #[inline] pub fn optical_flow(mut self, optical_flow: bool) -> Self { self.optical_flow = optical_flow.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceOpticalFlowPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub supported_output_grid_sizes: OpticalFlowGridSizeFlagsNV, pub supported_hint_grid_sizes: OpticalFlowGridSizeFlagsNV, pub hint_supported: Bool32, pub cost_supported: Bool32, pub bidirectional_flow_supported: Bool32, pub global_flow_supported: Bool32, pub min_width: u32, pub min_height: u32, pub max_width: u32, pub max_height: u32, pub max_num_regions_of_interest: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceOpticalFlowPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceOpticalFlowPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceOpticalFlowPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), supported_output_grid_sizes: OpticalFlowGridSizeFlagsNV::default(), supported_hint_grid_sizes: OpticalFlowGridSizeFlagsNV::default(), hint_supported: Bool32::default(), cost_supported: Bool32::default(), bidirectional_flow_supported: Bool32::default(), global_flow_supported: Bool32::default(), min_width: u32::default(), min_height: u32::default(), max_width: u32::default(), max_height: u32::default(), max_num_regions_of_interest: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceOpticalFlowPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceOpticalFlowPropertiesNV<'_> {} impl<'a> PhysicalDeviceOpticalFlowPropertiesNV<'a> { #[inline] pub fn supported_output_grid_sizes( mut self, supported_output_grid_sizes: OpticalFlowGridSizeFlagsNV, ) -> Self { self.supported_output_grid_sizes = supported_output_grid_sizes; self } #[inline] pub fn supported_hint_grid_sizes( mut self, supported_hint_grid_sizes: OpticalFlowGridSizeFlagsNV, ) -> Self { self.supported_hint_grid_sizes = supported_hint_grid_sizes; self } #[inline] pub fn hint_supported(mut self, hint_supported: bool) -> Self { self.hint_supported = hint_supported.into(); self } #[inline] pub fn cost_supported(mut self, cost_supported: bool) -> Self { self.cost_supported = cost_supported.into(); self } #[inline] pub fn bidirectional_flow_supported(mut self, bidirectional_flow_supported: bool) -> Self { self.bidirectional_flow_supported = bidirectional_flow_supported.into(); self } #[inline] pub fn global_flow_supported(mut self, global_flow_supported: bool) -> Self { self.global_flow_supported = global_flow_supported.into(); self } #[inline] pub fn min_width(mut self, 
min_width: u32) -> Self { self.min_width = min_width; self } #[inline] pub fn min_height(mut self, min_height: u32) -> Self { self.min_height = min_height; self } #[inline] pub fn max_width(mut self, max_width: u32) -> Self { self.max_width = max_width; self } #[inline] pub fn max_height(mut self, max_height: u32) -> Self { self.max_height = max_height; self } #[inline] pub fn max_num_regions_of_interest(mut self, max_num_regions_of_interest: u32) -> Self { self.max_num_regions_of_interest = max_num_regions_of_interest; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OpticalFlowImageFormatInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub usage: OpticalFlowUsageFlagsNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OpticalFlowImageFormatInfoNV<'_> {} unsafe impl Sync for OpticalFlowImageFormatInfoNV<'_> {} impl ::core::default::Default for OpticalFlowImageFormatInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), usage: OpticalFlowUsageFlagsNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OpticalFlowImageFormatInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV; } unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for OpticalFlowImageFormatInfoNV<'_> {} unsafe impl ExtendsImageCreateInfo for OpticalFlowImageFormatInfoNV<'_> {} impl<'a> OpticalFlowImageFormatInfoNV<'a> { #[inline] pub fn usage(mut self, usage: OpticalFlowUsageFlagsNV) -> Self { self.usage = usage; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OpticalFlowImageFormatPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub format: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OpticalFlowImageFormatPropertiesNV<'_> {} unsafe impl Sync for OpticalFlowImageFormatPropertiesNV<'_> {} impl ::core::default::Default for OpticalFlowImageFormatPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), format: Format::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OpticalFlowImageFormatPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV; } impl<'a> OpticalFlowImageFormatPropertiesNV<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OpticalFlowSessionCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub width: u32, pub height: u32, pub image_format: Format, pub flow_vector_format: Format, pub cost_format: Format, pub output_grid_size: OpticalFlowGridSizeFlagsNV, pub hint_grid_size: OpticalFlowGridSizeFlagsNV, pub performance_level: OpticalFlowPerformanceLevelNV, pub flags: OpticalFlowSessionCreateFlagsNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OpticalFlowSessionCreateInfoNV<'_> {} unsafe impl Sync for OpticalFlowSessionCreateInfoNV<'_> {} impl ::core::default::Default for OpticalFlowSessionCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), width: u32::default(), height: u32::default(), image_format: Format::default(), flow_vector_format: Format::default(), cost_format: 
Format::default(), output_grid_size: OpticalFlowGridSizeFlagsNV::default(), hint_grid_size: OpticalFlowGridSizeFlagsNV::default(), performance_level: OpticalFlowPerformanceLevelNV::default(), flags: OpticalFlowSessionCreateFlagsNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OpticalFlowSessionCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_SESSION_CREATE_INFO_NV; } pub unsafe trait ExtendsOpticalFlowSessionCreateInfoNV {} impl<'a> OpticalFlowSessionCreateInfoNV<'a> { #[inline] pub fn width(mut self, width: u32) -> Self { self.width = width; self } #[inline] pub fn height(mut self, height: u32) -> Self { self.height = height; self } #[inline] pub fn image_format(mut self, image_format: Format) -> Self { self.image_format = image_format; self } #[inline] pub fn flow_vector_format(mut self, flow_vector_format: Format) -> Self { self.flow_vector_format = flow_vector_format; self } #[inline] pub fn cost_format(mut self, cost_format: Format) -> Self { self.cost_format = cost_format; self } #[inline] pub fn output_grid_size(mut self, output_grid_size: OpticalFlowGridSizeFlagsNV) -> Self { self.output_grid_size = output_grid_size; self } #[inline] pub fn hint_grid_size(mut self, hint_grid_size: OpticalFlowGridSizeFlagsNV) -> Self { self.hint_grid_size = hint_grid_size; self } #[inline] pub fn performance_level(mut self, performance_level: OpticalFlowPerformanceLevelNV) -> Self { self.performance_level = performance_level; self } #[inline] pub fn flags(mut self, flags: OpticalFlowSessionCreateFlagsNV) -> Self { self.flags = flags; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OpticalFlowSessionCreatePrivateDataInfoNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub id: u32, pub size: u32, pub p_private_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OpticalFlowSessionCreatePrivateDataInfoNV<'_> {} unsafe impl Sync for OpticalFlowSessionCreatePrivateDataInfoNV<'_> {} impl ::core::default::Default for OpticalFlowSessionCreatePrivateDataInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), id: u32::default(), size: u32::default(), p_private_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OpticalFlowSessionCreatePrivateDataInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV; } unsafe impl ExtendsOpticalFlowSessionCreateInfoNV for OpticalFlowSessionCreatePrivateDataInfoNV<'_> { } impl<'a> OpticalFlowSessionCreatePrivateDataInfoNV<'a> { #[inline] pub fn id(mut self, id: u32) -> Self { self.id = id; self } #[inline] pub fn size(mut self, size: u32) -> Self { self.size = size; self } #[inline] pub fn private_data(mut self, private_data: *const c_void) -> Self { self.p_private_data = private_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OpticalFlowExecuteInfoNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: OpticalFlowExecuteFlagsNV, pub region_count: u32, pub p_regions: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OpticalFlowExecuteInfoNV<'_> {} unsafe impl Sync for OpticalFlowExecuteInfoNV<'_> {} impl ::core::default::Default for OpticalFlowExecuteInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: OpticalFlowExecuteFlagsNV::default(), region_count: u32::default(), p_regions: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OpticalFlowExecuteInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_EXECUTE_INFO_NV; } impl<'a> OpticalFlowExecuteInfoNV<'a> { #[inline] pub fn flags(mut self, flags: OpticalFlowExecuteFlagsNV) -> Self { self.flags = flags; self } #[inline] pub fn regions(mut self, regions: &'a [Rect2D]) -> Self { self.region_count = regions.len() as _; self.p_regions = regions.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFaultFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub device_fault: Bool32, pub device_fault_vendor_binary: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFaultFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFaultFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceFaultFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), device_fault: Bool32::default(), device_fault_vendor_binary: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFaultFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FAULT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFaultFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFaultFeaturesEXT<'_> {} impl<'a> PhysicalDeviceFaultFeaturesEXT<'a> { #[inline] pub fn device_fault(mut self, device_fault: bool) -> Self { self.device_fault = device_fault.into(); self } #[inline] pub fn device_fault_vendor_binary(mut self, device_fault_vendor_binary: bool) -> Self { self.device_fault_vendor_binary = device_fault_vendor_binary.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DeviceFaultAddressInfoEXT { pub address_type: DeviceFaultAddressTypeEXT, pub reported_address: DeviceAddress, pub address_precision: DeviceSize, } impl DeviceFaultAddressInfoEXT { #[inline] pub fn address_type(mut self, address_type: DeviceFaultAddressTypeEXT) -> Self { self.address_type = address_type; self } #[inline] pub fn reported_address(mut self, reported_address: DeviceAddress) -> Self { self.reported_address = reported_address; self } #[inline] pub fn address_precision(mut self, address_precision: DeviceSize) -> Self { self.address_precision = address_precision; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceFaultVendorInfoEXT { pub description: [c_char; MAX_DESCRIPTION_SIZE], pub vendor_fault_code: u64, pub vendor_fault_data: u64, } #[cfg(feature = "debug")] impl fmt::Debug for DeviceFaultVendorInfoEXT { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DeviceFaultVendorInfoEXT") .field("description", &self.description_as_c_str()) .field("vendor_fault_code", &self.vendor_fault_code) .field("vendor_fault_data", &self.vendor_fault_data) .finish() } } impl ::core::default::Default for DeviceFaultVendorInfoEXT { #[inline] fn default() -> Self { Self { description: unsafe { ::core::mem::zeroed() }, vendor_fault_code: u64::default(), vendor_fault_data: u64::default(), } } } impl DeviceFaultVendorInfoEXT { #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } #[inline] pub fn vendor_fault_code(mut self, vendor_fault_code: u64) -> Self { self.vendor_fault_code = vendor_fault_code; self } #[inline] pub fn vendor_fault_data(mut self, vendor_fault_data: u64) -> Self { self.vendor_fault_data = vendor_fault_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceFaultCountsEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub address_info_count: u32, pub vendor_info_count: u32, pub vendor_binary_size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceFaultCountsEXT<'_> {} unsafe impl Sync for DeviceFaultCountsEXT<'_> {} impl ::core::default::Default for DeviceFaultCountsEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), address_info_count: u32::default(),
vendor_info_count: u32::default(), vendor_binary_size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceFaultCountsEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_FAULT_COUNTS_EXT; } impl<'a> DeviceFaultCountsEXT<'a> { #[inline] pub fn address_info_count(mut self, address_info_count: u32) -> Self { self.address_info_count = address_info_count; self } #[inline] pub fn vendor_info_count(mut self, vendor_info_count: u32) -> Self { self.vendor_info_count = vendor_info_count; self } #[inline] pub fn vendor_binary_size(mut self, vendor_binary_size: DeviceSize) -> Self { self.vendor_binary_size = vendor_binary_size; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceFaultInfoEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub description: [c_char; MAX_DESCRIPTION_SIZE], pub p_address_infos: *mut DeviceFaultAddressInfoEXT, pub p_vendor_infos: *mut DeviceFaultVendorInfoEXT, pub p_vendor_binary_data: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceFaultInfoEXT<'_> {} unsafe impl Sync for DeviceFaultInfoEXT<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for DeviceFaultInfoEXT<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DeviceFaultInfoEXT") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("description", &self.description_as_c_str()) .field("p_address_infos", &self.p_address_infos) .field("p_vendor_infos", &self.p_vendor_infos) .field("p_vendor_binary_data", &self.p_vendor_binary_data) .finish() } } impl ::core::default::Default for DeviceFaultInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), description: unsafe { ::core::mem::zeroed() }, p_address_infos: ::core::ptr::null_mut(), p_vendor_infos: ::core::ptr::null_mut(), p_vendor_binary_data: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceFaultInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_FAULT_INFO_EXT; } impl<'a> DeviceFaultInfoEXT<'a> { #[inline] pub fn description( mut self, description: &CStr, ) -> core::result::Result<Self, CStrTooLargeForStaticArray> { write_c_str_slice_with_nul(&mut self.description, description).map(|()| self) } #[inline] pub fn description_as_c_str(&self) -> core::result::Result<&CStr, FromBytesUntilNulError> { wrap_c_str_slice_until_nul(&self.description) } #[inline] pub fn address_infos(mut self, address_infos: &'a mut DeviceFaultAddressInfoEXT) -> Self { self.p_address_infos = address_infos; self } #[inline] pub fn vendor_infos(mut self, vendor_infos: &'a mut DeviceFaultVendorInfoEXT) -> Self { self.p_vendor_infos = vendor_infos; self } #[inline] pub fn vendor_binary_data(mut self, vendor_binary_data: *mut c_void) -> Self { self.p_vendor_binary_data = vendor_binary_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceFaultVendorBinaryHeaderVersionOneEXT { pub header_size: u32, pub header_version: DeviceFaultVendorBinaryHeaderVersionEXT, pub vendor_id: u32, pub device_id: u32, pub driver_version: u32, pub pipeline_cache_uuid: [u8; UUID_SIZE], pub application_name_offset: u32, pub application_version: u32, pub engine_name_offset: u32, pub engine_version: u32, pub api_version: u32, } impl ::core::default::Default for DeviceFaultVendorBinaryHeaderVersionOneEXT { #[inline] fn default() -> Self { Self { header_size: u32::default(),
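// Usage sketch (not part of the generated bindings): the `description` field of DeviceFaultVendorInfoEXT and DeviceFaultInfoEXT is a fixed-size [c_char; MAX_DESCRIPTION_SIZE] buffer, so the setter is fallible (the string may not fit) while reads go through `description_as_c_str`. A minimal round-trip, assuming `core::ffi::CStr` is in scope and using a made-up message:
//
//     let msg = CStr::from_bytes_with_nul(b"page fault in vertex shader\0").unwrap();
//     let vendor_info = vk::DeviceFaultVendorInfoEXT::default()
//         .vendor_fault_code(0xdead_beef)
//         .description(msg)
//         .expect("description fits in MAX_DESCRIPTION_SIZE");
//     assert_eq!(vendor_info.description_as_c_str().unwrap(), msg);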
header_version: DeviceFaultVendorBinaryHeaderVersionEXT::default(), vendor_id: u32::default(), device_id: u32::default(), driver_version: u32::default(), pipeline_cache_uuid: unsafe { ::core::mem::zeroed() }, application_name_offset: u32::default(), application_version: u32::default(), engine_name_offset: u32::default(), engine_version: u32::default(), api_version: u32::default(), } } } impl DeviceFaultVendorBinaryHeaderVersionOneEXT { #[inline] pub fn header_size(mut self, header_size: u32) -> Self { self.header_size = header_size; self } #[inline] pub fn header_version( mut self, header_version: DeviceFaultVendorBinaryHeaderVersionEXT, ) -> Self { self.header_version = header_version; self } #[inline] pub fn vendor_id(mut self, vendor_id: u32) -> Self { self.vendor_id = vendor_id; self } #[inline] pub fn device_id(mut self, device_id: u32) -> Self { self.device_id = device_id; self } #[inline] pub fn driver_version(mut self, driver_version: u32) -> Self { self.driver_version = driver_version; self } #[inline] pub fn pipeline_cache_uuid(mut self, pipeline_cache_uuid: [u8; UUID_SIZE]) -> Self { self.pipeline_cache_uuid = pipeline_cache_uuid; self } #[inline] pub fn application_name_offset(mut self, application_name_offset: u32) -> Self { self.application_name_offset = application_name_offset; self } #[inline] pub fn application_version(mut self, application_version: u32) -> Self { self.application_version = application_version; self } #[inline] pub fn engine_name_offset(mut self, engine_name_offset: u32) -> Self { self.engine_name_offset = engine_name_offset; self } #[inline] pub fn engine_version(mut self, engine_version: u32) -> Self { self.engine_version = engine_version; self } #[inline] pub fn api_version(mut self, api_version: u32) -> Self { self.api_version = api_version; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pipeline_library_group_handles: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pipeline_library_group_handles: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'_> {} impl<'a> PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT<'a> { #[inline] pub fn pipeline_library_group_handles(mut self, pipeline_library_group_handles: bool) -> Self { self.pipeline_library_group_handles = pipeline_library_group_handles.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DepthBiasInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub depth_bias_constant_factor: f32, pub depth_bias_clamp: f32, pub depth_bias_slope_factor: f32, pub _marker: 
PhantomData<&'a ()>, } unsafe impl Send for DepthBiasInfoEXT<'_> {} unsafe impl Sync for DepthBiasInfoEXT<'_> {} impl ::core::default::Default for DepthBiasInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), depth_bias_constant_factor: f32::default(), depth_bias_clamp: f32::default(), depth_bias_slope_factor: f32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DepthBiasInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEPTH_BIAS_INFO_EXT; } pub unsafe trait ExtendsDepthBiasInfoEXT {} impl<'a> DepthBiasInfoEXT<'a> { #[inline] pub fn depth_bias_constant_factor(mut self, depth_bias_constant_factor: f32) -> Self { self.depth_bias_constant_factor = depth_bias_constant_factor; self } #[inline] pub fn depth_bias_clamp(mut self, depth_bias_clamp: f32) -> Self { self.depth_bias_clamp = depth_bias_clamp; self } #[inline] pub fn depth_bias_slope_factor(mut self, depth_bias_slope_factor: f32) -> Self { self.depth_bias_slope_factor = depth_bias_slope_factor; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsDepthBiasInfoEXT + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DepthBiasRepresentationInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub depth_bias_representation: DepthBiasRepresentationEXT, pub depth_bias_exact: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DepthBiasRepresentationInfoEXT<'_> {} unsafe impl Sync for DepthBiasRepresentationInfoEXT<'_> {} impl ::core::default::Default for DepthBiasRepresentationInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), depth_bias_representation: DepthBiasRepresentationEXT::default(), depth_bias_exact: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DepthBiasRepresentationInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEPTH_BIAS_REPRESENTATION_INFO_EXT; } unsafe impl ExtendsDepthBiasInfoEXT for DepthBiasRepresentationInfoEXT<'_> {} unsafe impl ExtendsPipelineRasterizationStateCreateInfo for DepthBiasRepresentationInfoEXT<'_> {} impl<'a> DepthBiasRepresentationInfoEXT<'a> { #[inline] pub fn depth_bias_representation( mut self, depth_bias_representation: DepthBiasRepresentationEXT, ) -> Self { self.depth_bias_representation = depth_bias_representation; self } #[inline] pub fn depth_bias_exact(mut self, depth_bias_exact: bool) -> Self { self.depth_bias_exact = depth_bias_exact.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DecompressMemoryRegionNV { pub src_address: DeviceAddress, pub dst_address: DeviceAddress, pub compressed_size: DeviceSize, pub decompressed_size: DeviceSize, pub decompression_method: MemoryDecompressionMethodFlagsNV, } impl DecompressMemoryRegionNV { #[inline]
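// Usage sketch (not part of the generated bindings): DepthBiasRepresentationInfoEXT extends both DepthBiasInfoEXT (the dynamic depth-bias path of VK_EXT_depth_bias_control) and PipelineRasterizationStateCreateInfo (the static pipeline path). Chaining it into the dynamic-state struct might look like this; the numeric values are placeholders.
//
//     let mut representation = vk::DepthBiasRepresentationInfoEXT::default()
//         .depth_bias_representation(vk::DepthBiasRepresentationEXT::FLOAT)
//         .depth_bias_exact(true);
//     let _depth_bias_info = vk::DepthBiasInfoEXT::default()
//         .depth_bias_constant_factor(1.25)
//         .depth_bias_slope_factor(1.75)
//         .push_next(&mut representation);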
pub fn src_address(mut self, src_address: DeviceAddress) -> Self { self.src_address = src_address; self } #[inline] pub fn dst_address(mut self, dst_address: DeviceAddress) -> Self { self.dst_address = dst_address; self } #[inline] pub fn compressed_size(mut self, compressed_size: DeviceSize) -> Self { self.compressed_size = compressed_size; self } #[inline] pub fn decompressed_size(mut self, decompressed_size: DeviceSize) -> Self { self.decompressed_size = decompressed_size; self } #[inline] pub fn decompression_method( mut self, decompression_method: MemoryDecompressionMethodFlagsNV, ) -> Self { self.decompression_method = decompression_method; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_core_mask: u64, pub shader_core_count: u32, pub shader_warps_per_core: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'_> {} unsafe impl Sync for PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'_> {} impl ::core::default::Default for PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_core_mask: u64::default(), shader_core_count: u32::default(), shader_warps_per_core: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'_> {} impl<'a> PhysicalDeviceShaderCoreBuiltinsPropertiesARM<'a> { #[inline] pub fn shader_core_mask(mut self, shader_core_mask: u64) -> Self { self.shader_core_mask = shader_core_mask; self } #[inline] pub fn shader_core_count(mut self, shader_core_count: u32) -> Self { self.shader_core_count = shader_core_count; self } #[inline] pub fn shader_warps_per_core(mut self, shader_warps_per_core: u32) -> Self { self.shader_warps_per_core = shader_warps_per_core; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_core_builtins: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'_> {} unsafe impl Sync for PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'_> {} impl ::core::default::Default for PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_core_builtins: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'_> {} impl<'a> PhysicalDeviceShaderCoreBuiltinsFeaturesARM<'a> { #[inline] pub fn shader_core_builtins(mut self, shader_core_builtins: bool) -> Self { self.shader_core_builtins = shader_core_builtins.into(); self } } #[repr(C)] #[cfg_attr(feature = 
"debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct FrameBoundaryEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: FrameBoundaryFlagsEXT, pub frame_id: u64, pub image_count: u32, pub p_images: *const Image, pub buffer_count: u32, pub p_buffers: *const Buffer, pub tag_name: u64, pub tag_size: usize, pub p_tag: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for FrameBoundaryEXT<'_> {} unsafe impl Sync for FrameBoundaryEXT<'_> {} impl ::core::default::Default for FrameBoundaryEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: FrameBoundaryFlagsEXT::default(), frame_id: u64::default(), image_count: u32::default(), p_images: ::core::ptr::null(), buffer_count: u32::default(), p_buffers: ::core::ptr::null(), tag_name: u64::default(), tag_size: usize::default(), p_tag: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for FrameBoundaryEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::FRAME_BOUNDARY_EXT; } unsafe impl ExtendsSubmitInfo for FrameBoundaryEXT<'_> {} unsafe impl ExtendsSubmitInfo2 for FrameBoundaryEXT<'_> {} unsafe impl ExtendsPresentInfoKHR for FrameBoundaryEXT<'_> {} unsafe impl ExtendsBindSparseInfo for FrameBoundaryEXT<'_> {} impl<'a> FrameBoundaryEXT<'a> { #[inline] pub fn flags(mut self, flags: FrameBoundaryFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn frame_id(mut self, frame_id: u64) -> Self { self.frame_id = frame_id; self } #[inline] pub fn images(mut self, images: &'a [Image]) -> Self { self.image_count = images.len() as _; self.p_images = images.as_ptr(); self } #[inline] pub fn buffers(mut self, buffers: &'a [Buffer]) -> Self { self.buffer_count = buffers.len() as _; self.p_buffers = buffers.as_ptr(); self } #[inline] pub fn tag_name(mut self, tag_name: u64) -> Self { self.tag_name = tag_name; self } #[inline] pub fn tag(mut self, tag: &'a [u8]) -> Self { self.tag_size = tag.len(); self.p_tag = tag.as_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceFrameBoundaryFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub frame_boundary: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceFrameBoundaryFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceFrameBoundaryFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceFrameBoundaryFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), frame_boundary: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceFrameBoundaryFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFrameBoundaryFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFrameBoundaryFeaturesEXT<'_> {} impl<'a> PhysicalDeviceFrameBoundaryFeaturesEXT<'a> { #[inline] pub fn frame_boundary(mut self, frame_boundary: bool) -> Self { self.frame_boundary = frame_boundary.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub dynamic_rendering_unused_attachments: 
Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), dynamic_rendering_unused_attachments: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'_> { } impl<'a> PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT<'a> { #[inline] pub fn dynamic_rendering_unused_attachments( mut self, dynamic_rendering_unused_attachments: bool, ) -> Self { self.dynamic_rendering_unused_attachments = dynamic_rendering_unused_attachments.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfacePresentModeEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_mode: PresentModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfacePresentModeEXT<'_> {} unsafe impl Sync for SurfacePresentModeEXT<'_> {} impl ::core::default::Default for SurfacePresentModeEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_mode: PresentModeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfacePresentModeEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PRESENT_MODE_EXT; } unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfacePresentModeEXT<'_> {} impl<'a> SurfacePresentModeEXT<'a> { #[inline] pub fn present_mode(mut self, present_mode: PresentModeKHR) -> Self { self.present_mode = present_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfacePresentScalingCapabilitiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub supported_present_scaling: PresentScalingFlagsEXT, pub supported_present_gravity_x: PresentGravityFlagsEXT, pub supported_present_gravity_y: PresentGravityFlagsEXT, pub min_scaled_image_extent: Extent2D, pub max_scaled_image_extent: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfacePresentScalingCapabilitiesEXT<'_> {} unsafe impl Sync for SurfacePresentScalingCapabilitiesEXT<'_> {} impl ::core::default::Default for SurfacePresentScalingCapabilitiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), supported_present_scaling: PresentScalingFlagsEXT::default(), supported_present_gravity_x: PresentGravityFlagsEXT::default(), supported_present_gravity_y: PresentGravityFlagsEXT::default(), min_scaled_image_extent: Extent2D::default(), max_scaled_image_extent: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfacePresentScalingCapabilitiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PRESENT_SCALING_CAPABILITIES_EXT; } unsafe impl 
ExtendsSurfaceCapabilities2KHR for SurfacePresentScalingCapabilitiesEXT<'_> {} impl<'a> SurfacePresentScalingCapabilitiesEXT<'a> { #[inline] pub fn supported_present_scaling( mut self, supported_present_scaling: PresentScalingFlagsEXT, ) -> Self { self.supported_present_scaling = supported_present_scaling; self } #[inline] pub fn supported_present_gravity_x( mut self, supported_present_gravity_x: PresentGravityFlagsEXT, ) -> Self { self.supported_present_gravity_x = supported_present_gravity_x; self } #[inline] pub fn supported_present_gravity_y( mut self, supported_present_gravity_y: PresentGravityFlagsEXT, ) -> Self { self.supported_present_gravity_y = supported_present_gravity_y; self } #[inline] pub fn min_scaled_image_extent(mut self, min_scaled_image_extent: Extent2D) -> Self { self.min_scaled_image_extent = min_scaled_image_extent; self } #[inline] pub fn max_scaled_image_extent(mut self, max_scaled_image_extent: Extent2D) -> Self { self.max_scaled_image_extent = max_scaled_image_extent; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SurfacePresentModeCompatibilityEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub present_mode_count: u32, pub p_present_modes: *mut PresentModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SurfacePresentModeCompatibilityEXT<'_> {} unsafe impl Sync for SurfacePresentModeCompatibilityEXT<'_> {} impl ::core::default::Default for SurfacePresentModeCompatibilityEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), present_mode_count: u32::default(), p_present_modes: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SurfacePresentModeCompatibilityEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PRESENT_MODE_COMPATIBILITY_EXT; } unsafe impl ExtendsSurfaceCapabilities2KHR for SurfacePresentModeCompatibilityEXT<'_> {} impl<'a> SurfacePresentModeCompatibilityEXT<'a> { #[inline] pub fn present_modes(mut self, present_modes: &'a mut [PresentModeKHR]) -> Self { self.present_mode_count = present_modes.len() as _; self.p_present_modes = present_modes.as_mut_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub swapchain_maintenance1: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), swapchain_maintenance1: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'_> {} impl<'a> PhysicalDeviceSwapchainMaintenance1FeaturesEXT<'a> { #[inline] pub fn swapchain_maintenance1(mut self, swapchain_maintenance1: bool) -> Self { self.swapchain_maintenance1 = 
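// Usage sketch (not part of the generated bindings): with VK_EXT_surface_maintenance1 the present mode being queried goes *into* the call through SurfacePresentModeEXT, while the compatible modes come back *out* through SurfacePresentModeCompatibilityEXT chained onto SurfaceCapabilities2KHR; both chains are then handed to the VK_KHR_get_surface_capabilities2 query. `surface` is a hypothetical caller-provided handle.
//
//     let mut query_mode = vk::SurfacePresentModeEXT::default().present_mode(vk::PresentModeKHR::FIFO);
//     let _surface_info = vk::PhysicalDeviceSurfaceInfo2KHR::default()
//         .surface(surface)
//         .push_next(&mut query_mode);
//     let mut compatible = [vk::PresentModeKHR::default(); 8];
//     let mut compat_info = vk::SurfacePresentModeCompatibilityEXT::default().present_modes(&mut compatible);
//     let _caps2 = vk::SurfaceCapabilities2KHR::default().push_next(&mut compat_info);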
swapchain_maintenance1.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainPresentFenceInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain_count: u32, pub p_fences: *const Fence, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainPresentFenceInfoEXT<'_> {} unsafe impl Sync for SwapchainPresentFenceInfoEXT<'_> {} impl ::core::default::Default for SwapchainPresentFenceInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain_count: u32::default(), p_fences: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainPresentFenceInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_FENCE_INFO_EXT; } unsafe impl ExtendsPresentInfoKHR for SwapchainPresentFenceInfoEXT<'_> {} impl<'a> SwapchainPresentFenceInfoEXT<'a> { #[inline] pub fn fences(mut self, fences: &'a [Fence]) -> Self { self.swapchain_count = fences.len() as _; self.p_fences = fences.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainPresentModesCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub present_mode_count: u32, pub p_present_modes: *const PresentModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainPresentModesCreateInfoEXT<'_> {} unsafe impl Sync for SwapchainPresentModesCreateInfoEXT<'_> {} impl ::core::default::Default for SwapchainPresentModesCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), present_mode_count: u32::default(), p_present_modes: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainPresentModesCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT; } unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentModesCreateInfoEXT<'_> {} impl<'a> SwapchainPresentModesCreateInfoEXT<'a> { #[inline] pub fn present_modes(mut self, present_modes: &'a [PresentModeKHR]) -> Self { self.present_mode_count = present_modes.len() as _; self.p_present_modes = present_modes.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainPresentModeInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain_count: u32, pub p_present_modes: *const PresentModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainPresentModeInfoEXT<'_> {} unsafe impl Sync for SwapchainPresentModeInfoEXT<'_> {} impl ::core::default::Default for SwapchainPresentModeInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain_count: u32::default(), p_present_modes: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainPresentModeInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_MODE_INFO_EXT; } unsafe impl ExtendsPresentInfoKHR for SwapchainPresentModeInfoEXT<'_> {} impl<'a> SwapchainPresentModeInfoEXT<'a> { #[inline] pub fn present_modes(mut self, present_modes: &'a [PresentModeKHR]) -> Self { self.swapchain_count = present_modes.len() as _; self.p_present_modes = present_modes.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = 
"debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainPresentScalingCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub scaling_behavior: PresentScalingFlagsEXT, pub present_gravity_x: PresentGravityFlagsEXT, pub present_gravity_y: PresentGravityFlagsEXT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainPresentScalingCreateInfoEXT<'_> {} unsafe impl Sync for SwapchainPresentScalingCreateInfoEXT<'_> {} impl ::core::default::Default for SwapchainPresentScalingCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), scaling_behavior: PresentScalingFlagsEXT::default(), present_gravity_x: PresentGravityFlagsEXT::default(), present_gravity_y: PresentGravityFlagsEXT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainPresentScalingCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT; } unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentScalingCreateInfoEXT<'_> {} impl<'a> SwapchainPresentScalingCreateInfoEXT<'a> { #[inline] pub fn scaling_behavior(mut self, scaling_behavior: PresentScalingFlagsEXT) -> Self { self.scaling_behavior = scaling_behavior; self } #[inline] pub fn present_gravity_x(mut self, present_gravity_x: PresentGravityFlagsEXT) -> Self { self.present_gravity_x = present_gravity_x; self } #[inline] pub fn present_gravity_y(mut self, present_gravity_y: PresentGravityFlagsEXT) -> Self { self.present_gravity_y = present_gravity_y; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ReleaseSwapchainImagesInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub swapchain: SwapchainKHR, pub image_index_count: u32, pub p_image_indices: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ReleaseSwapchainImagesInfoEXT<'_> {} unsafe impl Sync for ReleaseSwapchainImagesInfoEXT<'_> {} impl ::core::default::Default for ReleaseSwapchainImagesInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), swapchain: SwapchainKHR::default(), image_index_count: u32::default(), p_image_indices: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ReleaseSwapchainImagesInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RELEASE_SWAPCHAIN_IMAGES_INFO_EXT; } impl<'a> ReleaseSwapchainImagesInfoEXT<'a> { #[inline] pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self { self.swapchain = swapchain; self } #[inline] pub fn image_indices(mut self, image_indices: &'a [u32]) -> Self { self.image_index_count = image_indices.len() as _; self.p_image_indices = image_indices.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDepthBiasControlFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub depth_bias_control: Bool32, pub least_representable_value_force_unorm_representation: Bool32, pub float_representation: Bool32, pub depth_bias_exact: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDepthBiasControlFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceDepthBiasControlFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceDepthBiasControlFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: 
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), depth_bias_control: Bool32::default(), least_representable_value_force_unorm_representation: Bool32::default(), float_representation: Bool32::default(), depth_bias_exact: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDepthBiasControlFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthBiasControlFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthBiasControlFeaturesEXT<'_> {} impl<'a> PhysicalDeviceDepthBiasControlFeaturesEXT<'a> { #[inline] pub fn depth_bias_control(mut self, depth_bias_control: bool) -> Self { self.depth_bias_control = depth_bias_control.into(); self } #[inline] pub fn least_representable_value_force_unorm_representation( mut self, least_representable_value_force_unorm_representation: bool, ) -> Self { self.least_representable_value_force_unorm_representation = least_representable_value_force_unorm_representation.into(); self } #[inline] pub fn float_representation(mut self, float_representation: bool) -> Self { self.float_representation = float_representation.into(); self } #[inline] pub fn depth_bias_exact(mut self, depth_bias_exact: bool) -> Self { self.depth_bias_exact = depth_bias_exact.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_invocation_reorder: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_tracing_invocation_reorder: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'_> {} impl<'a> PhysicalDeviceRayTracingInvocationReorderFeaturesNV<'a> { #[inline] pub fn ray_tracing_invocation_reorder(mut self, ray_tracing_invocation_reorder: bool) -> Self { self.ray_tracing_invocation_reorder = ray_tracing_invocation_reorder.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_invocation_reorder_reordering_hint: RayTracingInvocationReorderModeNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), 
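// Usage sketch (assumption, mirroring the other feature structs in this module):
// `PhysicalDeviceDepthBiasControlFeaturesEXT` implements `ExtendsDeviceCreateInfo`, so the
// feature is normally enabled by filling it in and chaining it onto `DeviceCreateInfo`.
//
//     let mut depth_bias_control = PhysicalDeviceDepthBiasControlFeaturesEXT::default()
//         .depth_bias_control(true)
//         .depth_bias_exact(true);
//     let device_create_info = DeviceCreateInfo::default().push_next(&mut depth_bias_control);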
ray_tracing_invocation_reorder_reordering_hint: RayTracingInvocationReorderModeNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'_> { } impl<'a> PhysicalDeviceRayTracingInvocationReorderPropertiesNV<'a> { #[inline] pub fn ray_tracing_invocation_reorder_reordering_hint( mut self, ray_tracing_invocation_reorder_reordering_hint: RayTracingInvocationReorderModeNV, ) -> Self { self.ray_tracing_invocation_reorder_reordering_hint = ray_tracing_invocation_reorder_reordering_hint; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub extended_sparse_address_space: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), extended_sparse_address_space: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'_> {} impl<'a> PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV<'a> { #[inline] pub fn extended_sparse_address_space(mut self, extended_sparse_address_space: bool) -> Self { self.extended_sparse_address_space = extended_sparse_address_space.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub extended_sparse_address_space_size: DeviceSize, pub extended_sparse_image_usage_flags: ImageUsageFlags, pub extended_sparse_buffer_usage_flags: BufferUsageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), extended_sparse_address_space_size: DeviceSize::default(), extended_sparse_image_usage_flags: ImageUsageFlags::default(), extended_sparse_buffer_usage_flags: BufferUsageFlags::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'_> { } impl<'a> 
PhysicalDeviceExtendedSparseAddressSpacePropertiesNV<'a> { #[inline] pub fn extended_sparse_address_space_size( mut self, extended_sparse_address_space_size: DeviceSize, ) -> Self { self.extended_sparse_address_space_size = extended_sparse_address_space_size; self } #[inline] pub fn extended_sparse_image_usage_flags( mut self, extended_sparse_image_usage_flags: ImageUsageFlags, ) -> Self { self.extended_sparse_image_usage_flags = extended_sparse_image_usage_flags; self } #[inline] pub fn extended_sparse_buffer_usage_flags( mut self, extended_sparse_buffer_usage_flags: BufferUsageFlags, ) -> Self { self.extended_sparse_buffer_usage_flags = extended_sparse_buffer_usage_flags; self } } #[repr(C)] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DirectDriverLoadingInfoLUNARG<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub flags: DirectDriverLoadingFlagsLUNARG, pub pfn_get_instance_proc_addr: PFN_vkGetInstanceProcAddrLUNARG, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DirectDriverLoadingInfoLUNARG<'_> {} unsafe impl Sync for DirectDriverLoadingInfoLUNARG<'_> {} #[cfg(feature = "debug")] impl fmt::Debug for DirectDriverLoadingInfoLUNARG<'_> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DirectDriverLoadingInfoLUNARG") .field("s_type", &self.s_type) .field("p_next", &self.p_next) .field("flags", &self.flags) .field( "pfn_get_instance_proc_addr", &(self.pfn_get_instance_proc_addr.map(|x| x as *const ())), ) .finish() } } impl ::core::default::Default for DirectDriverLoadingInfoLUNARG<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), flags: DirectDriverLoadingFlagsLUNARG::default(), pfn_get_instance_proc_addr: PFN_vkGetInstanceProcAddrLUNARG::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DirectDriverLoadingInfoLUNARG<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DIRECT_DRIVER_LOADING_INFO_LUNARG; } impl<'a> DirectDriverLoadingInfoLUNARG<'a> { #[inline] pub fn flags(mut self, flags: DirectDriverLoadingFlagsLUNARG) -> Self { self.flags = flags; self } #[inline] pub fn pfn_get_instance_proc_addr( mut self, pfn_get_instance_proc_addr: PFN_vkGetInstanceProcAddrLUNARG, ) -> Self { self.pfn_get_instance_proc_addr = pfn_get_instance_proc_addr; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DirectDriverLoadingListLUNARG<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub mode: DirectDriverLoadingModeLUNARG, pub driver_count: u32, pub p_drivers: *const DirectDriverLoadingInfoLUNARG<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DirectDriverLoadingListLUNARG<'_> {} unsafe impl Sync for DirectDriverLoadingListLUNARG<'_> {} impl ::core::default::Default for DirectDriverLoadingListLUNARG<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), mode: DirectDriverLoadingModeLUNARG::default(), driver_count: u32::default(), p_drivers: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DirectDriverLoadingListLUNARG<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DIRECT_DRIVER_LOADING_LIST_LUNARG; } unsafe impl ExtendsInstanceCreateInfo for DirectDriverLoadingListLUNARG<'_> {} impl<'a> DirectDriverLoadingListLUNARG<'a> { #[inline] pub fn mode(mut self, mode: DirectDriverLoadingModeLUNARG) -> Self { self.mode = mode; self } #[inline] pub fn drivers(mut self, 
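// Usage sketch (assumption): `DirectDriverLoadingListLUNARG` below wraps a slice of
// `DirectDriverLoadingInfoLUNARG` entries and extends `InstanceCreateInfo`.
// `my_get_instance_proc_addr` is a hypothetical function matching the
// `PFN_vkGetInstanceProcAddrLUNARG` signature.
//
//     let drivers = [DirectDriverLoadingInfoLUNARG::default()
//         .pfn_get_instance_proc_addr(Some(my_get_instance_proc_addr))];
//     let mut driver_list = DirectDriverLoadingListLUNARG::default()
//         .mode(DirectDriverLoadingModeLUNARG::EXCLUSIVE)
//         .drivers(&drivers);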
drivers: &'a [DirectDriverLoadingInfoLUNARG<'a>]) -> Self { self.driver_count = drivers.len() as _; self.p_drivers = drivers.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub multiview_per_view_viewports: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), multiview_per_view_viewports: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM<'a> { #[inline] pub fn multiview_per_view_viewports(mut self, multiview_per_view_viewports: bool) -> Self { self.multiview_per_view_viewports = multiview_per_view_viewports.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ray_tracing_position_fetch: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ray_tracing_position_fetch: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'_> {} impl<'a> PhysicalDeviceRayTracingPositionFetchFeaturesKHR<'a> { #[inline] pub fn ray_tracing_position_fetch(mut self, ray_tracing_position_fetch: bool) -> Self { self.ray_tracing_position_fetch = ray_tracing_position_fetch.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceImageSubresourceInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_create_info: *const ImageCreateInfo<'a>, pub p_subresource: *const ImageSubresource2KHR<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceImageSubresourceInfoKHR<'_> {} unsafe impl Sync for DeviceImageSubresourceInfoKHR<'_> {} impl ::core::default::Default for DeviceImageSubresourceInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_create_info: 
::core::ptr::null(), p_subresource: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceImageSubresourceInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_IMAGE_SUBRESOURCE_INFO_KHR; } impl<'a> DeviceImageSubresourceInfoKHR<'a> { #[inline] pub fn create_info(mut self, create_info: &'a ImageCreateInfo<'a>) -> Self { self.p_create_info = create_info; self } #[inline] pub fn subresource(mut self, subresource: &'a ImageSubresource2KHR<'a>) -> Self { self.p_subresource = subresource; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderCorePropertiesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub pixel_rate: u32, pub texel_rate: u32, pub fma_rate: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderCorePropertiesARM<'_> {} unsafe impl Sync for PhysicalDeviceShaderCorePropertiesARM<'_> {} impl ::core::default::Default for PhysicalDeviceShaderCorePropertiesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), pixel_rate: u32::default(), texel_rate: u32::default(), fma_rate: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderCorePropertiesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCorePropertiesARM<'_> {} impl<'a> PhysicalDeviceShaderCorePropertiesARM<'a> { #[inline] pub fn pixel_rate(mut self, pixel_rate: u32) -> Self { self.pixel_rate = pixel_rate; self } #[inline] pub fn texel_rate(mut self, texel_rate: u32) -> Self { self.texel_rate = texel_rate; self } #[inline] pub fn fma_rate(mut self, fma_rate: u32) -> Self { self.fma_rate = fma_rate; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub multiview_per_view_render_areas: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), multiview_per_view_render_areas: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM<'a> { #[inline] pub fn multiview_per_view_render_areas( mut self, multiview_per_view_render_areas: bool, ) -> Self { self.multiview_per_view_render_areas = multiview_per_view_render_areas.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'a> { pub s_type: StructureType, 
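// Usage sketch (assumption): `DeviceImageSubresourceInfoKHR` above (VK_KHR_maintenance5)
// borrows an `ImageCreateInfo` and an `ImageSubresource2KHR`, so a subresource layout can be
// queried from the create-info alone, without creating the image first.
//
//     let create_info = ImageCreateInfo::default();
//     let subresource = ImageSubresource2KHR::default();
//     let info = DeviceImageSubresourceInfoKHR::default()
//         .create_info(&create_info)
//         .subresource(&subresource);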
pub p_next: *const c_void, pub per_view_render_area_count: u32, pub p_per_view_render_areas: *const Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'_> {} unsafe impl Sync for MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'_> {} impl ::core::default::Default for MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), per_view_render_area_count: u32::default(), p_per_view_render_areas: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM; } unsafe impl ExtendsRenderPassBeginInfo for MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'_> {} unsafe impl ExtendsRenderingInfo for MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'_> {} impl<'a> MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM<'a> { #[inline] pub fn per_view_render_areas(mut self, per_view_render_areas: &'a [Rect2D]) -> Self { self.per_view_render_area_count = per_view_render_areas.len() as _; self.p_per_view_render_areas = per_view_render_areas.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct QueryLowLatencySupportNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_queried_low_latency_data: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for QueryLowLatencySupportNV<'_> {} unsafe impl Sync for QueryLowLatencySupportNV<'_> {} impl ::core::default::Default for QueryLowLatencySupportNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_queried_low_latency_data: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for QueryLowLatencySupportNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::QUERY_LOW_LATENCY_SUPPORT_NV; } unsafe impl ExtendsSemaphoreCreateInfo for QueryLowLatencySupportNV<'_> {} impl<'a> QueryLowLatencySupportNV<'a> { #[inline] pub fn queried_low_latency_data(mut self, queried_low_latency_data: *mut c_void) -> Self { self.p_queried_low_latency_data = queried_low_latency_data; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryMapInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: MemoryMapFlags, pub memory: DeviceMemory, pub offset: DeviceSize, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryMapInfoKHR<'_> {} unsafe impl Sync for MemoryMapInfoKHR<'_> {} impl ::core::default::Default for MemoryMapInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: MemoryMapFlags::default(), memory: DeviceMemory::default(), offset: DeviceSize::default(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryMapInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_MAP_INFO_KHR; } pub unsafe trait ExtendsMemoryMapInfoKHR {} impl<'a> MemoryMapInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: MemoryMapFlags) -> Self { self.flags = flags; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } #[inline] pub fn offset(mut self, offset: 
DeviceSize) -> Self { self.offset = offset; self } #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsMemoryMapInfoKHR + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryUnmapInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: MemoryUnmapFlagsKHR, pub memory: DeviceMemory, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryUnmapInfoKHR<'_> {} unsafe impl Sync for MemoryUnmapInfoKHR<'_> {} impl ::core::default::Default for MemoryUnmapInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: MemoryUnmapFlagsKHR::default(), memory: DeviceMemory::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryUnmapInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_UNMAP_INFO_KHR; } impl<'a> MemoryUnmapInfoKHR<'a> { #[inline] pub fn flags(mut self, flags: MemoryUnmapFlagsKHR) -> Self { self.flags = flags; self } #[inline] pub fn memory(mut self, memory: DeviceMemory) -> Self { self.memory = memory; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderObjectFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_object: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderObjectFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderObjectFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderObjectFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_object: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderObjectFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderObjectFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderObjectFeaturesEXT<'_> {} impl<'a> PhysicalDeviceShaderObjectFeaturesEXT<'a> { #[inline] pub fn shader_object(mut self, shader_object: bool) -> Self { self.shader_object = shader_object.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderObjectPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_binary_uuid: [u8; UUID_SIZE], pub shader_binary_version: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderObjectPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderObjectPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderObjectPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type:
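// Usage sketch (assumption): `MemoryMapInfoKHR` above describes a `vkMapMemory2KHR`-style map
// request; `memory`, `offset` and `size` mirror the plain `vkMapMemory` parameters, and extra
// extension structs can be prepended with its `push_next` method. `device_memory` is a
// hypothetical `DeviceMemory` allocation made elsewhere.
//
//     let map_info = MemoryMapInfoKHR::default()
//         .memory(device_memory)
//         .offset(0)
//         .size(WHOLE_SIZE);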
Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_binary_uuid: unsafe { ::core::mem::zeroed() }, shader_binary_version: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderObjectPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderObjectPropertiesEXT<'_> {} impl<'a> PhysicalDeviceShaderObjectPropertiesEXT<'a> { #[inline] pub fn shader_binary_uuid(mut self, shader_binary_uuid: [u8; UUID_SIZE]) -> Self { self.shader_binary_uuid = shader_binary_uuid; self } #[inline] pub fn shader_binary_version(mut self, shader_binary_version: u32) -> Self { self.shader_binary_version = shader_binary_version; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ShaderCreateInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: ShaderCreateFlagsEXT, pub stage: ShaderStageFlags, pub next_stage: ShaderStageFlags, pub code_type: ShaderCodeTypeEXT, pub code_size: usize, pub p_code: *const c_void, pub p_name: *const c_char, pub set_layout_count: u32, pub p_set_layouts: *const DescriptorSetLayout, pub push_constant_range_count: u32, pub p_push_constant_ranges: *const PushConstantRange, pub p_specialization_info: *const SpecializationInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ShaderCreateInfoEXT<'_> {} unsafe impl Sync for ShaderCreateInfoEXT<'_> {} impl ::core::default::Default for ShaderCreateInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: ShaderCreateFlagsEXT::default(), stage: ShaderStageFlags::default(), next_stage: ShaderStageFlags::default(), code_type: ShaderCodeTypeEXT::default(), code_size: usize::default(), p_code: ::core::ptr::null(), p_name: ::core::ptr::null(), set_layout_count: u32::default(), p_set_layouts: ::core::ptr::null(), push_constant_range_count: u32::default(), p_push_constant_ranges: ::core::ptr::null(), p_specialization_info: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ShaderCreateInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SHADER_CREATE_INFO_EXT; } pub unsafe trait ExtendsShaderCreateInfoEXT {} impl<'a> ShaderCreateInfoEXT<'a> { #[inline] pub fn flags(mut self, flags: ShaderCreateFlagsEXT) -> Self { self.flags = flags; self } #[inline] pub fn stage(mut self, stage: ShaderStageFlags) -> Self { self.stage = stage; self } #[inline] pub fn next_stage(mut self, next_stage: ShaderStageFlags) -> Self { self.next_stage = next_stage; self } #[inline] pub fn code_type(mut self, code_type: ShaderCodeTypeEXT) -> Self { self.code_type = code_type; self } #[inline] pub fn code(mut self, code: &'a [u8]) -> Self { self.code_size = code.len(); self.p_code = code.as_ptr().cast(); self } #[inline] pub fn name(mut self, name: &'a CStr) -> Self { self.p_name = name.as_ptr(); self } #[inline] pub unsafe fn name_as_c_str(&self) -> Option<&CStr> { if self.p_name.is_null() { None } else { Some(CStr::from_ptr(self.p_name)) } } #[inline] pub fn set_layouts(mut self, set_layouts: &'a [DescriptorSetLayout]) -> Self { self.set_layout_count = set_layouts.len() as _; self.p_set_layouts = set_layouts.as_ptr(); self } #[inline] pub fn push_constant_ranges(mut self, push_constant_ranges: &'a [PushConstantRange]) -> Self { self.push_constant_range_count = 
push_constant_ranges.len() as _; self.p_push_constant_ranges = push_constant_ranges.as_ptr(); self } #[inline] pub fn specialization_info(mut self, specialization_info: &'a SpecializationInfo<'a>) -> Self { self.p_specialization_info = specialization_info; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsShaderCreateInfoEXT + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderTileImageFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_tile_image_color_read_access: Bool32, pub shader_tile_image_depth_read_access: Bool32, pub shader_tile_image_stencil_read_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderTileImageFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderTileImageFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderTileImageFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_tile_image_color_read_access: Bool32::default(), shader_tile_image_depth_read_access: Bool32::default(), shader_tile_image_stencil_read_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderTileImageFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderTileImageFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderTileImageFeaturesEXT<'_> {} impl<'a> PhysicalDeviceShaderTileImageFeaturesEXT<'a> { #[inline] pub fn shader_tile_image_color_read_access( mut self, shader_tile_image_color_read_access: bool, ) -> Self { self.shader_tile_image_color_read_access = shader_tile_image_color_read_access.into(); self } #[inline] pub fn shader_tile_image_depth_read_access( mut self, shader_tile_image_depth_read_access: bool, ) -> Self { self.shader_tile_image_depth_read_access = shader_tile_image_depth_read_access.into(); self } #[inline] pub fn shader_tile_image_stencil_read_access( mut self, shader_tile_image_stencil_read_access: bool, ) -> Self { self.shader_tile_image_stencil_read_access = shader_tile_image_stencil_read_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderTileImagePropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_tile_image_coherent_read_accelerated: Bool32, pub shader_tile_image_read_sample_from_pixel_rate_invocation: Bool32, pub shader_tile_image_read_from_helper_invocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderTileImagePropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceShaderTileImagePropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceShaderTileImagePropertiesEXT<'_> {
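// Usage sketch (assumption): filling the `ShaderCreateInfoEXT` builder above for
// VK_EXT_shader_object. `spirv_bytes` (a `&[u8]` SPIR-V blob), `entry_point` (a `&CStr`,
// e.g. "main") and `set_layouts` are hypothetical values created elsewhere.
//
//     let shader_info = ShaderCreateInfoEXT::default()
//         .stage(ShaderStageFlags::COMPUTE)
//         .code_type(ShaderCodeTypeEXT::SPIRV)
//         .code(spirv_bytes)
//         .name(entry_point)
//         .set_layouts(&set_layouts);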
#[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_tile_image_coherent_read_accelerated: Bool32::default(), shader_tile_image_read_sample_from_pixel_rate_invocation: Bool32::default(), shader_tile_image_read_from_helper_invocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderTileImagePropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderTileImagePropertiesEXT<'_> {} impl<'a> PhysicalDeviceShaderTileImagePropertiesEXT<'a> { #[inline] pub fn shader_tile_image_coherent_read_accelerated( mut self, shader_tile_image_coherent_read_accelerated: bool, ) -> Self { self.shader_tile_image_coherent_read_accelerated = shader_tile_image_coherent_read_accelerated.into(); self } #[inline] pub fn shader_tile_image_read_sample_from_pixel_rate_invocation( mut self, shader_tile_image_read_sample_from_pixel_rate_invocation: bool, ) -> Self { self.shader_tile_image_read_sample_from_pixel_rate_invocation = shader_tile_image_read_sample_from_pixel_rate_invocation.into(); self } #[inline] pub fn shader_tile_image_read_from_helper_invocation( mut self, shader_tile_image_read_from_helper_invocation: bool, ) -> Self { self.shader_tile_image_read_from_helper_invocation = shader_tile_image_read_from_helper_invocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ImportScreenBufferInfoQNX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub buffer: *mut _screen_buffer, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ImportScreenBufferInfoQNX<'_> {} unsafe impl Sync for ImportScreenBufferInfoQNX<'_> {} impl ::core::default::Default for ImportScreenBufferInfoQNX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), buffer: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ImportScreenBufferInfoQNX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_SCREEN_BUFFER_INFO_QNX; } unsafe impl ExtendsMemoryAllocateInfo for ImportScreenBufferInfoQNX<'_> {} impl<'a> ImportScreenBufferInfoQNX<'a> { #[inline] pub fn buffer(mut self, buffer: &'a mut _screen_buffer) -> Self { self.buffer = buffer; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ScreenBufferPropertiesQNX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub allocation_size: DeviceSize, pub memory_type_bits: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ScreenBufferPropertiesQNX<'_> {} unsafe impl Sync for ScreenBufferPropertiesQNX<'_> {} impl ::core::default::Default for ScreenBufferPropertiesQNX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), allocation_size: DeviceSize::default(), memory_type_bits: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ScreenBufferPropertiesQNX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SCREEN_BUFFER_PROPERTIES_QNX; } pub unsafe trait ExtendsScreenBufferPropertiesQNX {} impl<'a> ScreenBufferPropertiesQNX<'a> { #[inline] pub fn allocation_size(mut self, allocation_size: DeviceSize) -> Self { self.allocation_size = allocation_size; self } #[inline] pub fn memory_type_bits(mut 
self, memory_type_bits: u32) -> Self { self.memory_type_bits = memory_type_bits; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsScreenBufferPropertiesQNX + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*mut T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ScreenBufferFormatPropertiesQNX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub format: Format, pub external_format: u64, pub screen_usage: u64, pub format_features: FormatFeatureFlags, pub sampler_ycbcr_conversion_components: ComponentMapping, pub suggested_ycbcr_model: SamplerYcbcrModelConversion, pub suggested_ycbcr_range: SamplerYcbcrRange, pub suggested_x_chroma_offset: ChromaLocation, pub suggested_y_chroma_offset: ChromaLocation, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ScreenBufferFormatPropertiesQNX<'_> {} unsafe impl Sync for ScreenBufferFormatPropertiesQNX<'_> {} impl ::core::default::Default for ScreenBufferFormatPropertiesQNX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), format: Format::default(), external_format: u64::default(), screen_usage: u64::default(), format_features: FormatFeatureFlags::default(), sampler_ycbcr_conversion_components: ComponentMapping::default(), suggested_ycbcr_model: SamplerYcbcrModelConversion::default(), suggested_ycbcr_range: SamplerYcbcrRange::default(), suggested_x_chroma_offset: ChromaLocation::default(), suggested_y_chroma_offset: ChromaLocation::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ScreenBufferFormatPropertiesQNX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SCREEN_BUFFER_FORMAT_PROPERTIES_QNX; } unsafe impl ExtendsScreenBufferPropertiesQNX for ScreenBufferFormatPropertiesQNX<'_> {} impl<'a> ScreenBufferFormatPropertiesQNX<'a> { #[inline] pub fn format(mut self, format: Format) -> Self { self.format = format; self } #[inline] pub fn external_format(mut self, external_format: u64) -> Self { self.external_format = external_format; self } #[inline] pub fn screen_usage(mut self, screen_usage: u64) -> Self { self.screen_usage = screen_usage; self } #[inline] pub fn format_features(mut self, format_features: FormatFeatureFlags) -> Self { self.format_features = format_features; self } #[inline] pub fn sampler_ycbcr_conversion_components( mut self, sampler_ycbcr_conversion_components: ComponentMapping, ) -> Self { self.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components; self } #[inline] pub fn suggested_ycbcr_model( mut self, suggested_ycbcr_model: SamplerYcbcrModelConversion, ) -> Self { self.suggested_ycbcr_model = suggested_ycbcr_model; self } #[inline] pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self { self.suggested_ycbcr_range = suggested_ycbcr_range; self } #[inline] pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self { self.suggested_x_chroma_offset =
suggested_x_chroma_offset; self } #[inline] pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self { self.suggested_y_chroma_offset = suggested_y_chroma_offset; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExternalFormatQNX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub external_format: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExternalFormatQNX<'_> {} unsafe impl Sync for ExternalFormatQNX<'_> {} impl ::core::default::Default for ExternalFormatQNX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), external_format: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExternalFormatQNX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_FORMAT_QNX; } unsafe impl ExtendsImageCreateInfo for ExternalFormatQNX<'_> {} unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for ExternalFormatQNX<'_> {} impl<'a> ExternalFormatQNX<'a> { #[inline] pub fn external_format(mut self, external_format: u64) -> Self { self.external_format = external_format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub screen_buffer_import: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'_> {} unsafe impl Sync for PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'_> {} impl ::core::default::Default for PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), screen_buffer_import: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'_> {} impl<'a> PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX<'a> { #[inline] pub fn screen_buffer_import(mut self, screen_buffer_import: bool) -> Self { self.screen_buffer_import = screen_buffer_import.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCooperativeMatrixFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cooperative_matrix: Bool32, pub cooperative_matrix_robust_buffer_access: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCooperativeMatrixFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceCooperativeMatrixFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceCooperativeMatrixFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cooperative_matrix: Bool32::default(), cooperative_matrix_robust_buffer_access: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCooperativeMatrixFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR; } unsafe 
impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCooperativeMatrixFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCooperativeMatrixFeaturesKHR<'_> {} impl<'a> PhysicalDeviceCooperativeMatrixFeaturesKHR<'a> { #[inline] pub fn cooperative_matrix(mut self, cooperative_matrix: bool) -> Self { self.cooperative_matrix = cooperative_matrix.into(); self } #[inline] pub fn cooperative_matrix_robust_buffer_access( mut self, cooperative_matrix_robust_buffer_access: bool, ) -> Self { self.cooperative_matrix_robust_buffer_access = cooperative_matrix_robust_buffer_access.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct CooperativeMatrixPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub m_size: u32, pub n_size: u32, pub k_size: u32, pub a_type: ComponentTypeKHR, pub b_type: ComponentTypeKHR, pub c_type: ComponentTypeKHR, pub result_type: ComponentTypeKHR, pub saturating_accumulation: Bool32, pub scope: ScopeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for CooperativeMatrixPropertiesKHR<'_> {} unsafe impl Sync for CooperativeMatrixPropertiesKHR<'_> {} impl ::core::default::Default for CooperativeMatrixPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), m_size: u32::default(), n_size: u32::default(), k_size: u32::default(), a_type: ComponentTypeKHR::default(), b_type: ComponentTypeKHR::default(), c_type: ComponentTypeKHR::default(), result_type: ComponentTypeKHR::default(), saturating_accumulation: Bool32::default(), scope: ScopeKHR::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for CooperativeMatrixPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::COOPERATIVE_MATRIX_PROPERTIES_KHR; } impl<'a> CooperativeMatrixPropertiesKHR<'a> { #[inline] pub fn m_size(mut self, m_size: u32) -> Self { self.m_size = m_size; self } #[inline] pub fn n_size(mut self, n_size: u32) -> Self { self.n_size = n_size; self } #[inline] pub fn k_size(mut self, k_size: u32) -> Self { self.k_size = k_size; self } #[inline] pub fn a_type(mut self, a_type: ComponentTypeKHR) -> Self { self.a_type = a_type; self } #[inline] pub fn b_type(mut self, b_type: ComponentTypeKHR) -> Self { self.b_type = b_type; self } #[inline] pub fn c_type(mut self, c_type: ComponentTypeKHR) -> Self { self.c_type = c_type; self } #[inline] pub fn result_type(mut self, result_type: ComponentTypeKHR) -> Self { self.result_type = result_type; self } #[inline] pub fn saturating_accumulation(mut self, saturating_accumulation: bool) -> Self { self.saturating_accumulation = saturating_accumulation.into(); self } #[inline] pub fn scope(mut self, scope: ScopeKHR) -> Self { self.scope = scope; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCooperativeMatrixPropertiesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cooperative_matrix_supported_stages: ShaderStageFlags, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCooperativeMatrixPropertiesKHR<'_> {} unsafe impl Sync for PhysicalDeviceCooperativeMatrixPropertiesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceCooperativeMatrixPropertiesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cooperative_matrix_supported_stages: ShaderStageFlags::default(), 
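// Usage sketch (assumption): `CooperativeMatrixPropertiesKHR` above is filled in by the
// implementation when enumerating supported matrix shapes, while the feature itself is
// enabled by chaining `PhysicalDeviceCooperativeMatrixFeaturesKHR` onto `DeviceCreateInfo`.
//
//     let mut coop_matrix = PhysicalDeviceCooperativeMatrixFeaturesKHR::default()
//         .cooperative_matrix(true);
//     let device_create_info = DeviceCreateInfo::default().push_next(&mut coop_matrix);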
_marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCooperativeMatrixPropertiesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCooperativeMatrixPropertiesKHR<'_> {} impl<'a> PhysicalDeviceCooperativeMatrixPropertiesKHR<'a> { #[inline] pub fn cooperative_matrix_supported_stages( mut self, cooperative_matrix_supported_stages: ShaderStageFlags, ) -> Self { self.cooperative_matrix_supported_stages = cooperative_matrix_supported_stages; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderEnqueuePropertiesAMDX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_execution_graph_depth: u32, pub max_execution_graph_shader_output_nodes: u32, pub max_execution_graph_shader_payload_size: u32, pub max_execution_graph_shader_payload_count: u32, pub execution_graph_dispatch_address_alignment: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderEnqueuePropertiesAMDX<'_> {} unsafe impl Sync for PhysicalDeviceShaderEnqueuePropertiesAMDX<'_> {} impl ::core::default::Default for PhysicalDeviceShaderEnqueuePropertiesAMDX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_execution_graph_depth: u32::default(), max_execution_graph_shader_output_nodes: u32::default(), max_execution_graph_shader_payload_size: u32::default(), max_execution_graph_shader_payload_count: u32::default(), execution_graph_dispatch_address_alignment: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderEnqueuePropertiesAMDX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderEnqueuePropertiesAMDX<'_> {} impl<'a> PhysicalDeviceShaderEnqueuePropertiesAMDX<'a> { #[inline] pub fn max_execution_graph_depth(mut self, max_execution_graph_depth: u32) -> Self { self.max_execution_graph_depth = max_execution_graph_depth; self } #[inline] pub fn max_execution_graph_shader_output_nodes( mut self, max_execution_graph_shader_output_nodes: u32, ) -> Self { self.max_execution_graph_shader_output_nodes = max_execution_graph_shader_output_nodes; self } #[inline] pub fn max_execution_graph_shader_payload_size( mut self, max_execution_graph_shader_payload_size: u32, ) -> Self { self.max_execution_graph_shader_payload_size = max_execution_graph_shader_payload_size; self } #[inline] pub fn max_execution_graph_shader_payload_count( mut self, max_execution_graph_shader_payload_count: u32, ) -> Self { self.max_execution_graph_shader_payload_count = max_execution_graph_shader_payload_count; self } #[inline] pub fn execution_graph_dispatch_address_alignment( mut self, execution_graph_dispatch_address_alignment: u32, ) -> Self { self.execution_graph_dispatch_address_alignment = execution_graph_dispatch_address_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderEnqueueFeaturesAMDX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_enqueue: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderEnqueueFeaturesAMDX<'_> {} unsafe impl Sync for PhysicalDeviceShaderEnqueueFeaturesAMDX<'_> {} impl 
::core::default::Default for PhysicalDeviceShaderEnqueueFeaturesAMDX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_enqueue: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderEnqueueFeaturesAMDX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderEnqueueFeaturesAMDX<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderEnqueueFeaturesAMDX<'_> {} impl<'a> PhysicalDeviceShaderEnqueueFeaturesAMDX<'a> { #[inline] pub fn shader_enqueue(mut self, shader_enqueue: bool) -> Self { self.shader_enqueue = shader_enqueue.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExecutionGraphPipelineCreateInfoAMDX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub flags: PipelineCreateFlags, pub stage_count: u32, pub p_stages: *const PipelineShaderStageCreateInfo<'a>, pub p_library_info: *const PipelineLibraryCreateInfoKHR<'a>, pub layout: PipelineLayout, pub base_pipeline_handle: Pipeline, pub base_pipeline_index: i32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExecutionGraphPipelineCreateInfoAMDX<'_> {} unsafe impl Sync for ExecutionGraphPipelineCreateInfoAMDX<'_> {} impl ::core::default::Default for ExecutionGraphPipelineCreateInfoAMDX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), flags: PipelineCreateFlags::default(), stage_count: u32::default(), p_stages: ::core::ptr::null(), p_library_info: ::core::ptr::null(), layout: PipelineLayout::default(), base_pipeline_handle: Pipeline::default(), base_pipeline_index: i32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExecutionGraphPipelineCreateInfoAMDX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX; } pub unsafe trait ExtendsExecutionGraphPipelineCreateInfoAMDX {} impl<'a> ExecutionGraphPipelineCreateInfoAMDX<'a> { #[inline] pub fn flags(mut self, flags: PipelineCreateFlags) -> Self { self.flags = flags; self } #[inline] pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo<'a>]) -> Self { self.stage_count = stages.len() as _; self.p_stages = stages.as_ptr(); self } #[inline] pub fn library_info(mut self, library_info: &'a PipelineLibraryCreateInfoKHR<'a>) -> Self { self.p_library_info = library_info; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self { self.base_pipeline_handle = base_pipeline_handle; self } #[inline] pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self { self.base_pipeline_index = base_pipeline_index; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsExecutionGraphPipelineCreateInfoAMDX + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PipelineShaderStageNodeCreateInfoAMDX<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_name: *const c_char, pub index: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PipelineShaderStageNodeCreateInfoAMDX<'_> {} unsafe impl Sync for PipelineShaderStageNodeCreateInfoAMDX<'_> {} impl ::core::default::Default for PipelineShaderStageNodeCreateInfoAMDX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_name: ::core::ptr::null(), index: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PipelineShaderStageNodeCreateInfoAMDX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX; } unsafe impl ExtendsPipelineShaderStageCreateInfo for PipelineShaderStageNodeCreateInfoAMDX<'_> {} impl<'a> PipelineShaderStageNodeCreateInfoAMDX<'a> { #[inline] pub fn name(mut self, name: &'a CStr) -> Self { self.p_name = name.as_ptr(); self } #[inline] pub unsafe fn name_as_c_str(&self) -> Option<&CStr> { if self.p_name.is_null() { None } else { Some(CStr::from_ptr(self.p_name)) } } #[inline] pub fn index(mut self, index: u32) -> Self { self.index = index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct ExecutionGraphPipelineScratchSizeAMDX<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub size: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for ExecutionGraphPipelineScratchSizeAMDX<'_> {} unsafe impl Sync for ExecutionGraphPipelineScratchSizeAMDX<'_> {} impl ::core::default::Default for ExecutionGraphPipelineScratchSizeAMDX<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), size: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for ExecutionGraphPipelineScratchSizeAMDX<'a> { const STRUCTURE_TYPE: StructureType = StructureType::EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX; } impl<'a> ExecutionGraphPipelineScratchSizeAMDX<'a> { #[inline] pub fn size(mut self, size: DeviceSize) -> Self { self.size = size; self } } #[repr(C)] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DispatchGraphInfoAMDX { pub node_index: u32, pub payload_count: u32, pub payloads: DeviceOrHostAddressConstAMDX, pub payload_stride: u64, } #[cfg(feature = "debug")] impl fmt::Debug for DispatchGraphInfoAMDX { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DispatchGraphInfoAMDX") .field("node_index", &self.node_index) .field("payload_count", &self.payload_count) .field("payloads", &"union") .field("payload_stride", &self.payload_stride) .finish() } } impl DispatchGraphInfoAMDX { #[inline] pub fn node_index(mut self, node_index: u32) -> Self { self.node_index = node_index; self } #[inline] pub fn payload_count(mut self, payload_count: u32) -> Self { self.payload_count = payload_count;
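// NOTE (editorial sketch, not part of the generated bindings): an execution graph pipeline is
// described with the usual slice and handle setters, and `PipelineShaderStageNodeCreateInfoAMDX`
// extends each `PipelineShaderStageCreateInfo` to name a graph node. A minimal sketch, assuming
// `use ash::vk;` plus `shader_module`, `pipeline_layout`, `entry_point: &CStr` and
// `node_name: &CStr` already exist:
//
//     let mut node = vk::PipelineShaderStageNodeCreateInfoAMDX::default()
//         .name(node_name)
//         .index(0);
//     let stages = [vk::PipelineShaderStageCreateInfo::default()
//         .stage(vk::ShaderStageFlags::COMPUTE)
//         .module(shader_module)
//         .name(entry_point)
//         .push_next(&mut node)];
//     let graph_info = vk::ExecutionGraphPipelineCreateInfoAMDX::default()
//         .stages(&stages)
//         .layout(pipeline_layout);
//
// The backing memory the graph needs is reported through `ExecutionGraphPipelineScratchSizeAMDX`
// by the vkGetExecutionGraphPipelineScratchSizeAMDX command.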
self } #[inline] pub fn payloads(mut self, payloads: DeviceOrHostAddressConstAMDX) -> Self { self.payloads = payloads; self } #[inline] pub fn payload_stride(mut self, payload_stride: u64) -> Self { self.payload_stride = payload_stride; self } } #[repr(C)] #[derive(Copy, Clone, Default)] #[doc = ""] #[must_use] pub struct DispatchGraphCountInfoAMDX { pub count: u32, pub infos: DeviceOrHostAddressConstAMDX, pub stride: u64, } #[cfg(feature = "debug")] impl fmt::Debug for DispatchGraphCountInfoAMDX { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt.debug_struct("DispatchGraphCountInfoAMDX") .field("count", &self.count) .field("infos", &"union") .field("stride", &self.stride) .finish() } } impl DispatchGraphCountInfoAMDX { #[inline] pub fn count(mut self, count: u32) -> Self { self.count = count; self } #[inline] pub fn infos(mut self, infos: DeviceOrHostAddressConstAMDX) -> Self { self.infos = infos; self } #[inline] pub fn stride(mut self, stride: u64) -> Self { self.stride = stride; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindMemoryStatusKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_result: *mut Result, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindMemoryStatusKHR<'_> {} unsafe impl Sync for BindMemoryStatusKHR<'_> {} impl ::core::default::Default for BindMemoryStatusKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), p_result: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindMemoryStatusKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_MEMORY_STATUS_KHR; } unsafe impl ExtendsBindBufferMemoryInfo for BindMemoryStatusKHR<'_> {} unsafe impl ExtendsBindImageMemoryInfo for BindMemoryStatusKHR<'_> {} impl<'a> BindMemoryStatusKHR<'a> { #[inline] pub fn result(mut self, result: &'a mut Result) -> Self { self.p_result = result; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindDescriptorSetsInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stage_flags: ShaderStageFlags, pub layout: PipelineLayout, pub first_set: u32, pub descriptor_set_count: u32, pub p_descriptor_sets: *const DescriptorSet, pub dynamic_offset_count: u32, pub p_dynamic_offsets: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindDescriptorSetsInfoKHR<'_> {} unsafe impl Sync for BindDescriptorSetsInfoKHR<'_> {} impl ::core::default::Default for BindDescriptorSetsInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stage_flags: ShaderStageFlags::default(), layout: PipelineLayout::default(), first_set: u32::default(), descriptor_set_count: u32::default(), p_descriptor_sets: ::core::ptr::null(), dynamic_offset_count: u32::default(), p_dynamic_offsets: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindDescriptorSetsInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_DESCRIPTOR_SETS_INFO_KHR; } pub unsafe trait ExtendsBindDescriptorSetsInfoKHR {} impl<'a> BindDescriptorSetsInfoKHR<'a> { #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn first_set(mut self, first_set: 
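// NOTE (editorial sketch, not part of the generated bindings): `BindMemoryStatusKHR`, defined
// above, lets the caller receive a per-bind `vk::Result` by pointing `p_result` at caller-owned
// storage and chaining the struct into a `BindBufferMemoryInfo` or `BindImageMemoryInfo`.
// A minimal sketch, assuming `use ash::vk;` and that `device: ash::Device`, `buffer` and
// `memory` already exist:
//
//     let mut status = vk::Result::SUCCESS;
//     let mut bind_status = vk::BindMemoryStatusKHR::default().result(&mut status);
//     let bind_info = vk::BindBufferMemoryInfo::default()
//         .buffer(buffer)
//         .memory(memory)
//         .push_next(&mut bind_status);
//     unsafe { device.bind_buffer_memory2(&[bind_info]) }?;
//     // `status` now holds the result the implementation wrote for this particular bind.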
u32) -> Self { self.first_set = first_set; self } #[inline] pub fn descriptor_sets(mut self, descriptor_sets: &'a [DescriptorSet]) -> Self { self.descriptor_set_count = descriptor_sets.len() as _; self.p_descriptor_sets = descriptor_sets.as_ptr(); self } #[inline] pub fn dynamic_offsets(mut self, dynamic_offsets: &'a [u32]) -> Self { self.dynamic_offset_count = dynamic_offsets.len() as _; self.p_dynamic_offsets = dynamic_offsets.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsBindDescriptorSetsInfoKHR + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PushConstantsInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub layout: PipelineLayout, pub stage_flags: ShaderStageFlags, pub offset: u32, pub size: u32, pub p_values: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PushConstantsInfoKHR<'_> {} unsafe impl Sync for PushConstantsInfoKHR<'_> {} impl ::core::default::Default for PushConstantsInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), layout: PipelineLayout::default(), stage_flags: ShaderStageFlags::default(), offset: u32::default(), size: u32::default(), p_values: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PushConstantsInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PUSH_CONSTANTS_INFO_KHR; } pub unsafe trait ExtendsPushConstantsInfoKHR {} impl<'a> PushConstantsInfoKHR<'a> { #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn offset(mut self, offset: u32) -> Self { self.offset = offset; self } #[inline] pub fn values(mut self, values: &'a [u8]) -> Self { self.size = values.len() as _; self.p_values = values.as_ptr().cast(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsPushConstantsInfoKHR + ?Sized>(mut self, next: &'a mut T) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PushDescriptorSetInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stage_flags: ShaderStageFlags, pub layout: PipelineLayout, pub set: u32, pub descriptor_write_count: u32, pub p_descriptor_writes: *const WriteDescriptorSet<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PushDescriptorSetInfoKHR<'_> {} unsafe impl Sync for PushDescriptorSetInfoKHR<'_> {} impl ::core::default::Default for PushDescriptorSetInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stage_flags: ShaderStageFlags::default(), layout: PipelineLayout::default(), set: u32::default(), descriptor_write_count: u32::default(), p_descriptor_writes: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PushDescriptorSetInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PUSH_DESCRIPTOR_SET_INFO_KHR; } pub unsafe trait ExtendsPushDescriptorSetInfoKHR {} impl<'a> PushDescriptorSetInfoKHR<'a> { #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn set(mut self, set: u32) -> Self { self.set = set; self } #[inline] pub fn descriptor_writes(mut self, descriptor_writes: &'a [WriteDescriptorSet<'a>]) -> Self { self.descriptor_write_count = descriptor_writes.len() as _; self.p_descriptor_writes = descriptor_writes.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsPushDescriptorSetInfoKHR + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PushDescriptorSetWithTemplateInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub descriptor_update_template: DescriptorUpdateTemplate, pub layout: PipelineLayout, pub set: u32, pub p_data: *const c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PushDescriptorSetWithTemplateInfoKHR<'_> {} unsafe impl Sync for PushDescriptorSetWithTemplateInfoKHR<'_> {} impl ::core::default::Default for PushDescriptorSetWithTemplateInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), descriptor_update_template: DescriptorUpdateTemplate::default(), layout: PipelineLayout::default(), set: u32::default(), p_data: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PushDescriptorSetWithTemplateInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR; } pub unsafe trait ExtendsPushDescriptorSetWithTemplateInfoKHR {} impl<'a> PushDescriptorSetWithTemplateInfoKHR<'a> { #[inline] pub fn descriptor_update_template( mut self, descriptor_update_template: DescriptorUpdateTemplate, ) -> Self { self.descriptor_update_template = descriptor_update_template; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn set(mut self, set: u32) -> Self { self.set = set; self } #[inline] pub fn data(mut self, data: *const c_void) -> Self { self.p_data = data; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsPushDescriptorSetWithTemplateInfoKHR + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SetDescriptorBufferOffsetsInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stage_flags: ShaderStageFlags, pub layout: PipelineLayout, pub first_set: u32, pub set_count: u32, pub p_buffer_indices: *const u32, pub p_offsets: *const DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SetDescriptorBufferOffsetsInfoEXT<'_> {} unsafe impl Sync for SetDescriptorBufferOffsetsInfoEXT<'_> {} impl ::core::default::Default for SetDescriptorBufferOffsetsInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stage_flags: ShaderStageFlags::default(), layout: PipelineLayout::default(), first_set: u32::default(), set_count: u32::default(), p_buffer_indices: ::core::ptr::null(), p_offsets: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SetDescriptorBufferOffsetsInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT; } pub unsafe trait ExtendsSetDescriptorBufferOffsetsInfoEXT {} impl<'a> SetDescriptorBufferOffsetsInfoEXT<'a> { #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn first_set(mut self, first_set: u32) -> Self { self.first_set = first_set; self } #[inline] pub fn buffer_indices(mut self, buffer_indices: &'a [u32]) -> Self { self.set_count = buffer_indices.len() as _; self.p_buffer_indices = buffer_indices.as_ptr(); self } #[inline] pub fn offsets(mut self, offsets: &'a [DeviceSize]) -> Self { self.set_count = offsets.len() as _; self.p_offsets = offsets.as_ptr(); self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsSetDescriptorBufferOffsetsInfoEXT + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BindDescriptorBufferEmbeddedSamplersInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stage_flags: ShaderStageFlags, pub layout: PipelineLayout, pub set: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BindDescriptorBufferEmbeddedSamplersInfoEXT<'_> {} unsafe impl Sync for BindDescriptorBufferEmbeddedSamplersInfoEXT<'_> {} impl ::core::default::Default for BindDescriptorBufferEmbeddedSamplersInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stage_flags: ShaderStageFlags::default(), layout: PipelineLayout::default(), set: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BindDescriptorBufferEmbeddedSamplersInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT; } pub unsafe trait ExtendsBindDescriptorBufferEmbeddedSamplersInfoEXT {} impl<'a> BindDescriptorBufferEmbeddedSamplersInfoEXT<'a> { #[inline] pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self { self.stage_flags = stage_flags; self } #[inline] pub fn layout(mut self, layout: PipelineLayout) -> Self { self.layout = layout; self } #[inline] pub fn set(mut self, set: u32) -> Self { self.set = set; self } #[doc = r" Prepends the given extension struct between the root and the first pointer. This"] #[doc = r" method only exists on structs that can be passed to a function directly. 
Only"] #[doc = r" valid extension structs can be pushed into the chain."] #[doc = r" If the chain looks like `A -> B -> C`, and you call `x.push_next(&mut D)`, then the"] #[doc = r" chain will look like `A -> D -> B -> C`."] pub fn push_next<T: ExtendsBindDescriptorBufferEmbeddedSamplersInfoEXT + ?Sized>( mut self, next: &'a mut T, ) -> Self { unsafe { let next_ptr = <*const T>::cast(next); let last_next = ptr_chain_iter(next).last().unwrap(); (*last_next).p_next = self.p_next as _; self.p_next = next_ptr; } self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCubicClampFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cubic_range_clamp: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCubicClampFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceCubicClampFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceCubicClampFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cubic_range_clamp: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCubicClampFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCubicClampFeaturesQCOM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCubicClampFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceCubicClampFeaturesQCOM<'a> { #[inline] pub fn cubic_range_clamp(mut self, cubic_range_clamp: bool) -> Self { self.cubic_range_clamp = cubic_range_clamp.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceYcbcrDegammaFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub ycbcr_degamma: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceYcbcrDegammaFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceYcbcrDegammaFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceYcbcrDegammaFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), ycbcr_degamma: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceYcbcrDegammaFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceYcbcrDegammaFeaturesQCOM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcrDegammaFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceYcbcrDegammaFeaturesQCOM<'a> { #[inline] pub fn ycbcr_degamma(mut self, ycbcr_degamma: bool) -> Self { self.ycbcr_degamma = ycbcr_degamma.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub enable_y_degamma: Bool32, pub enable_cb_cr_degamma: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'_> {} unsafe impl Sync for SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'_> {} impl ::core::default::Default for SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), enable_y_degamma: Bool32::default(),
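// NOTE (editorial sketch, not part of the generated bindings): the `*Features*` structs in this
// region implement both `ExtendsPhysicalDeviceFeatures2` (for querying support) and
// `ExtendsDeviceCreateInfo` (for enabling the feature at device creation). A minimal enable
// sketch, assuming `use ash::vk;` and a `queue_infos: &[vk::DeviceQueueCreateInfo]` slice
// already exist:
//
//     let mut cubic_clamp = vk::PhysicalDeviceCubicClampFeaturesQCOM::default()
//         .cubic_range_clamp(true);
//     let device_info = vk::DeviceCreateInfo::default()
//         .queue_create_infos(queue_infos)
//         .push_next(&mut cubic_clamp);
//
// The same chain works for `PhysicalDeviceYcbcrDegammaFeaturesQCOM` and the other feature
// structs defined nearby.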
enable_cb_cr_degamma: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM; } unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'_> { } impl<'a> SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM<'a> { #[inline] pub fn enable_y_degamma(mut self, enable_y_degamma: bool) -> Self { self.enable_y_degamma = enable_y_degamma.into(); self } #[inline] pub fn enable_cb_cr_degamma(mut self, enable_cb_cr_degamma: bool) -> Self { self.enable_cb_cr_degamma = enable_cb_cr_degamma.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCubicWeightsFeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub selectable_cubic_weights: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCubicWeightsFeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceCubicWeightsFeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceCubicWeightsFeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), selectable_cubic_weights: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCubicWeightsFeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCubicWeightsFeaturesQCOM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCubicWeightsFeaturesQCOM<'_> {} impl<'a> PhysicalDeviceCubicWeightsFeaturesQCOM<'a> { #[inline] pub fn selectable_cubic_weights(mut self, selectable_cubic_weights: bool) -> Self { self.selectable_cubic_weights = selectable_cubic_weights.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerCubicWeightsCreateInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub cubic_weights: CubicFilterWeightsQCOM, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerCubicWeightsCreateInfoQCOM<'_> {} unsafe impl Sync for SamplerCubicWeightsCreateInfoQCOM<'_> {} impl ::core::default::Default for SamplerCubicWeightsCreateInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), cubic_weights: CubicFilterWeightsQCOM::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerCubicWeightsCreateInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM; } unsafe impl ExtendsSamplerCreateInfo for SamplerCubicWeightsCreateInfoQCOM<'_> {} impl<'a> SamplerCubicWeightsCreateInfoQCOM<'a> { #[inline] pub fn cubic_weights(mut self, cubic_weights: CubicFilterWeightsQCOM) -> Self { self.cubic_weights = cubic_weights; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct BlitImageCubicWeightsInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub cubic_weights: CubicFilterWeightsQCOM, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for BlitImageCubicWeightsInfoQCOM<'_> {} unsafe impl Sync for BlitImageCubicWeightsInfoQCOM<'_> {} impl ::core::default::Default for 
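// NOTE (editorial sketch, not part of the generated bindings): `SamplerCubicWeightsCreateInfoQCOM`
// extends `SamplerCreateInfo`, so the cubic weight set is chosen at sampler creation time. A
// minimal sketch, assuming `use ash::vk;`; the `CATMULL_ROM` variant and `Filter::CUBIC_EXT`
// are assumed names here:
//
//     let mut weights = vk::SamplerCubicWeightsCreateInfoQCOM::default()
//         .cubic_weights(vk::CubicFilterWeightsQCOM::CATMULL_ROM);
//     let sampler_info = vk::SamplerCreateInfo::default()
//         .mag_filter(vk::Filter::CUBIC_EXT)
//         .min_filter(vk::Filter::CUBIC_EXT)
//         .push_next(&mut weights);
//
// The `BlitImageCubicWeightsInfoQCOM` struct that follows applies the same idea to
// `BlitImageInfo2`.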
BlitImageCubicWeightsInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), cubic_weights: CubicFilterWeightsQCOM::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for BlitImageCubicWeightsInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM; } unsafe impl ExtendsBlitImageInfo2 for BlitImageCubicWeightsInfoQCOM<'_> {} impl<'a> BlitImageCubicWeightsInfoQCOM<'a> { #[inline] pub fn cubic_weights(mut self, cubic_weights: CubicFilterWeightsQCOM) -> Self { self.cubic_weights = cubic_weights; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageProcessing2FeaturesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub texture_block_match2: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageProcessing2FeaturesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceImageProcessing2FeaturesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceImageProcessing2FeaturesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), texture_block_match2: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageProcessing2FeaturesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageProcessing2FeaturesQCOM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageProcessing2FeaturesQCOM<'_> {} impl<'a> PhysicalDeviceImageProcessing2FeaturesQCOM<'a> { #[inline] pub fn texture_block_match2(mut self, texture_block_match2: bool) -> Self { self.texture_block_match2 = texture_block_match2.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceImageProcessing2PropertiesQCOM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub max_block_match_window: Extent2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceImageProcessing2PropertiesQCOM<'_> {} unsafe impl Sync for PhysicalDeviceImageProcessing2PropertiesQCOM<'_> {} impl ::core::default::Default for PhysicalDeviceImageProcessing2PropertiesQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), max_block_match_window: Extent2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceImageProcessing2PropertiesQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceImageProcessing2PropertiesQCOM<'_> {} impl<'a> PhysicalDeviceImageProcessing2PropertiesQCOM<'a> { #[inline] pub fn max_block_match_window(mut self, max_block_match_window: Extent2D) -> Self { self.max_block_match_window = max_block_match_window; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SamplerBlockMatchWindowCreateInfoQCOM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub window_extent: Extent2D, pub window_compare_mode: BlockMatchWindowCompareModeQCOM, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SamplerBlockMatchWindowCreateInfoQCOM<'_> {} unsafe impl Sync for 
SamplerBlockMatchWindowCreateInfoQCOM<'_> {} impl ::core::default::Default for SamplerBlockMatchWindowCreateInfoQCOM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), window_extent: Extent2D::default(), window_compare_mode: BlockMatchWindowCompareModeQCOM::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SamplerBlockMatchWindowCreateInfoQCOM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM; } unsafe impl ExtendsSamplerCreateInfo for SamplerBlockMatchWindowCreateInfoQCOM<'_> {} impl<'a> SamplerBlockMatchWindowCreateInfoQCOM<'a> { #[inline] pub fn window_extent(mut self, window_extent: Extent2D) -> Self { self.window_extent = window_extent; self } #[inline] pub fn window_compare_mode( mut self, window_compare_mode: BlockMatchWindowCompareModeQCOM, ) -> Self { self.window_compare_mode = window_compare_mode; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub descriptor_pool_overallocation: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), descriptor_pool_overallocation: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'_> {} impl<'a> PhysicalDeviceDescriptorPoolOverallocationFeaturesNV<'a> { #[inline] pub fn descriptor_pool_overallocation(mut self, descriptor_pool_overallocation: bool) -> Self { self.descriptor_pool_overallocation = descriptor_pool_overallocation.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceLayeredDriverPropertiesMSFT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub underlying_api: LayeredDriverUnderlyingApiMSFT, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceLayeredDriverPropertiesMSFT<'_> {} unsafe impl Sync for PhysicalDeviceLayeredDriverPropertiesMSFT<'_> {} impl ::core::default::Default for PhysicalDeviceLayeredDriverPropertiesMSFT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), underlying_api: LayeredDriverUnderlyingApiMSFT::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceLayeredDriverPropertiesMSFT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceLayeredDriverPropertiesMSFT<'_> {} impl<'a> PhysicalDeviceLayeredDriverPropertiesMSFT<'a> { #[inline] pub fn underlying_api(mut self, underlying_api: LayeredDriverUnderlyingApiMSFT) -> Self { 
self.underlying_api = underlying_api; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDevicePerStageDescriptorSetFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub per_stage_descriptor_set: Bool32, pub dynamic_pipeline_layout: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDevicePerStageDescriptorSetFeaturesNV<'_> {} unsafe impl Sync for PhysicalDevicePerStageDescriptorSetFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDevicePerStageDescriptorSetFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), per_stage_descriptor_set: Bool32::default(), dynamic_pipeline_layout: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDevicePerStageDescriptorSetFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePerStageDescriptorSetFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePerStageDescriptorSetFeaturesNV<'_> {} impl<'a> PhysicalDevicePerStageDescriptorSetFeaturesNV<'a> { #[inline] pub fn per_stage_descriptor_set(mut self, per_stage_descriptor_set: bool) -> Self { self.per_stage_descriptor_set = per_stage_descriptor_set.into(); self } #[inline] pub fn dynamic_pipeline_layout(mut self, dynamic_pipeline_layout: bool) -> Self { self.dynamic_pipeline_layout = dynamic_pipeline_layout.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalFormatResolveFeaturesANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub external_format_resolve: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalFormatResolveFeaturesANDROID<'_> {} unsafe impl Sync for PhysicalDeviceExternalFormatResolveFeaturesANDROID<'_> {} impl ::core::default::Default for PhysicalDeviceExternalFormatResolveFeaturesANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), external_format_resolve: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalFormatResolveFeaturesANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExternalFormatResolveFeaturesANDROID<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExternalFormatResolveFeaturesANDROID<'_> {} impl<'a> PhysicalDeviceExternalFormatResolveFeaturesANDROID<'a> { #[inline] pub fn external_format_resolve(mut self, external_format_resolve: bool) -> Self { self.external_format_resolve = external_format_resolve.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceExternalFormatResolvePropertiesANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub null_color_attachment_with_external_format_resolve: Bool32, pub external_format_resolve_chroma_offset_x: ChromaLocation, pub external_format_resolve_chroma_offset_y: ChromaLocation, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceExternalFormatResolvePropertiesANDROID<'_> {} unsafe impl Sync for 
PhysicalDeviceExternalFormatResolvePropertiesANDROID<'_> {} impl ::core::default::Default for PhysicalDeviceExternalFormatResolvePropertiesANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), null_color_attachment_with_external_format_resolve: Bool32::default(), external_format_resolve_chroma_offset_x: ChromaLocation::default(), external_format_resolve_chroma_offset_y: ChromaLocation::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceExternalFormatResolvePropertiesANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExternalFormatResolvePropertiesANDROID<'_> { } impl<'a> PhysicalDeviceExternalFormatResolvePropertiesANDROID<'a> { #[inline] pub fn null_color_attachment_with_external_format_resolve( mut self, null_color_attachment_with_external_format_resolve: bool, ) -> Self { self.null_color_attachment_with_external_format_resolve = null_color_attachment_with_external_format_resolve.into(); self } #[inline] pub fn external_format_resolve_chroma_offset_x( mut self, external_format_resolve_chroma_offset_x: ChromaLocation, ) -> Self { self.external_format_resolve_chroma_offset_x = external_format_resolve_chroma_offset_x; self } #[inline] pub fn external_format_resolve_chroma_offset_y( mut self, external_format_resolve_chroma_offset_y: ChromaLocation, ) -> Self { self.external_format_resolve_chroma_offset_y = external_format_resolve_chroma_offset_y; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct AndroidHardwareBufferFormatResolvePropertiesANDROID<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub color_attachment_format: Format, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for AndroidHardwareBufferFormatResolvePropertiesANDROID<'_> {} unsafe impl Sync for AndroidHardwareBufferFormatResolvePropertiesANDROID<'_> {} impl ::core::default::Default for AndroidHardwareBufferFormatResolvePropertiesANDROID<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), color_attachment_format: Format::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for AndroidHardwareBufferFormatResolvePropertiesANDROID<'a> { const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID; } unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID for AndroidHardwareBufferFormatResolvePropertiesANDROID<'_> { } impl<'a> AndroidHardwareBufferFormatResolvePropertiesANDROID<'a> { #[inline] pub fn color_attachment_format(mut self, color_attachment_format: Format) -> Self { self.color_attachment_format = color_attachment_format; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LatencySleepModeInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub low_latency_mode: Bool32, pub low_latency_boost: Bool32, pub minimum_interval_us: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LatencySleepModeInfoNV<'_> {} unsafe impl Sync for LatencySleepModeInfoNV<'_> {} impl ::core::default::Default for LatencySleepModeInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), low_latency_mode: Bool32::default(), low_latency_boost: 
Bool32::default(), minimum_interval_us: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for LatencySleepModeInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::LATENCY_SLEEP_MODE_INFO_NV; } impl<'a> LatencySleepModeInfoNV<'a> { #[inline] pub fn low_latency_mode(mut self, low_latency_mode: bool) -> Self { self.low_latency_mode = low_latency_mode.into(); self } #[inline] pub fn low_latency_boost(mut self, low_latency_boost: bool) -> Self { self.low_latency_boost = low_latency_boost.into(); self } #[inline] pub fn minimum_interval_us(mut self, minimum_interval_us: u32) -> Self { self.minimum_interval_us = minimum_interval_us; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LatencySleepInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub signal_semaphore: Semaphore, pub value: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LatencySleepInfoNV<'_> {} unsafe impl Sync for LatencySleepInfoNV<'_> {} impl ::core::default::Default for LatencySleepInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), signal_semaphore: Semaphore::default(), value: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for LatencySleepInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::LATENCY_SLEEP_INFO_NV; } impl<'a> LatencySleepInfoNV<'a> { #[inline] pub fn signal_semaphore(mut self, signal_semaphore: Semaphore) -> Self { self.signal_semaphore = signal_semaphore; self } #[inline] pub fn value(mut self, value: u64) -> Self { self.value = value; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SetLatencyMarkerInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub present_id: u64, pub marker: LatencyMarkerNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SetLatencyMarkerInfoNV<'_> {} unsafe impl Sync for SetLatencyMarkerInfoNV<'_> {} impl ::core::default::Default for SetLatencyMarkerInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), present_id: u64::default(), marker: LatencyMarkerNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SetLatencyMarkerInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SET_LATENCY_MARKER_INFO_NV; } impl<'a> SetLatencyMarkerInfoNV<'a> { #[inline] pub fn present_id(mut self, present_id: u64) -> Self { self.present_id = present_id; self } #[inline] pub fn marker(mut self, marker: LatencyMarkerNV) -> Self { self.marker = marker; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct GetLatencyMarkerInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub timing_count: u32, pub p_timings: *mut LatencyTimingsFrameReportNV<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for GetLatencyMarkerInfoNV<'_> {} unsafe impl Sync for GetLatencyMarkerInfoNV<'_> {} impl ::core::default::Default for GetLatencyMarkerInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), timing_count: u32::default(), p_timings: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for GetLatencyMarkerInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::GET_LATENCY_MARKER_INFO_NV; } impl<'a> 
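// NOTE (editorial sketch, not part of the generated bindings): the NV low-latency structs above
// are plain builders; `LatencySleepModeInfoNV` configures vkSetLatencySleepModeNV, and
// `LatencySleepInfoNV` names the timeline semaphore that vkLatencySleepNV signals when the
// application should wake and start its next frame. A minimal sketch, assuming `use ash::vk;`
// and that `timeline_semaphore` and `frame_id` already exist:
//
//     let sleep_mode = vk::LatencySleepModeInfoNV::default()
//         .low_latency_mode(true)
//         .low_latency_boost(true)
//         .minimum_interval_us(0);
//     let sleep_info = vk::LatencySleepInfoNV::default()
//         .signal_semaphore(timeline_semaphore)
//         .value(frame_id);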
GetLatencyMarkerInfoNV<'a> { #[inline] pub fn timings(mut self, timings: &'a mut [LatencyTimingsFrameReportNV<'_>]) -> Self { self.timing_count = timings.len() as _; self.p_timings = timings.as_mut_ptr().cast(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LatencyTimingsFrameReportNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub present_id: u64, pub input_sample_time_us: u64, pub sim_start_time_us: u64, pub sim_end_time_us: u64, pub render_submit_start_time_us: u64, pub render_submit_end_time_us: u64, pub present_start_time_us: u64, pub present_end_time_us: u64, pub driver_start_time_us: u64, pub driver_end_time_us: u64, pub os_render_queue_start_time_us: u64, pub os_render_queue_end_time_us: u64, pub gpu_render_start_time_us: u64, pub gpu_render_end_time_us: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LatencyTimingsFrameReportNV<'_> {} unsafe impl Sync for LatencyTimingsFrameReportNV<'_> {} impl ::core::default::Default for LatencyTimingsFrameReportNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), present_id: u64::default(), input_sample_time_us: u64::default(), sim_start_time_us: u64::default(), sim_end_time_us: u64::default(), render_submit_start_time_us: u64::default(), render_submit_end_time_us: u64::default(), present_start_time_us: u64::default(), present_end_time_us: u64::default(), driver_start_time_us: u64::default(), driver_end_time_us: u64::default(), os_render_queue_start_time_us: u64::default(), os_render_queue_end_time_us: u64::default(), gpu_render_start_time_us: u64::default(), gpu_render_end_time_us: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for LatencyTimingsFrameReportNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::LATENCY_TIMINGS_FRAME_REPORT_NV; } impl<'a> LatencyTimingsFrameReportNV<'a> { #[inline] pub fn present_id(mut self, present_id: u64) -> Self { self.present_id = present_id; self } #[inline] pub fn input_sample_time_us(mut self, input_sample_time_us: u64) -> Self { self.input_sample_time_us = input_sample_time_us; self } #[inline] pub fn sim_start_time_us(mut self, sim_start_time_us: u64) -> Self { self.sim_start_time_us = sim_start_time_us; self } #[inline] pub fn sim_end_time_us(mut self, sim_end_time_us: u64) -> Self { self.sim_end_time_us = sim_end_time_us; self } #[inline] pub fn render_submit_start_time_us(mut self, render_submit_start_time_us: u64) -> Self { self.render_submit_start_time_us = render_submit_start_time_us; self } #[inline] pub fn render_submit_end_time_us(mut self, render_submit_end_time_us: u64) -> Self { self.render_submit_end_time_us = render_submit_end_time_us; self } #[inline] pub fn present_start_time_us(mut self, present_start_time_us: u64) -> Self { self.present_start_time_us = present_start_time_us; self } #[inline] pub fn present_end_time_us(mut self, present_end_time_us: u64) -> Self { self.present_end_time_us = present_end_time_us; self } #[inline] pub fn driver_start_time_us(mut self, driver_start_time_us: u64) -> Self { self.driver_start_time_us = driver_start_time_us; self } #[inline] pub fn driver_end_time_us(mut self, driver_end_time_us: u64) -> Self { self.driver_end_time_us = driver_end_time_us; self } #[inline] pub fn os_render_queue_start_time_us(mut self, os_render_queue_start_time_us: u64) -> Self { self.os_render_queue_start_time_us = os_render_queue_start_time_us; self } #[inline] pub fn 
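// NOTE (editorial sketch, not part of the generated bindings): `GetLatencyMarkerInfoNV::timings`
// records both the slice length (`timing_count`) and the pointer, so a caller typically provides
// a caller-sized array of `LatencyTimingsFrameReportNV` for vkGetLatencyTimingsNV to fill.
// A minimal sketch with an assumed fixed capacity, assuming `use ash::vk;`:
//
//     let mut reports = vec![vk::LatencyTimingsFrameReportNV::default(); 64];
//     let mut marker_info = vk::GetLatencyMarkerInfoNV::default().timings(&mut reports);
//     // After vkGetLatencyTimingsNV runs, `marker_info.timing_count` entries of `reports`
//     // hold per-frame timestamps (sim, render submit, present, driver, OS queue, GPU).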
os_render_queue_end_time_us(mut self, os_render_queue_end_time_us: u64) -> Self { self.os_render_queue_end_time_us = os_render_queue_end_time_us; self } #[inline] pub fn gpu_render_start_time_us(mut self, gpu_render_start_time_us: u64) -> Self { self.gpu_render_start_time_us = gpu_render_start_time_us; self } #[inline] pub fn gpu_render_end_time_us(mut self, gpu_render_end_time_us: u64) -> Self { self.gpu_render_end_time_us = gpu_render_end_time_us; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct OutOfBandQueueTypeInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub queue_type: OutOfBandQueueTypeNV, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for OutOfBandQueueTypeInfoNV<'_> {} unsafe impl Sync for OutOfBandQueueTypeInfoNV<'_> {} impl ::core::default::Default for OutOfBandQueueTypeInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), queue_type: OutOfBandQueueTypeNV::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for OutOfBandQueueTypeInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::OUT_OF_BAND_QUEUE_TYPE_INFO_NV; } impl<'a> OutOfBandQueueTypeInfoNV<'a> { #[inline] pub fn queue_type(mut self, queue_type: OutOfBandQueueTypeNV) -> Self { self.queue_type = queue_type; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LatencySubmissionPresentIdNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub present_id: u64, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LatencySubmissionPresentIdNV<'_> {} unsafe impl Sync for LatencySubmissionPresentIdNV<'_> {} impl ::core::default::Default for LatencySubmissionPresentIdNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), present_id: u64::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for LatencySubmissionPresentIdNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::LATENCY_SUBMISSION_PRESENT_ID_NV; } unsafe impl ExtendsSubmitInfo for LatencySubmissionPresentIdNV<'_> {} unsafe impl ExtendsSubmitInfo2 for LatencySubmissionPresentIdNV<'_> {} impl<'a> LatencySubmissionPresentIdNV<'a> { #[inline] pub fn present_id(mut self, present_id: u64) -> Self { self.present_id = present_id; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct SwapchainLatencyCreateInfoNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub latency_mode_enable: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for SwapchainLatencyCreateInfoNV<'_> {} unsafe impl Sync for SwapchainLatencyCreateInfoNV<'_> {} impl ::core::default::Default for SwapchainLatencyCreateInfoNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), latency_mode_enable: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for SwapchainLatencyCreateInfoNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_LATENCY_CREATE_INFO_NV; } unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainLatencyCreateInfoNV<'_> {} impl<'a> SwapchainLatencyCreateInfoNV<'a> { #[inline] pub fn latency_mode_enable(mut self, latency_mode_enable: bool) -> Self { self.latency_mode_enable = latency_mode_enable.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", 
derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct LatencySurfaceCapabilitiesNV<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub present_mode_count: u32, pub p_present_modes: *mut PresentModeKHR, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for LatencySurfaceCapabilitiesNV<'_> {} unsafe impl Sync for LatencySurfaceCapabilitiesNV<'_> {} impl ::core::default::Default for LatencySurfaceCapabilitiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), present_mode_count: u32::default(), p_present_modes: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for LatencySurfaceCapabilitiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::LATENCY_SURFACE_CAPABILITIES_NV; } unsafe impl ExtendsSurfaceCapabilities2KHR for LatencySurfaceCapabilitiesNV<'_> {} impl<'a> LatencySurfaceCapabilitiesNV<'a> { #[inline] pub fn present_modes(mut self, present_modes: &'a mut [PresentModeKHR]) -> Self { self.present_mode_count = present_modes.len() as _; self.p_present_modes = present_modes.as_mut_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCudaKernelLaunchFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub cuda_kernel_launch_features: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCudaKernelLaunchFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceCudaKernelLaunchFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCudaKernelLaunchFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), cuda_kernel_launch_features: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCudaKernelLaunchFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCudaKernelLaunchFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCudaKernelLaunchFeaturesNV<'_> {} impl<'a> PhysicalDeviceCudaKernelLaunchFeaturesNV<'a> { #[inline] pub fn cuda_kernel_launch_features(mut self, cuda_kernel_launch_features: bool) -> Self { self.cuda_kernel_launch_features = cuda_kernel_launch_features.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceCudaKernelLaunchPropertiesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub compute_capability_minor: u32, pub compute_capability_major: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceCudaKernelLaunchPropertiesNV<'_> {} unsafe impl Sync for PhysicalDeviceCudaKernelLaunchPropertiesNV<'_> {} impl ::core::default::Default for PhysicalDeviceCudaKernelLaunchPropertiesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), compute_capability_minor: u32::default(), compute_capability_major: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceCudaKernelLaunchPropertiesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCudaKernelLaunchPropertiesNV<'_> {} impl<'a> PhysicalDeviceCudaKernelLaunchPropertiesNV<'a> { 
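// NOTE (editorial sketch, not part of the generated bindings): latency tracking is opted into per
// swapchain through `SwapchainLatencyCreateInfoNV` (defined above), and each queue submission is
// associated with the present it contributes to through `LatencySubmissionPresentIdNV`. A minimal
// sketch, assuming `use ash::vk;` and that `surface` and `frame_id` already exist:
//
//     let mut latency = vk::SwapchainLatencyCreateInfoNV::default().latency_mode_enable(true);
//     let swapchain_info = vk::SwapchainCreateInfoKHR::default()
//         .surface(surface)
//         .push_next(&mut latency);
//
//     let mut submit_id = vk::LatencySubmissionPresentIdNV::default().present_id(frame_id);
//     let submit_info = vk::SubmitInfo2::default().push_next(&mut submit_id);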
#[inline] pub fn compute_capability_minor(mut self, compute_capability_minor: u32) -> Self { self.compute_capability_minor = compute_capability_minor; self } #[inline] pub fn compute_capability_major(mut self, compute_capability_major: u32) -> Self { self.compute_capability_major = compute_capability_major; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct DeviceQueueShaderCoreControlCreateInfoARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_core_count: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for DeviceQueueShaderCoreControlCreateInfoARM<'_> {} unsafe impl Sync for DeviceQueueShaderCoreControlCreateInfoARM<'_> {} impl ::core::default::Default for DeviceQueueShaderCoreControlCreateInfoARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_core_count: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for DeviceQueueShaderCoreControlCreateInfoARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM; } unsafe impl ExtendsDeviceQueueCreateInfo for DeviceQueueShaderCoreControlCreateInfoARM<'_> {} unsafe impl ExtendsDeviceCreateInfo for DeviceQueueShaderCoreControlCreateInfoARM<'_> {} impl<'a> DeviceQueueShaderCoreControlCreateInfoARM<'a> { #[inline] pub fn shader_core_count(mut self, shader_core_count: u32) -> Self { self.shader_core_count = shader_core_count; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSchedulingControlsFeaturesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub scheduling_controls: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSchedulingControlsFeaturesARM<'_> {} unsafe impl Sync for PhysicalDeviceSchedulingControlsFeaturesARM<'_> {} impl ::core::default::Default for PhysicalDeviceSchedulingControlsFeaturesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), scheduling_controls: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSchedulingControlsFeaturesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSchedulingControlsFeaturesARM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSchedulingControlsFeaturesARM<'_> {} impl<'a> PhysicalDeviceSchedulingControlsFeaturesARM<'a> { #[inline] pub fn scheduling_controls(mut self, scheduling_controls: bool) -> Self { self.scheduling_controls = scheduling_controls.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceSchedulingControlsPropertiesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub scheduling_controls_flags: PhysicalDeviceSchedulingControlsFlagsARM, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceSchedulingControlsPropertiesARM<'_> {} unsafe impl Sync for PhysicalDeviceSchedulingControlsPropertiesARM<'_> {} impl ::core::default::Default for PhysicalDeviceSchedulingControlsPropertiesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), scheduling_controls_flags: 
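// NOTE (editorial sketch, not part of the generated bindings):
// `DeviceQueueShaderCoreControlCreateInfoARM` can extend an individual `DeviceQueueCreateInfo`
// (or the whole `DeviceCreateInfo`) to limit how many shader cores that queue may use. A minimal
// sketch, assuming `use ash::vk;` and that `family_index` already exists:
//
//     let priorities = [1.0f32];
//     let mut core_control = vk::DeviceQueueShaderCoreControlCreateInfoARM::default()
//         .shader_core_count(4);
//     let queue_info = vk::DeviceQueueCreateInfo::default()
//         .queue_family_index(family_index)
//         .queue_priorities(&priorities)
//         .push_next(&mut core_control);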
PhysicalDeviceSchedulingControlsFlagsARM::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceSchedulingControlsPropertiesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSchedulingControlsPropertiesARM<'_> {} impl<'a> PhysicalDeviceSchedulingControlsPropertiesARM<'a> { #[inline] pub fn scheduling_controls_flags( mut self, scheduling_controls_flags: PhysicalDeviceSchedulingControlsFlagsARM, ) -> Self { self.scheduling_controls_flags = scheduling_controls_flags; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub relaxed_line_rasterization: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'_> {} unsafe impl Sync for PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'_> {} impl ::core::default::Default for PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), relaxed_line_rasterization: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'_> {} impl<'a> PhysicalDeviceRelaxedLineRasterizationFeaturesIMG<'a> { #[inline] pub fn relaxed_line_rasterization(mut self, relaxed_line_rasterization: bool) -> Self { self.relaxed_line_rasterization = relaxed_line_rasterization.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRenderPassStripedFeaturesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub render_pass_striped: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRenderPassStripedFeaturesARM<'_> {} unsafe impl Sync for PhysicalDeviceRenderPassStripedFeaturesARM<'_> {} impl ::core::default::Default for PhysicalDeviceRenderPassStripedFeaturesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), render_pass_striped: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRenderPassStripedFeaturesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRenderPassStripedFeaturesARM<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRenderPassStripedFeaturesARM<'_> {} impl<'a> PhysicalDeviceRenderPassStripedFeaturesARM<'a> { #[inline] pub fn render_pass_striped(mut self, render_pass_striped: bool) -> Self { self.render_pass_striped = render_pass_striped.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRenderPassStripedPropertiesARM<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub render_pass_stripe_granularity: Extent2D, pub 
max_render_pass_stripes: u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRenderPassStripedPropertiesARM<'_> {} unsafe impl Sync for PhysicalDeviceRenderPassStripedPropertiesARM<'_> {} impl ::core::default::Default for PhysicalDeviceRenderPassStripedPropertiesARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), render_pass_stripe_granularity: Extent2D::default(), max_render_pass_stripes: u32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRenderPassStripedPropertiesARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRenderPassStripedPropertiesARM<'_> {} impl<'a> PhysicalDeviceRenderPassStripedPropertiesARM<'a> { #[inline] pub fn render_pass_stripe_granularity( mut self, render_pass_stripe_granularity: Extent2D, ) -> Self { self.render_pass_stripe_granularity = render_pass_stripe_granularity; self } #[inline] pub fn max_render_pass_stripes(mut self, max_render_pass_stripes: u32) -> Self { self.max_render_pass_stripes = max_render_pass_stripes; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassStripeInfoARM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stripe_area: Rect2D, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassStripeInfoARM<'_> {} unsafe impl Sync for RenderPassStripeInfoARM<'_> {} impl ::core::default::Default for RenderPassStripeInfoARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stripe_area: Rect2D::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassStripeInfoARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_STRIPE_INFO_ARM; } impl<'a> RenderPassStripeInfoARM<'a> { #[inline] pub fn stripe_area(mut self, stripe_area: Rect2D) -> Self { self.stripe_area = stripe_area; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassStripeBeginInfoARM<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub stripe_info_count: u32, pub p_stripe_infos: *const RenderPassStripeInfoARM<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassStripeBeginInfoARM<'_> {} unsafe impl Sync for RenderPassStripeBeginInfoARM<'_> {} impl ::core::default::Default for RenderPassStripeBeginInfoARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stripe_info_count: u32::default(), p_stripe_infos: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassStripeBeginInfoARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_STRIPE_BEGIN_INFO_ARM; } unsafe impl ExtendsRenderingInfo for RenderPassStripeBeginInfoARM<'_> {} unsafe impl ExtendsRenderPassBeginInfo for RenderPassStripeBeginInfoARM<'_> {} impl<'a> RenderPassStripeBeginInfoARM<'a> { #[inline] pub fn stripe_infos(mut self, stripe_infos: &'a [RenderPassStripeInfoARM<'a>]) -> Self { self.stripe_info_count = stripe_infos.len() as _; self.p_stripe_infos = stripe_infos.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderPassStripeSubmitInfoARM<'a> { pub s_type: StructureType, 
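// Editorial note (not generated): a hedged sketch of how the VK_ARM_render_pass_striped
// structs above and below fit together. Stripe areas go into `RenderPassStripeBeginInfoARM`,
// chained onto the rendering info; per-stripe completion semaphores go into this struct,
// chained onto `CommandBufferSubmitInfo` at queue-submit time (assumes `use ash::vk;` and
// hypothetical `area`, `stripe_done_semaphore`, and `cmd` values):
//
//     let stripes = [vk::RenderPassStripeInfoARM::default().stripe_area(area)];
//     let mut begin = vk::RenderPassStripeBeginInfoARM::default().stripe_infos(&stripes);
//     let rendering_info = vk::RenderingInfo::default().push_next(&mut begin);
//
//     let sems = [vk::SemaphoreSubmitInfo::default().semaphore(stripe_done_semaphore)];
//     let mut stripe_submit =
//         vk::RenderPassStripeSubmitInfoARM::default().stripe_semaphore_infos(&sems);
//     let cb_info = vk::CommandBufferSubmitInfo::default()
//         .command_buffer(cmd)
//         .push_next(&mut stripe_submit);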
pub p_next: *const c_void, pub stripe_semaphore_info_count: u32, pub p_stripe_semaphore_infos: *const SemaphoreSubmitInfo<'a>, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderPassStripeSubmitInfoARM<'_> {} unsafe impl Sync for RenderPassStripeSubmitInfoARM<'_> {} impl ::core::default::Default for RenderPassStripeSubmitInfoARM<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), stripe_semaphore_info_count: u32::default(), p_stripe_semaphore_infos: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderPassStripeSubmitInfoARM<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_STRIPE_SUBMIT_INFO_ARM; } unsafe impl ExtendsCommandBufferSubmitInfo for RenderPassStripeSubmitInfoARM<'_> {} impl<'a> RenderPassStripeSubmitInfoARM<'a> { #[inline] pub fn stripe_semaphore_infos( mut self, stripe_semaphore_infos: &'a [SemaphoreSubmitInfo<'a>], ) -> Self { self.stripe_semaphore_info_count = stripe_semaphore_infos.len() as _; self.p_stripe_semaphore_infos = stripe_semaphore_infos.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_maximal_reconvergence: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_maximal_reconvergence: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'_> {} impl<'a> PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR<'a> { #[inline] pub fn shader_maximal_reconvergence(mut self, shader_maximal_reconvergence: bool) -> Self { self.shader_maximal_reconvergence = shader_maximal_reconvergence.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_subgroup_rotate: Bool32, pub shader_subgroup_rotate_clustered: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_subgroup_rotate: Bool32::default(), shader_subgroup_rotate_clustered: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR; } unsafe impl 
ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'_> {} impl<'a> PhysicalDeviceShaderSubgroupRotateFeaturesKHR<'a> { #[inline] pub fn shader_subgroup_rotate(mut self, shader_subgroup_rotate: bool) -> Self { self.shader_subgroup_rotate = shader_subgroup_rotate.into(); self } #[inline] pub fn shader_subgroup_rotate_clustered( mut self, shader_subgroup_rotate_clustered: bool, ) -> Self { self.shader_subgroup_rotate_clustered = shader_subgroup_rotate_clustered.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderExpectAssumeFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_expect_assume: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderExpectAssumeFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceShaderExpectAssumeFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderExpectAssumeFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_expect_assume: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderExpectAssumeFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderExpectAssumeFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderExpectAssumeFeaturesKHR<'_> {} impl<'a> PhysicalDeviceShaderExpectAssumeFeaturesKHR<'a> { #[inline] pub fn shader_expect_assume(mut self, shader_expect_assume: bool) -> Self { self.shader_expect_assume = shader_expect_assume.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderFloatControls2FeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_float_controls2: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderFloatControls2FeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceShaderFloatControls2FeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderFloatControls2FeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_float_controls2: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderFloatControls2FeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderFloatControls2FeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderFloatControls2FeaturesKHR<'_> {} impl<'a> PhysicalDeviceShaderFloatControls2FeaturesKHR<'a> { #[inline] pub fn shader_float_controls2(mut self, shader_float_controls2: bool) -> Self { self.shader_float_controls2 = shader_float_controls2.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub dynamic_rendering_local_read: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for 
PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), dynamic_rendering_local_read: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'_> {} impl<'a> PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR<'a> { #[inline] pub fn dynamic_rendering_local_read(mut self, dynamic_rendering_local_read: bool) -> Self { self.dynamic_rendering_local_read = dynamic_rendering_local_read.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingAttachmentLocationInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub color_attachment_count: u32, pub p_color_attachment_locations: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingAttachmentLocationInfoKHR<'_> {} unsafe impl Sync for RenderingAttachmentLocationInfoKHR<'_> {} impl ::core::default::Default for RenderingAttachmentLocationInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), color_attachment_count: u32::default(), p_color_attachment_locations: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingAttachmentLocationInfoKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_ATTACHMENT_LOCATION_INFO_KHR; } unsafe impl ExtendsGraphicsPipelineCreateInfo for RenderingAttachmentLocationInfoKHR<'_> {} unsafe impl ExtendsCommandBufferInheritanceInfo for RenderingAttachmentLocationInfoKHR<'_> {} impl<'a> RenderingAttachmentLocationInfoKHR<'a> { #[inline] pub fn color_attachment_locations(mut self, color_attachment_locations: &'a [u32]) -> Self { self.color_attachment_count = color_attachment_locations.len() as _; self.p_color_attachment_locations = color_attachment_locations.as_ptr(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct RenderingInputAttachmentIndexInfoKHR<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub color_attachment_count: u32, pub p_color_attachment_input_indices: *const u32, pub p_depth_input_attachment_index: *const u32, pub p_stencil_input_attachment_index: *const u32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for RenderingInputAttachmentIndexInfoKHR<'_> {} unsafe impl Sync for RenderingInputAttachmentIndexInfoKHR<'_> {} impl ::core::default::Default for RenderingInputAttachmentIndexInfoKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null(), color_attachment_count: u32::default(), p_color_attachment_input_indices: ::core::ptr::null(), p_depth_input_attachment_index: ::core::ptr::null(), p_stencil_input_attachment_index: ::core::ptr::null(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for RenderingInputAttachmentIndexInfoKHR<'a> { const STRUCTURE_TYPE: 
StructureType = StructureType::RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR; } unsafe impl ExtendsGraphicsPipelineCreateInfo for RenderingInputAttachmentIndexInfoKHR<'_> {} unsafe impl ExtendsCommandBufferInheritanceInfo for RenderingInputAttachmentIndexInfoKHR<'_> {} impl<'a> RenderingInputAttachmentIndexInfoKHR<'a> { #[inline] pub fn color_attachment_input_indices( mut self, color_attachment_input_indices: &'a [u32], ) -> Self { self.color_attachment_count = color_attachment_input_indices.len() as _; self.p_color_attachment_input_indices = color_attachment_input_indices.as_ptr(); self } #[inline] pub fn depth_input_attachment_index(mut self, depth_input_attachment_index: &'a u32) -> Self { self.p_depth_input_attachment_index = depth_input_attachment_index; self } #[inline] pub fn stencil_input_attachment_index( mut self, stencil_input_attachment_index: &'a u32, ) -> Self { self.p_stencil_input_attachment_index = stencil_input_attachment_index; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderQuadControlFeaturesKHR<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_quad_control: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderQuadControlFeaturesKHR<'_> {} unsafe impl Sync for PhysicalDeviceShaderQuadControlFeaturesKHR<'_> {} impl ::core::default::Default for PhysicalDeviceShaderQuadControlFeaturesKHR<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_quad_control: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderQuadControlFeaturesKHR<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderQuadControlFeaturesKHR<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderQuadControlFeaturesKHR<'_> {} impl<'a> PhysicalDeviceShaderQuadControlFeaturesKHR<'a> { #[inline] pub fn shader_quad_control(mut self, shader_quad_control: bool) -> Self { self.shader_quad_control = shader_quad_control.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_float16_vector_atomics: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_float16_vector_atomics: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'_> { } unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'_> {} impl<'a> PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV<'a> { #[inline] pub fn shader_float16_vector_atomics(mut self, shader_float16_vector_atomics: bool) -> Self { 
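// Editorial note (not generated): setters on feature structs take a Rust `bool` and store it
// as a `vk::Bool32` via `.into()`. To actually enable a feature, chain the struct onto
// `DeviceCreateInfo` — a hedged sketch (assumes `use ash::vk;` plus hypothetical
// `queue_infos` and `ext_names` slices):
//
//     let mut f16_atomics = vk::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV::default()
//         .shader_float16_vector_atomics(true);
//     let device_info = vk::DeviceCreateInfo::default()
//         .queue_create_infos(&queue_infos)
//         .enabled_extension_names(&ext_names)
//         .push_next(&mut f16_atomics);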
self.shader_float16_vector_atomics = shader_float16_vector_atomics.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMapMemoryPlacedFeaturesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub memory_map_placed: Bool32, pub memory_map_range_placed: Bool32, pub memory_unmap_reserve: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMapMemoryPlacedFeaturesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMapMemoryPlacedFeaturesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMapMemoryPlacedFeaturesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), memory_map_placed: Bool32::default(), memory_map_range_placed: Bool32::default(), memory_unmap_reserve: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMapMemoryPlacedFeaturesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMapMemoryPlacedFeaturesEXT<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMapMemoryPlacedFeaturesEXT<'_> {} impl<'a> PhysicalDeviceMapMemoryPlacedFeaturesEXT<'a> { #[inline] pub fn memory_map_placed(mut self, memory_map_placed: bool) -> Self { self.memory_map_placed = memory_map_placed.into(); self } #[inline] pub fn memory_map_range_placed(mut self, memory_map_range_placed: bool) -> Self { self.memory_map_range_placed = memory_map_range_placed.into(); self } #[inline] pub fn memory_unmap_reserve(mut self, memory_unmap_reserve: bool) -> Self { self.memory_unmap_reserve = memory_unmap_reserve.into(); self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceMapMemoryPlacedPropertiesEXT<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub min_placed_memory_map_alignment: DeviceSize, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceMapMemoryPlacedPropertiesEXT<'_> {} unsafe impl Sync for PhysicalDeviceMapMemoryPlacedPropertiesEXT<'_> {} impl ::core::default::Default for PhysicalDeviceMapMemoryPlacedPropertiesEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), min_placed_memory_map_alignment: DeviceSize::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceMapMemoryPlacedPropertiesEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT; } unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMapMemoryPlacedPropertiesEXT<'_> {} impl<'a> PhysicalDeviceMapMemoryPlacedPropertiesEXT<'a> { #[inline] pub fn min_placed_memory_map_alignment( mut self, min_placed_memory_map_alignment: DeviceSize, ) -> Self { self.min_placed_memory_map_alignment = min_placed_memory_map_alignment; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct MemoryMapPlacedInfoEXT<'a> { pub s_type: StructureType, pub p_next: *const c_void, pub p_placed_address: *mut c_void, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for MemoryMapPlacedInfoEXT<'_> {} unsafe impl Sync for MemoryMapPlacedInfoEXT<'_> {} impl ::core::default::Default for MemoryMapPlacedInfoEXT<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, 
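// Editorial note (not generated): `MemoryMapPlacedInfoEXT` extends `MemoryMapInfoKHR`
// (see `ExtendsMemoryMapInfoKHR` below); the pointer given to `placed_address()` must honour
// `PhysicalDeviceMapMemoryPlacedPropertiesEXT::min_placed_memory_map_alignment`. A hedged
// chaining sketch (hypothetical `addr` and `memory` values; real code also sets the
// placed-map flag on `MemoryMapInfoKHR`, omitted here):
//
//     let mut placed = vk::MemoryMapPlacedInfoEXT::default().placed_address(addr);
//     let map_info = vk::MemoryMapInfoKHR::default()
//         .memory(memory)
//         .size(vk::WHOLE_SIZE)
//         .push_next(&mut placed);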
p_next: ::core::ptr::null(), p_placed_address: ::core::ptr::null_mut(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for MemoryMapPlacedInfoEXT<'a> { const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_MAP_PLACED_INFO_EXT; } unsafe impl ExtendsMemoryMapInfoKHR for MemoryMapPlacedInfoEXT<'_> {} impl<'a> MemoryMapPlacedInfoEXT<'a> { #[inline] pub fn placed_address(mut self, placed_address: *mut c_void) -> Self { self.p_placed_address = placed_address; self } } #[repr(C)] #[cfg_attr(feature = "debug", derive(Debug))] #[derive(Copy, Clone)] #[doc = ""] #[must_use] pub struct PhysicalDeviceRawAccessChainsFeaturesNV<'a> { pub s_type: StructureType, pub p_next: *mut c_void, pub shader_raw_access_chains: Bool32, pub _marker: PhantomData<&'a ()>, } unsafe impl Send for PhysicalDeviceRawAccessChainsFeaturesNV<'_> {} unsafe impl Sync for PhysicalDeviceRawAccessChainsFeaturesNV<'_> {} impl ::core::default::Default for PhysicalDeviceRawAccessChainsFeaturesNV<'_> { #[inline] fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: ::core::ptr::null_mut(), shader_raw_access_chains: Bool32::default(), _marker: PhantomData, } } } unsafe impl<'a> TaggedStructure for PhysicalDeviceRawAccessChainsFeaturesNV<'a> { const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV; } unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRawAccessChainsFeaturesNV<'_> {} unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRawAccessChainsFeaturesNV<'_> {} impl<'a> PhysicalDeviceRawAccessChainsFeaturesNV<'a> { #[inline] pub fn shader_raw_access_chains(mut self, shader_raw_access_chains: bool) -> Self { self.shader_raw_access_chains = shader_raw_access_chains.into(); self } }
ash-0.38.0+1.3.281/src/vk/enums.rs
use core::fmt; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ImageLayout(pub(crate) i32); impl ImageLayout { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ImageLayout { #[doc = "Implicit layout an image is when its contents are undefined due to various reasons (e.g.
right after creation)"] pub const UNDEFINED: Self = Self(0); #[doc = "General layout when image can be used for any kind of access"] pub const GENERAL: Self = Self(1); #[doc = "Optimal layout when image is only used for color attachment read/write"] pub const COLOR_ATTACHMENT_OPTIMAL: Self = Self(2); #[doc = "Optimal layout when image is only used for depth/stencil attachment read/write"] pub const DEPTH_STENCIL_ATTACHMENT_OPTIMAL: Self = Self(3); #[doc = "Optimal layout when image is used for read only depth/stencil attachment and shader access"] pub const DEPTH_STENCIL_READ_ONLY_OPTIMAL: Self = Self(4); #[doc = "Optimal layout when image is used for read only shader access"] pub const SHADER_READ_ONLY_OPTIMAL: Self = Self(5); #[doc = "Optimal layout when image is used only as source of transfer operations"] pub const TRANSFER_SRC_OPTIMAL: Self = Self(6); #[doc = "Optimal layout when image is used only as destination of transfer operations"] pub const TRANSFER_DST_OPTIMAL: Self = Self(7); #[doc = "Initial layout used when the data is populated by the CPU"] pub const PREINITIALIZED: Self = Self(8); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AttachmentLoadOp(pub(crate) i32); impl AttachmentLoadOp { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AttachmentLoadOp { pub const LOAD: Self = Self(0); pub const CLEAR: Self = Self(1); pub const DONT_CARE: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AttachmentStoreOp(pub(crate) i32); impl AttachmentStoreOp { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AttachmentStoreOp { pub const STORE: Self = Self(0); pub const DONT_CARE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ImageType(pub(crate) i32); impl ImageType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ImageType { pub const TYPE_1D: Self = Self(0); pub const TYPE_2D: Self = Self(1); pub const TYPE_3D: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ImageTiling(pub(crate) i32); impl ImageTiling { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ImageTiling { pub const OPTIMAL: Self = Self(0); pub const LINEAR: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ImageViewType(pub(crate) i32); impl ImageViewType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ImageViewType { pub const TYPE_1D: Self = Self(0); pub const TYPE_2D: Self = Self(1); pub const TYPE_3D: Self = Self(2); pub const CUBE: Self = Self(3); pub const TYPE_1D_ARRAY: Self = Self(4); pub const TYPE_2D_ARRAY: Self = Self(5); pub const CUBE_ARRAY: Self = Self(6); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CommandBufferLevel(pub(crate) i32); impl CommandBufferLevel { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl 
CommandBufferLevel { pub const PRIMARY: Self = Self(0); pub const SECONDARY: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ComponentSwizzle(pub(crate) i32); impl ComponentSwizzle { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ComponentSwizzle { pub const IDENTITY: Self = Self(0); pub const ZERO: Self = Self(1); pub const ONE: Self = Self(2); pub const R: Self = Self(3); pub const G: Self = Self(4); pub const B: Self = Self(5); pub const A: Self = Self(6); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DescriptorType(pub(crate) i32); impl DescriptorType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DescriptorType { pub const SAMPLER: Self = Self(0); pub const COMBINED_IMAGE_SAMPLER: Self = Self(1); pub const SAMPLED_IMAGE: Self = Self(2); pub const STORAGE_IMAGE: Self = Self(3); pub const UNIFORM_TEXEL_BUFFER: Self = Self(4); pub const STORAGE_TEXEL_BUFFER: Self = Self(5); pub const UNIFORM_BUFFER: Self = Self(6); pub const STORAGE_BUFFER: Self = Self(7); pub const UNIFORM_BUFFER_DYNAMIC: Self = Self(8); pub const STORAGE_BUFFER_DYNAMIC: Self = Self(9); pub const INPUT_ATTACHMENT: Self = Self(10); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct QueryType(pub(crate) i32); impl QueryType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl QueryType { pub const OCCLUSION: Self = Self(0); #[doc = "Optional"] pub const PIPELINE_STATISTICS: Self = Self(1); pub const TIMESTAMP: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BorderColor(pub(crate) i32); impl BorderColor { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BorderColor { pub const FLOAT_TRANSPARENT_BLACK: Self = Self(0); pub const INT_TRANSPARENT_BLACK: Self = Self(1); pub const FLOAT_OPAQUE_BLACK: Self = Self(2); pub const INT_OPAQUE_BLACK: Self = Self(3); pub const FLOAT_OPAQUE_WHITE: Self = Self(4); pub const INT_OPAQUE_WHITE: Self = Self(5); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PipelineBindPoint(pub(crate) i32); impl PipelineBindPoint { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PipelineBindPoint { pub const GRAPHICS: Self = Self(0); pub const COMPUTE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PipelineCacheHeaderVersion(pub(crate) i32); impl PipelineCacheHeaderVersion { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PipelineCacheHeaderVersion { pub const ONE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PrimitiveTopology(pub(crate) i32); impl PrimitiveTopology { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PrimitiveTopology { pub const POINT_LIST: Self = Self(0); 
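// Editorial note (not generated): ash models Vulkan enums as `#[repr(transparent)]` newtypes
// over `i32` rather than Rust `enum`s, so values introduced by newer headers or vendor
// extensions stay representable. Round-tripping goes through the raw value, e.g.:
//
//     assert_eq!(vk::PrimitiveTopology::from_raw(3), vk::PrimitiveTopology::TRIANGLE_LIST);
//     assert_eq!(vk::PrimitiveTopology::TRIANGLE_LIST.as_raw(), 3);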
pub const LINE_LIST: Self = Self(1); pub const LINE_STRIP: Self = Self(2); pub const TRIANGLE_LIST: Self = Self(3); pub const TRIANGLE_STRIP: Self = Self(4); pub const TRIANGLE_FAN: Self = Self(5); pub const LINE_LIST_WITH_ADJACENCY: Self = Self(6); pub const LINE_STRIP_WITH_ADJACENCY: Self = Self(7); pub const TRIANGLE_LIST_WITH_ADJACENCY: Self = Self(8); pub const TRIANGLE_STRIP_WITH_ADJACENCY: Self = Self(9); pub const PATCH_LIST: Self = Self(10); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SharingMode(pub(crate) i32); impl SharingMode { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SharingMode { pub const EXCLUSIVE: Self = Self(0); pub const CONCURRENT: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct IndexType(pub(crate) i32); impl IndexType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl IndexType { pub const UINT16: Self = Self(0); pub const UINT32: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct Filter(pub(crate) i32); impl Filter { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl Filter { pub const NEAREST: Self = Self(0); pub const LINEAR: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SamplerMipmapMode(pub(crate) i32); impl SamplerMipmapMode { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SamplerMipmapMode { #[doc = "Choose nearest mip level"] pub const NEAREST: Self = Self(0); #[doc = "Linear filter between mip levels"] pub const LINEAR: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SamplerAddressMode(pub(crate) i32); impl SamplerAddressMode { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SamplerAddressMode { pub const REPEAT: Self = Self(0); pub const MIRRORED_REPEAT: Self = Self(1); pub const CLAMP_TO_EDGE: Self = Self(2); pub const CLAMP_TO_BORDER: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CompareOp(pub(crate) i32); impl CompareOp { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CompareOp { pub const NEVER: Self = Self(0); pub const LESS: Self = Self(1); pub const EQUAL: Self = Self(2); pub const LESS_OR_EQUAL: Self = Self(3); pub const GREATER: Self = Self(4); pub const NOT_EQUAL: Self = Self(5); pub const GREATER_OR_EQUAL: Self = Self(6); pub const ALWAYS: Self = Self(7); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PolygonMode(pub(crate) i32); impl PolygonMode { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PolygonMode { pub const FILL: Self = Self(0); pub const LINE: Self = Self(1); pub const POINT: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] 
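// Editorial note (not generated): the derived `Default` on these newtypes is the zero value,
// e.g. `FrontFace::default() == FrontFace::COUNTER_CLOCKWISE` and
// `PolygonMode::default() == PolygonMode::FILL`.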
#[repr(transparent)] #[doc = ""] pub struct FrontFace(pub(crate) i32); impl FrontFace { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl FrontFace { pub const COUNTER_CLOCKWISE: Self = Self(0); pub const CLOCKWISE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BlendFactor(pub(crate) i32); impl BlendFactor { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BlendFactor { pub const ZERO: Self = Self(0); pub const ONE: Self = Self(1); pub const SRC_COLOR: Self = Self(2); pub const ONE_MINUS_SRC_COLOR: Self = Self(3); pub const DST_COLOR: Self = Self(4); pub const ONE_MINUS_DST_COLOR: Self = Self(5); pub const SRC_ALPHA: Self = Self(6); pub const ONE_MINUS_SRC_ALPHA: Self = Self(7); pub const DST_ALPHA: Self = Self(8); pub const ONE_MINUS_DST_ALPHA: Self = Self(9); pub const CONSTANT_COLOR: Self = Self(10); pub const ONE_MINUS_CONSTANT_COLOR: Self = Self(11); pub const CONSTANT_ALPHA: Self = Self(12); pub const ONE_MINUS_CONSTANT_ALPHA: Self = Self(13); pub const SRC_ALPHA_SATURATE: Self = Self(14); pub const SRC1_COLOR: Self = Self(15); pub const ONE_MINUS_SRC1_COLOR: Self = Self(16); pub const SRC1_ALPHA: Self = Self(17); pub const ONE_MINUS_SRC1_ALPHA: Self = Self(18); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BlendOp(pub(crate) i32); impl BlendOp { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BlendOp { pub const ADD: Self = Self(0); pub const SUBTRACT: Self = Self(1); pub const REVERSE_SUBTRACT: Self = Self(2); pub const MIN: Self = Self(3); pub const MAX: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct StencilOp(pub(crate) i32); impl StencilOp { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl StencilOp { pub const KEEP: Self = Self(0); pub const ZERO: Self = Self(1); pub const REPLACE: Self = Self(2); pub const INCREMENT_AND_CLAMP: Self = Self(3); pub const DECREMENT_AND_CLAMP: Self = Self(4); pub const INVERT: Self = Self(5); pub const INCREMENT_AND_WRAP: Self = Self(6); pub const DECREMENT_AND_WRAP: Self = Self(7); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct LogicOp(pub(crate) i32); impl LogicOp { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl LogicOp { pub const CLEAR: Self = Self(0); pub const AND: Self = Self(1); pub const AND_REVERSE: Self = Self(2); pub const COPY: Self = Self(3); pub const AND_INVERTED: Self = Self(4); pub const NO_OP: Self = Self(5); pub const XOR: Self = Self(6); pub const OR: Self = Self(7); pub const NOR: Self = Self(8); pub const EQUIVALENT: Self = Self(9); pub const INVERT: Self = Self(10); pub const OR_REVERSE: Self = Self(11); pub const COPY_INVERTED: Self = Self(12); pub const OR_INVERTED: Self = Self(13); pub const NAND: Self = Self(14); pub const SET: Self = Self(15); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct InternalAllocationType(pub(crate) i32); impl InternalAllocationType { #[inline] pub const fn from_raw(x: 
i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl InternalAllocationType { pub const EXECUTABLE: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SystemAllocationScope(pub(crate) i32); impl SystemAllocationScope { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SystemAllocationScope { pub const COMMAND: Self = Self(0); pub const OBJECT: Self = Self(1); pub const CACHE: Self = Self(2); pub const DEVICE: Self = Self(3); pub const INSTANCE: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PhysicalDeviceType(pub(crate) i32); impl PhysicalDeviceType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PhysicalDeviceType { pub const OTHER: Self = Self(0); pub const INTEGRATED_GPU: Self = Self(1); pub const DISCRETE_GPU: Self = Self(2); pub const VIRTUAL_GPU: Self = Self(3); pub const CPU: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct VertexInputRate(pub(crate) i32); impl VertexInputRate { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl VertexInputRate { pub const VERTEX: Self = Self(0); pub const INSTANCE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct Format(pub(crate) i32); impl Format { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl Format { pub const UNDEFINED: Self = Self(0); pub const R4G4_UNORM_PACK8: Self = Self(1); pub const R4G4B4A4_UNORM_PACK16: Self = Self(2); pub const B4G4R4A4_UNORM_PACK16: Self = Self(3); pub const R5G6B5_UNORM_PACK16: Self = Self(4); pub const B5G6R5_UNORM_PACK16: Self = Self(5); pub const R5G5B5A1_UNORM_PACK16: Self = Self(6); pub const B5G5R5A1_UNORM_PACK16: Self = Self(7); pub const A1R5G5B5_UNORM_PACK16: Self = Self(8); pub const R8_UNORM: Self = Self(9); pub const R8_SNORM: Self = Self(10); pub const R8_USCALED: Self = Self(11); pub const R8_SSCALED: Self = Self(12); pub const R8_UINT: Self = Self(13); pub const R8_SINT: Self = Self(14); pub const R8_SRGB: Self = Self(15); pub const R8G8_UNORM: Self = Self(16); pub const R8G8_SNORM: Self = Self(17); pub const R8G8_USCALED: Self = Self(18); pub const R8G8_SSCALED: Self = Self(19); pub const R8G8_UINT: Self = Self(20); pub const R8G8_SINT: Self = Self(21); pub const R8G8_SRGB: Self = Self(22); pub const R8G8B8_UNORM: Self = Self(23); pub const R8G8B8_SNORM: Self = Self(24); pub const R8G8B8_USCALED: Self = Self(25); pub const R8G8B8_SSCALED: Self = Self(26); pub const R8G8B8_UINT: Self = Self(27); pub const R8G8B8_SINT: Self = Self(28); pub const R8G8B8_SRGB: Self = Self(29); pub const B8G8R8_UNORM: Self = Self(30); pub const B8G8R8_SNORM: Self = Self(31); pub const B8G8R8_USCALED: Self = Self(32); pub const B8G8R8_SSCALED: Self = Self(33); pub const B8G8R8_UINT: Self = Self(34); pub const B8G8R8_SINT: Self = Self(35); pub const B8G8R8_SRGB: Self = Self(36); pub const R8G8B8A8_UNORM: Self = Self(37); pub const R8G8B8A8_SNORM: Self = Self(38); pub const R8G8B8A8_USCALED: Self = Self(39); pub const R8G8B8A8_SSCALED: Self = Self(40); pub const R8G8B8A8_UINT: Self 
= Self(41); pub const R8G8B8A8_SINT: Self = Self(42); pub const R8G8B8A8_SRGB: Self = Self(43); pub const B8G8R8A8_UNORM: Self = Self(44); pub const B8G8R8A8_SNORM: Self = Self(45); pub const B8G8R8A8_USCALED: Self = Self(46); pub const B8G8R8A8_SSCALED: Self = Self(47); pub const B8G8R8A8_UINT: Self = Self(48); pub const B8G8R8A8_SINT: Self = Self(49); pub const B8G8R8A8_SRGB: Self = Self(50); pub const A8B8G8R8_UNORM_PACK32: Self = Self(51); pub const A8B8G8R8_SNORM_PACK32: Self = Self(52); pub const A8B8G8R8_USCALED_PACK32: Self = Self(53); pub const A8B8G8R8_SSCALED_PACK32: Self = Self(54); pub const A8B8G8R8_UINT_PACK32: Self = Self(55); pub const A8B8G8R8_SINT_PACK32: Self = Self(56); pub const A8B8G8R8_SRGB_PACK32: Self = Self(57); pub const A2R10G10B10_UNORM_PACK32: Self = Self(58); pub const A2R10G10B10_SNORM_PACK32: Self = Self(59); pub const A2R10G10B10_USCALED_PACK32: Self = Self(60); pub const A2R10G10B10_SSCALED_PACK32: Self = Self(61); pub const A2R10G10B10_UINT_PACK32: Self = Self(62); pub const A2R10G10B10_SINT_PACK32: Self = Self(63); pub const A2B10G10R10_UNORM_PACK32: Self = Self(64); pub const A2B10G10R10_SNORM_PACK32: Self = Self(65); pub const A2B10G10R10_USCALED_PACK32: Self = Self(66); pub const A2B10G10R10_SSCALED_PACK32: Self = Self(67); pub const A2B10G10R10_UINT_PACK32: Self = Self(68); pub const A2B10G10R10_SINT_PACK32: Self = Self(69); pub const R16_UNORM: Self = Self(70); pub const R16_SNORM: Self = Self(71); pub const R16_USCALED: Self = Self(72); pub const R16_SSCALED: Self = Self(73); pub const R16_UINT: Self = Self(74); pub const R16_SINT: Self = Self(75); pub const R16_SFLOAT: Self = Self(76); pub const R16G16_UNORM: Self = Self(77); pub const R16G16_SNORM: Self = Self(78); pub const R16G16_USCALED: Self = Self(79); pub const R16G16_SSCALED: Self = Self(80); pub const R16G16_UINT: Self = Self(81); pub const R16G16_SINT: Self = Self(82); pub const R16G16_SFLOAT: Self = Self(83); pub const R16G16B16_UNORM: Self = Self(84); pub const R16G16B16_SNORM: Self = Self(85); pub const R16G16B16_USCALED: Self = Self(86); pub const R16G16B16_SSCALED: Self = Self(87); pub const R16G16B16_UINT: Self = Self(88); pub const R16G16B16_SINT: Self = Self(89); pub const R16G16B16_SFLOAT: Self = Self(90); pub const R16G16B16A16_UNORM: Self = Self(91); pub const R16G16B16A16_SNORM: Self = Self(92); pub const R16G16B16A16_USCALED: Self = Self(93); pub const R16G16B16A16_SSCALED: Self = Self(94); pub const R16G16B16A16_UINT: Self = Self(95); pub const R16G16B16A16_SINT: Self = Self(96); pub const R16G16B16A16_SFLOAT: Self = Self(97); pub const R32_UINT: Self = Self(98); pub const R32_SINT: Self = Self(99); pub const R32_SFLOAT: Self = Self(100); pub const R32G32_UINT: Self = Self(101); pub const R32G32_SINT: Self = Self(102); pub const R32G32_SFLOAT: Self = Self(103); pub const R32G32B32_UINT: Self = Self(104); pub const R32G32B32_SINT: Self = Self(105); pub const R32G32B32_SFLOAT: Self = Self(106); pub const R32G32B32A32_UINT: Self = Self(107); pub const R32G32B32A32_SINT: Self = Self(108); pub const R32G32B32A32_SFLOAT: Self = Self(109); pub const R64_UINT: Self = Self(110); pub const R64_SINT: Self = Self(111); pub const R64_SFLOAT: Self = Self(112); pub const R64G64_UINT: Self = Self(113); pub const R64G64_SINT: Self = Self(114); pub const R64G64_SFLOAT: Self = Self(115); pub const R64G64B64_UINT: Self = Self(116); pub const R64G64B64_SINT: Self = Self(117); pub const R64G64B64_SFLOAT: Self = Self(118); pub const R64G64B64A64_UINT: Self = Self(119); pub const 
R64G64B64A64_SINT: Self = Self(120); pub const R64G64B64A64_SFLOAT: Self = Self(121); pub const B10G11R11_UFLOAT_PACK32: Self = Self(122); pub const E5B9G9R9_UFLOAT_PACK32: Self = Self(123); pub const D16_UNORM: Self = Self(124); pub const X8_D24_UNORM_PACK32: Self = Self(125); pub const D32_SFLOAT: Self = Self(126); pub const S8_UINT: Self = Self(127); pub const D16_UNORM_S8_UINT: Self = Self(128); pub const D24_UNORM_S8_UINT: Self = Self(129); pub const D32_SFLOAT_S8_UINT: Self = Self(130); pub const BC1_RGB_UNORM_BLOCK: Self = Self(131); pub const BC1_RGB_SRGB_BLOCK: Self = Self(132); pub const BC1_RGBA_UNORM_BLOCK: Self = Self(133); pub const BC1_RGBA_SRGB_BLOCK: Self = Self(134); pub const BC2_UNORM_BLOCK: Self = Self(135); pub const BC2_SRGB_BLOCK: Self = Self(136); pub const BC3_UNORM_BLOCK: Self = Self(137); pub const BC3_SRGB_BLOCK: Self = Self(138); pub const BC4_UNORM_BLOCK: Self = Self(139); pub const BC4_SNORM_BLOCK: Self = Self(140); pub const BC5_UNORM_BLOCK: Self = Self(141); pub const BC5_SNORM_BLOCK: Self = Self(142); pub const BC6H_UFLOAT_BLOCK: Self = Self(143); pub const BC6H_SFLOAT_BLOCK: Self = Self(144); pub const BC7_UNORM_BLOCK: Self = Self(145); pub const BC7_SRGB_BLOCK: Self = Self(146); pub const ETC2_R8G8B8_UNORM_BLOCK: Self = Self(147); pub const ETC2_R8G8B8_SRGB_BLOCK: Self = Self(148); pub const ETC2_R8G8B8A1_UNORM_BLOCK: Self = Self(149); pub const ETC2_R8G8B8A1_SRGB_BLOCK: Self = Self(150); pub const ETC2_R8G8B8A8_UNORM_BLOCK: Self = Self(151); pub const ETC2_R8G8B8A8_SRGB_BLOCK: Self = Self(152); pub const EAC_R11_UNORM_BLOCK: Self = Self(153); pub const EAC_R11_SNORM_BLOCK: Self = Self(154); pub const EAC_R11G11_UNORM_BLOCK: Self = Self(155); pub const EAC_R11G11_SNORM_BLOCK: Self = Self(156); pub const ASTC_4X4_UNORM_BLOCK: Self = Self(157); pub const ASTC_4X4_SRGB_BLOCK: Self = Self(158); pub const ASTC_5X4_UNORM_BLOCK: Self = Self(159); pub const ASTC_5X4_SRGB_BLOCK: Self = Self(160); pub const ASTC_5X5_UNORM_BLOCK: Self = Self(161); pub const ASTC_5X5_SRGB_BLOCK: Self = Self(162); pub const ASTC_6X5_UNORM_BLOCK: Self = Self(163); pub const ASTC_6X5_SRGB_BLOCK: Self = Self(164); pub const ASTC_6X6_UNORM_BLOCK: Self = Self(165); pub const ASTC_6X6_SRGB_BLOCK: Self = Self(166); pub const ASTC_8X5_UNORM_BLOCK: Self = Self(167); pub const ASTC_8X5_SRGB_BLOCK: Self = Self(168); pub const ASTC_8X6_UNORM_BLOCK: Self = Self(169); pub const ASTC_8X6_SRGB_BLOCK: Self = Self(170); pub const ASTC_8X8_UNORM_BLOCK: Self = Self(171); pub const ASTC_8X8_SRGB_BLOCK: Self = Self(172); pub const ASTC_10X5_UNORM_BLOCK: Self = Self(173); pub const ASTC_10X5_SRGB_BLOCK: Self = Self(174); pub const ASTC_10X6_UNORM_BLOCK: Self = Self(175); pub const ASTC_10X6_SRGB_BLOCK: Self = Self(176); pub const ASTC_10X8_UNORM_BLOCK: Self = Self(177); pub const ASTC_10X8_SRGB_BLOCK: Self = Self(178); pub const ASTC_10X10_UNORM_BLOCK: Self = Self(179); pub const ASTC_10X10_SRGB_BLOCK: Self = Self(180); pub const ASTC_12X10_UNORM_BLOCK: Self = Self(181); pub const ASTC_12X10_SRGB_BLOCK: Self = Self(182); pub const ASTC_12X12_UNORM_BLOCK: Self = Self(183); pub const ASTC_12X12_SRGB_BLOCK: Self = Self(184); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct StructureType(pub(crate) i32); impl StructureType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl StructureType { pub const APPLICATION_INFO: Self = Self(0); pub const INSTANCE_CREATE_INFO: Self = 
Self(1); pub const DEVICE_QUEUE_CREATE_INFO: Self = Self(2); pub const DEVICE_CREATE_INFO: Self = Self(3); pub const SUBMIT_INFO: Self = Self(4); pub const MEMORY_ALLOCATE_INFO: Self = Self(5); pub const MAPPED_MEMORY_RANGE: Self = Self(6); pub const BIND_SPARSE_INFO: Self = Self(7); pub const FENCE_CREATE_INFO: Self = Self(8); pub const SEMAPHORE_CREATE_INFO: Self = Self(9); pub const EVENT_CREATE_INFO: Self = Self(10); pub const QUERY_POOL_CREATE_INFO: Self = Self(11); pub const BUFFER_CREATE_INFO: Self = Self(12); pub const BUFFER_VIEW_CREATE_INFO: Self = Self(13); pub const IMAGE_CREATE_INFO: Self = Self(14); pub const IMAGE_VIEW_CREATE_INFO: Self = Self(15); pub const SHADER_MODULE_CREATE_INFO: Self = Self(16); pub const PIPELINE_CACHE_CREATE_INFO: Self = Self(17); pub const PIPELINE_SHADER_STAGE_CREATE_INFO: Self = Self(18); pub const PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO: Self = Self(19); pub const PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO: Self = Self(20); pub const PIPELINE_TESSELLATION_STATE_CREATE_INFO: Self = Self(21); pub const PIPELINE_VIEWPORT_STATE_CREATE_INFO: Self = Self(22); pub const PIPELINE_RASTERIZATION_STATE_CREATE_INFO: Self = Self(23); pub const PIPELINE_MULTISAMPLE_STATE_CREATE_INFO: Self = Self(24); pub const PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO: Self = Self(25); pub const PIPELINE_COLOR_BLEND_STATE_CREATE_INFO: Self = Self(26); pub const PIPELINE_DYNAMIC_STATE_CREATE_INFO: Self = Self(27); pub const GRAPHICS_PIPELINE_CREATE_INFO: Self = Self(28); pub const COMPUTE_PIPELINE_CREATE_INFO: Self = Self(29); pub const PIPELINE_LAYOUT_CREATE_INFO: Self = Self(30); pub const SAMPLER_CREATE_INFO: Self = Self(31); pub const DESCRIPTOR_SET_LAYOUT_CREATE_INFO: Self = Self(32); pub const DESCRIPTOR_POOL_CREATE_INFO: Self = Self(33); pub const DESCRIPTOR_SET_ALLOCATE_INFO: Self = Self(34); pub const WRITE_DESCRIPTOR_SET: Self = Self(35); pub const COPY_DESCRIPTOR_SET: Self = Self(36); pub const FRAMEBUFFER_CREATE_INFO: Self = Self(37); pub const RENDER_PASS_CREATE_INFO: Self = Self(38); pub const COMMAND_POOL_CREATE_INFO: Self = Self(39); pub const COMMAND_BUFFER_ALLOCATE_INFO: Self = Self(40); pub const COMMAND_BUFFER_INHERITANCE_INFO: Self = Self(41); pub const COMMAND_BUFFER_BEGIN_INFO: Self = Self(42); pub const RENDER_PASS_BEGIN_INFO: Self = Self(43); pub const BUFFER_MEMORY_BARRIER: Self = Self(44); pub const IMAGE_MEMORY_BARRIER: Self = Self(45); pub const MEMORY_BARRIER: Self = Self(46); #[doc = "Reserved for internal use by the loader, layers, and ICDs"] pub const LOADER_INSTANCE_CREATE_INFO: Self = Self(47); #[doc = "Reserved for internal use by the loader, layers, and ICDs"] pub const LOADER_DEVICE_CREATE_INFO: Self = Self(48); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SubpassContents(pub(crate) i32); impl SubpassContents { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SubpassContents { pub const INLINE: Self = Self(0); pub const SECONDARY_COMMAND_BUFFERS: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] #[must_use] pub struct Result(pub(crate) i32); impl Result { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl Result { #[doc = "Command completed successfully"] pub const SUCCESS: Self = Self(0); #[doc = "A fence or query has not yet completed"] pub const NOT_READY: Self 
= Self(1); #[doc = "A wait operation has not completed in the specified time"] pub const TIMEOUT: Self = Self(2); #[doc = "An event is signaled"] pub const EVENT_SET: Self = Self(3); #[doc = "An event is unsignaled"] pub const EVENT_RESET: Self = Self(4); #[doc = "A return array was too small for the result"] pub const INCOMPLETE: Self = Self(5); #[doc = "A host memory allocation has failed"] pub const ERROR_OUT_OF_HOST_MEMORY: Self = Self(-1); #[doc = "A device memory allocation has failed"] pub const ERROR_OUT_OF_DEVICE_MEMORY: Self = Self(-2); #[doc = "Initialization of an object has failed"] pub const ERROR_INITIALIZATION_FAILED: Self = Self(-3); #[doc = "The logical device has been lost. See "] pub const ERROR_DEVICE_LOST: Self = Self(-4); #[doc = "Mapping of a memory object has failed"] pub const ERROR_MEMORY_MAP_FAILED: Self = Self(-5); #[doc = "Layer specified does not exist"] pub const ERROR_LAYER_NOT_PRESENT: Self = Self(-6); #[doc = "Extension specified does not exist"] pub const ERROR_EXTENSION_NOT_PRESENT: Self = Self(-7); #[doc = "Requested feature is not available on this device"] pub const ERROR_FEATURE_NOT_PRESENT: Self = Self(-8); #[doc = "Unable to find a Vulkan driver"] pub const ERROR_INCOMPATIBLE_DRIVER: Self = Self(-9); #[doc = "Too many objects of the type have already been created"] pub const ERROR_TOO_MANY_OBJECTS: Self = Self(-10); #[doc = "Requested format is not supported on this device"] pub const ERROR_FORMAT_NOT_SUPPORTED: Self = Self(-11); #[doc = "A requested pool allocation has failed due to fragmentation of the pool's memory"] pub const ERROR_FRAGMENTED_POOL: Self = Self(-12); #[doc = "An unknown error has occurred, due to an implementation or application bug"] pub const ERROR_UNKNOWN: Self = Self(-13); } #[cfg(feature = "std")] impl std::error::Error for Result {} impl fmt::Display for Result { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match * self { Self :: SUCCESS => Some ("Command completed successfully") , Self :: NOT_READY => Some ("A fence or query has not yet completed") , Self :: TIMEOUT => Some ("A wait operation has not completed in the specified time") , Self :: EVENT_SET => Some ("An event is signaled") , Self :: EVENT_RESET => Some ("An event is unsignaled") , Self :: INCOMPLETE => Some ("A return array was too small for the result") , Self :: ERROR_OUT_OF_HOST_MEMORY => Some ("A host memory allocation has failed") , Self :: ERROR_OUT_OF_DEVICE_MEMORY => Some ("A device memory allocation has failed") , Self :: ERROR_INITIALIZATION_FAILED => Some ("Initialization of an object has failed") , Self :: ERROR_DEVICE_LOST => Some ("The logical device has been lost. 
See ") , Self :: ERROR_MEMORY_MAP_FAILED => Some ("Mapping of a memory object has failed") , Self :: ERROR_LAYER_NOT_PRESENT => Some ("Layer specified does not exist") , Self :: ERROR_EXTENSION_NOT_PRESENT => Some ("Extension specified does not exist") , Self :: ERROR_FEATURE_NOT_PRESENT => Some ("Requested feature is not available on this device") , Self :: ERROR_INCOMPATIBLE_DRIVER => Some ("Unable to find a Vulkan driver") , Self :: ERROR_TOO_MANY_OBJECTS => Some ("Too many objects of the type have already been created") , Self :: ERROR_FORMAT_NOT_SUPPORTED => Some ("Requested format is not supported on this device") , Self :: ERROR_FRAGMENTED_POOL => Some ("A requested pool allocation has failed due to fragmentation of the pool's memory") , Self :: ERROR_UNKNOWN => Some ("An unknown error has occurred, due to an implementation or application bug") , _ => None , } ; if let Some(x) = name { fmt.write_str(x) } else { <Self as fmt::Debug>::fmt(self, fmt) } } } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DynamicState(pub(crate) i32); impl DynamicState { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DynamicState { pub const VIEWPORT: Self = Self(0); pub const SCISSOR: Self = Self(1); pub const LINE_WIDTH: Self = Self(2); pub const DEPTH_BIAS: Self = Self(3); pub const BLEND_CONSTANTS: Self = Self(4); pub const DEPTH_BOUNDS: Self = Self(5); pub const STENCIL_COMPARE_MASK: Self = Self(6); pub const STENCIL_WRITE_MASK: Self = Self(7); pub const STENCIL_REFERENCE: Self = Self(8); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DescriptorUpdateTemplateType(pub(crate) i32); impl DescriptorUpdateTemplateType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DescriptorUpdateTemplateType { #[doc = "Create descriptor update template for descriptor set updates"] pub const DESCRIPTOR_SET: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ObjectType(pub(crate) i32); impl ObjectType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ObjectType { pub const UNKNOWN: Self = Self(0); pub const INSTANCE: Self = Self(1); pub const PHYSICAL_DEVICE: Self = Self(2); pub const DEVICE: Self = Self(3); pub const QUEUE: Self = Self(4); pub const SEMAPHORE: Self = Self(5); pub const COMMAND_BUFFER: Self = Self(6); pub const FENCE: Self = Self(7); pub const DEVICE_MEMORY: Self = Self(8); pub const BUFFER: Self = Self(9); pub const IMAGE: Self = Self(10); pub const EVENT: Self = Self(11); pub const QUERY_POOL: Self = Self(12); pub const BUFFER_VIEW: Self = Self(13); pub const IMAGE_VIEW: Self = Self(14); pub const SHADER_MODULE: Self = Self(15); pub const PIPELINE_CACHE: Self = Self(16); pub const PIPELINE_LAYOUT: Self = Self(17); pub const RENDER_PASS: Self = Self(18); pub const PIPELINE: Self = Self(19); pub const DESCRIPTOR_SET_LAYOUT: Self = Self(20); pub const SAMPLER: Self = Self(21); pub const DESCRIPTOR_POOL: Self = Self(22); pub const DESCRIPTOR_SET: Self = Self(23); pub const FRAMEBUFFER: Self = Self(24); pub const COMMAND_POOL: Self = Self(25); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct 
RayTracingInvocationReorderModeNV(pub(crate) i32); impl RayTracingInvocationReorderModeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl RayTracingInvocationReorderModeNV { pub const NONE: Self = Self(0); pub const REORDER: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DirectDriverLoadingModeLUNARG(pub(crate) i32); impl DirectDriverLoadingModeLUNARG { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DirectDriverLoadingModeLUNARG { pub const EXCLUSIVE: Self = Self(0); pub const INCLUSIVE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SemaphoreType(pub(crate) i32); impl SemaphoreType { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SemaphoreType { pub const BINARY: Self = Self(0); pub const TIMELINE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PresentModeKHR(pub(crate) i32); impl PresentModeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PresentModeKHR { pub const IMMEDIATE: Self = Self(0); pub const MAILBOX: Self = Self(1); pub const FIFO: Self = Self(2); pub const FIFO_RELAXED: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ColorSpaceKHR(pub(crate) i32); impl ColorSpaceKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ColorSpaceKHR { pub const SRGB_NONLINEAR: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct TimeDomainKHR(pub(crate) i32); impl TimeDomainKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl TimeDomainKHR { pub const DEVICE: Self = Self(0); pub const CLOCK_MONOTONIC: Self = Self(1); pub const CLOCK_MONOTONIC_RAW: Self = Self(2); pub const QUERY_PERFORMANCE_COUNTER: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DebugReportObjectTypeEXT(pub(crate) i32); impl DebugReportObjectTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DebugReportObjectTypeEXT { pub const UNKNOWN: Self = Self(0); pub const INSTANCE: Self = Self(1); pub const PHYSICAL_DEVICE: Self = Self(2); pub const DEVICE: Self = Self(3); pub const QUEUE: Self = Self(4); pub const SEMAPHORE: Self = Self(5); pub const COMMAND_BUFFER: Self = Self(6); pub const FENCE: Self = Self(7); pub const DEVICE_MEMORY: Self = Self(8); pub const BUFFER: Self = Self(9); pub const IMAGE: Self = Self(10); pub const EVENT: Self = Self(11); pub const QUERY_POOL: Self = Self(12); pub const BUFFER_VIEW: Self = Self(13); pub const IMAGE_VIEW: Self = Self(14); pub const SHADER_MODULE: Self = Self(15); pub const PIPELINE_CACHE: Self = Self(16); pub const PIPELINE_LAYOUT: Self = Self(17); pub const RENDER_PASS: Self = Self(18); pub const PIPELINE: Self = Self(19); pub const DESCRIPTOR_SET_LAYOUT: Self = Self(20); pub 
const SAMPLER: Self = Self(21); pub const DESCRIPTOR_POOL: Self = Self(22); pub const DESCRIPTOR_SET: Self = Self(23); pub const FRAMEBUFFER: Self = Self(24); pub const COMMAND_POOL: Self = Self(25); pub const SURFACE_KHR: Self = Self(26); pub const SWAPCHAIN_KHR: Self = Self(27); pub const DEBUG_REPORT_CALLBACK_EXT: Self = Self(28); pub const DISPLAY_KHR: Self = Self(29); pub const DISPLAY_MODE_KHR: Self = Self(30); pub const VALIDATION_CACHE_EXT: Self = Self(33); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DeviceMemoryReportEventTypeEXT(pub(crate) i32); impl DeviceMemoryReportEventTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DeviceMemoryReportEventTypeEXT { pub const ALLOCATE: Self = Self(0); pub const FREE: Self = Self(1); pub const IMPORT: Self = Self(2); pub const UNIMPORT: Self = Self(3); pub const ALLOCATION_FAILED: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct RasterizationOrderAMD(pub(crate) i32); impl RasterizationOrderAMD { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl RasterizationOrderAMD { pub const STRICT: Self = Self(0); pub const RELAXED: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ValidationCheckEXT(pub(crate) i32); impl ValidationCheckEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ValidationCheckEXT { pub const ALL: Self = Self(0); pub const SHADERS: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ValidationFeatureEnableEXT(pub(crate) i32); impl ValidationFeatureEnableEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ValidationFeatureEnableEXT { pub const GPU_ASSISTED: Self = Self(0); pub const GPU_ASSISTED_RESERVE_BINDING_SLOT: Self = Self(1); pub const BEST_PRACTICES: Self = Self(2); pub const DEBUG_PRINTF: Self = Self(3); pub const SYNCHRONIZATION_VALIDATION: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ValidationFeatureDisableEXT(pub(crate) i32); impl ValidationFeatureDisableEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ValidationFeatureDisableEXT { pub const ALL: Self = Self(0); pub const SHADERS: Self = Self(1); pub const THREAD_SAFETY: Self = Self(2); pub const API_PARAMETERS: Self = Self(3); pub const OBJECT_LIFETIMES: Self = Self(4); pub const CORE_CHECKS: Self = Self(5); pub const UNIQUE_HANDLES: Self = Self(6); pub const SHADER_VALIDATION_CACHE: Self = Self(7); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct LayerSettingTypeEXT(pub(crate) i32); impl LayerSettingTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl LayerSettingTypeEXT { pub const BOOL32: Self = Self(0); pub const INT32: Self = Self(1); pub const INT64: Self = Self(2); pub const UINT32: Self = Self(3); pub const UINT64: Self = Self(4); pub const 
FLOAT32: Self = Self(5); pub const FLOAT64: Self = Self(6); pub const STRING: Self = Self(7); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct IndirectCommandsTokenTypeNV(pub(crate) i32); impl IndirectCommandsTokenTypeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl IndirectCommandsTokenTypeNV { pub const SHADER_GROUP: Self = Self(0); pub const STATE_FLAGS: Self = Self(1); pub const INDEX_BUFFER: Self = Self(2); pub const VERTEX_BUFFER: Self = Self(3); pub const PUSH_CONSTANT: Self = Self(4); pub const DRAW_INDEXED: Self = Self(5); pub const DRAW: Self = Self(6); pub const DRAW_TASKS: Self = Self(7); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DisplayPowerStateEXT(pub(crate) i32); impl DisplayPowerStateEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DisplayPowerStateEXT { pub const OFF: Self = Self(0); pub const SUSPEND: Self = Self(1); pub const ON: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DeviceEventTypeEXT(pub(crate) i32); impl DeviceEventTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DeviceEventTypeEXT { pub const DISPLAY_HOTPLUG: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DisplayEventTypeEXT(pub(crate) i32); impl DisplayEventTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DisplayEventTypeEXT { pub const FIRST_PIXEL_OUT: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ViewportCoordinateSwizzleNV(pub(crate) i32); impl ViewportCoordinateSwizzleNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ViewportCoordinateSwizzleNV { pub const POSITIVE_X: Self = Self(0); pub const NEGATIVE_X: Self = Self(1); pub const POSITIVE_Y: Self = Self(2); pub const NEGATIVE_Y: Self = Self(3); pub const POSITIVE_Z: Self = Self(4); pub const NEGATIVE_Z: Self = Self(5); pub const POSITIVE_W: Self = Self(6); pub const NEGATIVE_W: Self = Self(7); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DiscardRectangleModeEXT(pub(crate) i32); impl DiscardRectangleModeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DiscardRectangleModeEXT { pub const INCLUSIVE: Self = Self(0); pub const EXCLUSIVE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PointClippingBehavior(pub(crate) i32); impl PointClippingBehavior { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PointClippingBehavior { pub const ALL_CLIP_PLANES: Self = Self(0); pub const USER_CLIP_PLANES_ONLY: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SamplerReductionMode(pub(crate) i32); impl 
SamplerReductionMode { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SamplerReductionMode { pub const WEIGHTED_AVERAGE: Self = Self(0); pub const MIN: Self = Self(1); pub const MAX: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct TessellationDomainOrigin(pub(crate) i32); impl TessellationDomainOrigin { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl TessellationDomainOrigin { pub const UPPER_LEFT: Self = Self(0); pub const LOWER_LEFT: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SamplerYcbcrModelConversion(pub(crate) i32); impl SamplerYcbcrModelConversion { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SamplerYcbcrModelConversion { pub const RGB_IDENTITY: Self = Self(0); #[doc = "just range expansion"] pub const YCBCR_IDENTITY: Self = Self(1); #[doc = "aka HD YUV"] pub const YCBCR_709: Self = Self(2); #[doc = "aka SD YUV"] pub const YCBCR_601: Self = Self(3); #[doc = "aka UHD YUV"] pub const YCBCR_2020: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SamplerYcbcrRange(pub(crate) i32); impl SamplerYcbcrRange { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SamplerYcbcrRange { #[doc = "Luma 0..1 maps to 0..255, chroma -0.5..0.5 to 1..255 (clamped)"] pub const ITU_FULL: Self = Self(0); #[doc = "Luma 0..1 maps to 16..235, chroma -0.5..0.5 to 16..240"] pub const ITU_NARROW: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ChromaLocation(pub(crate) i32); impl ChromaLocation { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ChromaLocation { pub const COSITED_EVEN: Self = Self(0); pub const MIDPOINT: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BlendOverlapEXT(pub(crate) i32); impl BlendOverlapEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BlendOverlapEXT { pub const UNCORRELATED: Self = Self(0); pub const DISJOINT: Self = Self(1); pub const CONJOINT: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CoverageModulationModeNV(pub(crate) i32); impl CoverageModulationModeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CoverageModulationModeNV { pub const NONE: Self = Self(0); pub const RGB: Self = Self(1); pub const ALPHA: Self = Self(2); pub const RGBA: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CoverageReductionModeNV(pub(crate) i32); impl CoverageReductionModeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CoverageReductionModeNV { pub const MERGE: Self = Self(0); pub const TRUNCATE: Self = Self(1); } #[derive(Clone, 
Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ValidationCacheHeaderVersionEXT(pub(crate) i32); impl ValidationCacheHeaderVersionEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ValidationCacheHeaderVersionEXT { pub const ONE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ShaderInfoTypeAMD(pub(crate) i32); impl ShaderInfoTypeAMD { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ShaderInfoTypeAMD { pub const STATISTICS: Self = Self(0); pub const BINARY: Self = Self(1); pub const DISASSEMBLY: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct QueueGlobalPriorityKHR(pub(crate) i32); impl QueueGlobalPriorityKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl QueueGlobalPriorityKHR { pub const LOW: Self = Self(128); pub const MEDIUM: Self = Self(256); pub const HIGH: Self = Self(512); pub const REALTIME: Self = Self(1_024); pub const LOW_EXT: Self = Self::LOW; pub const MEDIUM_EXT: Self = Self::MEDIUM; pub const HIGH_EXT: Self = Self::HIGH; pub const REALTIME_EXT: Self = Self::REALTIME; } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ConservativeRasterizationModeEXT(pub(crate) i32); impl ConservativeRasterizationModeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ConservativeRasterizationModeEXT { pub const DISABLED: Self = Self(0); pub const OVERESTIMATE: Self = Self(1); pub const UNDERESTIMATE: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct VendorId(pub(crate) i32); impl VendorId { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl VendorId { #[doc = "Vivante vendor ID"] pub const VIV: Self = Self(0x1_0001); #[doc = "VeriSilicon vendor ID"] pub const VSI: Self = Self(0x1_0002); #[doc = "Kazan Software Renderer"] pub const KAZAN: Self = Self(0x1_0003); #[doc = "Codeplay Software Ltd. 
vendor ID"] pub const CODEPLAY: Self = Self(0x1_0004); #[doc = "Mesa vendor ID"] pub const MESA: Self = Self(0x1_0005); #[doc = "PoCL vendor ID"] pub const POCL: Self = Self(0x1_0006); #[doc = "Mobileye vendor ID"] pub const MOBILEYE: Self = Self(0x1_0007); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DriverId(pub(crate) i32); impl DriverId { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DriverId { #[doc = "Advanced Micro Devices, Inc."] pub const AMD_PROPRIETARY: Self = Self(1); #[doc = "Advanced Micro Devices, Inc."] pub const AMD_OPEN_SOURCE: Self = Self(2); #[doc = "Mesa open source project"] pub const MESA_RADV: Self = Self(3); #[doc = "NVIDIA Corporation"] pub const NVIDIA_PROPRIETARY: Self = Self(4); #[doc = "Intel Corporation"] pub const INTEL_PROPRIETARY_WINDOWS: Self = Self(5); #[doc = "Intel Corporation"] pub const INTEL_OPEN_SOURCE_MESA: Self = Self(6); #[doc = "Imagination Technologies"] pub const IMAGINATION_PROPRIETARY: Self = Self(7); #[doc = "Qualcomm Technologies, Inc."] pub const QUALCOMM_PROPRIETARY: Self = Self(8); #[doc = "Arm Limited"] pub const ARM_PROPRIETARY: Self = Self(9); #[doc = "Google LLC"] pub const GOOGLE_SWIFTSHADER: Self = Self(10); #[doc = "Google LLC"] pub const GGP_PROPRIETARY: Self = Self(11); #[doc = "Broadcom Inc."] pub const BROADCOM_PROPRIETARY: Self = Self(12); #[doc = "Mesa"] pub const MESA_LLVMPIPE: Self = Self(13); #[doc = "MoltenVK"] pub const MOLTENVK: Self = Self(14); #[doc = "Core Avionics & Industrial Inc."] pub const COREAVI_PROPRIETARY: Self = Self(15); #[doc = "Juice Technologies, Inc."] pub const JUICE_PROPRIETARY: Self = Self(16); #[doc = "Verisilicon, Inc."] pub const VERISILICON_PROPRIETARY: Self = Self(17); #[doc = "Mesa open source project"] pub const MESA_TURNIP: Self = Self(18); #[doc = "Mesa open source project"] pub const MESA_V3DV: Self = Self(19); #[doc = "Mesa open source project"] pub const MESA_PANVK: Self = Self(20); #[doc = "Samsung Electronics Co., Ltd."] pub const SAMSUNG_PROPRIETARY: Self = Self(21); #[doc = "Mesa open source project"] pub const MESA_VENUS: Self = Self(22); #[doc = "Mesa open source project"] pub const MESA_DOZEN: Self = Self(23); #[doc = "Mesa open source project"] pub const MESA_NVK: Self = Self(24); #[doc = "Imagination Technologies"] pub const IMAGINATION_OPEN_SOURCE_MESA: Self = Self(25); #[doc = "Mesa open source project"] pub const MESA_AGXV: Self = Self(26); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ShadingRatePaletteEntryNV(pub(crate) i32); impl ShadingRatePaletteEntryNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ShadingRatePaletteEntryNV { pub const NO_INVOCATIONS: Self = Self(0); pub const TYPE_16_INVOCATIONS_PER_PIXEL: Self = Self(1); pub const TYPE_8_INVOCATIONS_PER_PIXEL: Self = Self(2); pub const TYPE_4_INVOCATIONS_PER_PIXEL: Self = Self(3); pub const TYPE_2_INVOCATIONS_PER_PIXEL: Self = Self(4); pub const TYPE_1_INVOCATION_PER_PIXEL: Self = Self(5); pub const TYPE_1_INVOCATION_PER_2X1_PIXELS: Self = Self(6); pub const TYPE_1_INVOCATION_PER_1X2_PIXELS: Self = Self(7); pub const TYPE_1_INVOCATION_PER_2X2_PIXELS: Self = Self(8); pub const TYPE_1_INVOCATION_PER_4X2_PIXELS: Self = Self(9); pub const TYPE_1_INVOCATION_PER_2X4_PIXELS: Self = Self(10); pub const TYPE_1_INVOCATION_PER_4X4_PIXELS: 
Self = Self(11); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CoarseSampleOrderTypeNV(pub(crate) i32); impl CoarseSampleOrderTypeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CoarseSampleOrderTypeNV { pub const DEFAULT: Self = Self(0); pub const CUSTOM: Self = Self(1); pub const PIXEL_MAJOR: Self = Self(2); pub const SAMPLE_MAJOR: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CopyAccelerationStructureModeKHR(pub(crate) i32); impl CopyAccelerationStructureModeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CopyAccelerationStructureModeKHR { pub const CLONE: Self = Self(0); pub const COMPACT: Self = Self(1); pub const SERIALIZE: Self = Self(2); pub const DESERIALIZE: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BuildAccelerationStructureModeKHR(pub(crate) i32); impl BuildAccelerationStructureModeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BuildAccelerationStructureModeKHR { pub const BUILD: Self = Self(0); pub const UPDATE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AccelerationStructureTypeKHR(pub(crate) i32); impl AccelerationStructureTypeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AccelerationStructureTypeKHR { pub const TOP_LEVEL: Self = Self(0); pub const BOTTOM_LEVEL: Self = Self(1); pub const GENERIC: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct GeometryTypeKHR(pub(crate) i32); impl GeometryTypeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl GeometryTypeKHR { pub const TRIANGLES: Self = Self(0); pub const AABBS: Self = Self(1); pub const INSTANCES: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AccelerationStructureMemoryRequirementsTypeNV(pub(crate) i32); impl AccelerationStructureMemoryRequirementsTypeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AccelerationStructureMemoryRequirementsTypeNV { pub const OBJECT: Self = Self(0); pub const BUILD_SCRATCH: Self = Self(1); pub const UPDATE_SCRATCH: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AccelerationStructureBuildTypeKHR(pub(crate) i32); impl AccelerationStructureBuildTypeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AccelerationStructureBuildTypeKHR { pub const HOST: Self = Self(0); pub const DEVICE: Self = Self(1); pub const HOST_OR_DEVICE: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct RayTracingShaderGroupTypeKHR(pub(crate) i32); impl RayTracingShaderGroupTypeKHR { #[inline] pub const fn from_raw(x: i32) -> Self 
{ Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl RayTracingShaderGroupTypeKHR { pub const GENERAL: Self = Self(0); pub const TRIANGLES_HIT_GROUP: Self = Self(1); pub const PROCEDURAL_HIT_GROUP: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AccelerationStructureCompatibilityKHR(pub(crate) i32); impl AccelerationStructureCompatibilityKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AccelerationStructureCompatibilityKHR { pub const COMPATIBLE: Self = Self(0); pub const INCOMPATIBLE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ShaderGroupShaderKHR(pub(crate) i32); impl ShaderGroupShaderKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ShaderGroupShaderKHR { pub const GENERAL: Self = Self(0); pub const CLOSEST_HIT: Self = Self(1); pub const ANY_HIT: Self = Self(2); pub const INTERSECTION: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct MemoryOverallocationBehaviorAMD(pub(crate) i32); impl MemoryOverallocationBehaviorAMD { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl MemoryOverallocationBehaviorAMD { pub const DEFAULT: Self = Self(0); pub const ALLOWED: Self = Self(1); pub const DISALLOWED: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct FullScreenExclusiveEXT(pub(crate) i32); impl FullScreenExclusiveEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl FullScreenExclusiveEXT { pub const DEFAULT: Self = Self(0); pub const ALLOWED: Self = Self(1); pub const DISALLOWED: Self = Self(2); pub const APPLICATION_CONTROLLED: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceCounterScopeKHR(pub(crate) i32); impl PerformanceCounterScopeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceCounterScopeKHR { pub const COMMAND_BUFFER: Self = Self(0); pub const RENDER_PASS: Self = Self(1); pub const COMMAND: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceCounterUnitKHR(pub(crate) i32); impl PerformanceCounterUnitKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceCounterUnitKHR { pub const GENERIC: Self = Self(0); pub const PERCENTAGE: Self = Self(1); pub const NANOSECONDS: Self = Self(2); pub const BYTES: Self = Self(3); pub const BYTES_PER_SECOND: Self = Self(4); pub const KELVIN: Self = Self(5); pub const WATTS: Self = Self(6); pub const VOLTS: Self = Self(7); pub const AMPS: Self = Self(8); pub const HERTZ: Self = Self(9); pub const CYCLES: Self = Self(10); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceCounterStorageKHR(pub(crate) i32); impl PerformanceCounterStorageKHR { #[inline] pub const fn from_raw(x: i32) -> 
Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceCounterStorageKHR { pub const INT32: Self = Self(0); pub const INT64: Self = Self(1); pub const UINT32: Self = Self(2); pub const UINT64: Self = Self(3); pub const FLOAT32: Self = Self(4); pub const FLOAT64: Self = Self(5); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceConfigurationTypeINTEL(pub(crate) i32); impl PerformanceConfigurationTypeINTEL { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceConfigurationTypeINTEL { pub const COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct QueryPoolSamplingModeINTEL(pub(crate) i32); impl QueryPoolSamplingModeINTEL { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl QueryPoolSamplingModeINTEL { pub const MANUAL: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceOverrideTypeINTEL(pub(crate) i32); impl PerformanceOverrideTypeINTEL { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceOverrideTypeINTEL { pub const NULL_HARDWARE: Self = Self(0); pub const FLUSH_GPU_CACHES: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceParameterTypeINTEL(pub(crate) i32); impl PerformanceParameterTypeINTEL { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceParameterTypeINTEL { pub const HW_COUNTERS_SUPPORTED: Self = Self(0); pub const STREAM_MARKER_VALIDS: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PerformanceValueTypeINTEL(pub(crate) i32); impl PerformanceValueTypeINTEL { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PerformanceValueTypeINTEL { pub const UINT32: Self = Self(0); pub const UINT64: Self = Self(1); pub const FLOAT: Self = Self(2); pub const BOOL: Self = Self(3); pub const STRING: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ShaderFloatControlsIndependence(pub(crate) i32); impl ShaderFloatControlsIndependence { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ShaderFloatControlsIndependence { pub const TYPE_32_ONLY: Self = Self(0); pub const ALL: Self = Self(1); pub const NONE: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PipelineExecutableStatisticFormatKHR(pub(crate) i32); impl PipelineExecutableStatisticFormatKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PipelineExecutableStatisticFormatKHR { pub const BOOL32: Self = Self(0); pub const INT64: Self = Self(1); pub const UINT64: Self = Self(2); pub const FLOAT64: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, 
Default)] #[repr(transparent)] #[doc = ""] pub struct LineRasterizationModeKHR(pub(crate) i32); impl LineRasterizationModeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl LineRasterizationModeKHR { pub const DEFAULT: Self = Self(0); pub const DEFAULT_EXT: Self = Self::DEFAULT; pub const RECTANGULAR: Self = Self(1); pub const RECTANGULAR_EXT: Self = Self::RECTANGULAR; pub const BRESENHAM: Self = Self(2); pub const BRESENHAM_EXT: Self = Self::BRESENHAM; pub const RECTANGULAR_SMOOTH: Self = Self(3); pub const RECTANGULAR_SMOOTH_EXT: Self = Self::RECTANGULAR_SMOOTH; } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct FragmentShadingRateCombinerOpKHR(pub(crate) i32); impl FragmentShadingRateCombinerOpKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl FragmentShadingRateCombinerOpKHR { pub const KEEP: Self = Self(0); pub const REPLACE: Self = Self(1); pub const MIN: Self = Self(2); pub const MAX: Self = Self(3); pub const MUL: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct FragmentShadingRateNV(pub(crate) i32); impl FragmentShadingRateNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl FragmentShadingRateNV { pub const TYPE_1_INVOCATION_PER_PIXEL: Self = Self(0); pub const TYPE_1_INVOCATION_PER_1X2_PIXELS: Self = Self(1); pub const TYPE_1_INVOCATION_PER_2X1_PIXELS: Self = Self(4); pub const TYPE_1_INVOCATION_PER_2X2_PIXELS: Self = Self(5); pub const TYPE_1_INVOCATION_PER_2X4_PIXELS: Self = Self(6); pub const TYPE_1_INVOCATION_PER_4X2_PIXELS: Self = Self(9); pub const TYPE_1_INVOCATION_PER_4X4_PIXELS: Self = Self(10); pub const TYPE_2_INVOCATIONS_PER_PIXEL: Self = Self(11); pub const TYPE_4_INVOCATIONS_PER_PIXEL: Self = Self(12); pub const TYPE_8_INVOCATIONS_PER_PIXEL: Self = Self(13); pub const TYPE_16_INVOCATIONS_PER_PIXEL: Self = Self(14); pub const NO_INVOCATIONS: Self = Self(15); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct FragmentShadingRateTypeNV(pub(crate) i32); impl FragmentShadingRateTypeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl FragmentShadingRateTypeNV { pub const FRAGMENT_SIZE: Self = Self(0); pub const ENUMS: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct SubpassMergeStatusEXT(pub(crate) i32); impl SubpassMergeStatusEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl SubpassMergeStatusEXT { pub const MERGED: Self = Self(0); pub const DISALLOWED: Self = Self(1); pub const NOT_MERGED_SIDE_EFFECTS: Self = Self(2); pub const NOT_MERGED_SAMPLES_MISMATCH: Self = Self(3); pub const NOT_MERGED_VIEWS_MISMATCH: Self = Self(4); pub const NOT_MERGED_ALIASING: Self = Self(5); pub const NOT_MERGED_DEPENDENCIES: Self = Self(6); pub const NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT: Self = Self(7); pub const NOT_MERGED_TOO_MANY_ATTACHMENTS: Self = Self(8); pub const NOT_MERGED_INSUFFICIENT_STORAGE: Self = Self(9); pub const NOT_MERGED_DEPTH_STENCIL_COUNT: Self = Self(10); pub const NOT_MERGED_RESOLVE_ATTACHMENT_REUSE: 
Self = Self(11); pub const NOT_MERGED_SINGLE_SUBPASS: Self = Self(12); pub const NOT_MERGED_UNSPECIFIED: Self = Self(13); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ProvokingVertexModeEXT(pub(crate) i32); impl ProvokingVertexModeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ProvokingVertexModeEXT { pub const FIRST_VERTEX: Self = Self(0); pub const LAST_VERTEX: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct AccelerationStructureMotionInstanceTypeNV(pub(crate) i32); impl AccelerationStructureMotionInstanceTypeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl AccelerationStructureMotionInstanceTypeNV { pub const STATIC: Self = Self(0); pub const MATRIX_MOTION: Self = Self(1); pub const SRT_MOTION: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DeviceAddressBindingTypeEXT(pub(crate) i32); impl DeviceAddressBindingTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DeviceAddressBindingTypeEXT { pub const BIND: Self = Self(0); pub const UNBIND: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct QueryResultStatusKHR(pub(crate) i32); impl QueryResultStatusKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl QueryResultStatusKHR { pub const ERROR: Self = Self(-1); pub const NOT_READY: Self = Self(0); pub const COMPLETE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct VideoEncodeTuningModeKHR(pub(crate) i32); impl VideoEncodeTuningModeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl VideoEncodeTuningModeKHR { pub const DEFAULT: Self = Self(0); pub const HIGH_QUALITY: Self = Self(1); pub const LOW_LATENCY: Self = Self(2); pub const ULTRA_LOW_LATENCY: Self = Self(3); pub const LOSSLESS: Self = Self(4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PipelineRobustnessBufferBehaviorEXT(pub(crate) i32); impl PipelineRobustnessBufferBehaviorEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PipelineRobustnessBufferBehaviorEXT { pub const DEVICE_DEFAULT: Self = Self(0); pub const DISABLED: Self = Self(1); pub const ROBUST_BUFFER_ACCESS: Self = Self(2); pub const ROBUST_BUFFER_ACCESS_2: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct PipelineRobustnessImageBehaviorEXT(pub(crate) i32); impl PipelineRobustnessImageBehaviorEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl PipelineRobustnessImageBehaviorEXT { pub const DEVICE_DEFAULT: Self = Self(0); pub const DISABLED: Self = Self(1); pub const ROBUST_IMAGE_ACCESS: Self = Self(2); pub const ROBUST_IMAGE_ACCESS_2: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, 
Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct OpticalFlowPerformanceLevelNV(pub(crate) i32); impl OpticalFlowPerformanceLevelNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl OpticalFlowPerformanceLevelNV { pub const UNKNOWN: Self = Self(0); pub const SLOW: Self = Self(1); pub const MEDIUM: Self = Self(2); pub const FAST: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct OpticalFlowSessionBindingPointNV(pub(crate) i32); impl OpticalFlowSessionBindingPointNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl OpticalFlowSessionBindingPointNV { pub const UNKNOWN: Self = Self(0); pub const INPUT: Self = Self(1); pub const REFERENCE: Self = Self(2); pub const HINT: Self = Self(3); pub const FLOW_VECTOR: Self = Self(4); pub const BACKWARD_FLOW_VECTOR: Self = Self(5); pub const COST: Self = Self(6); pub const BACKWARD_COST: Self = Self(7); pub const GLOBAL_FLOW: Self = Self(8); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct MicromapTypeEXT(pub(crate) i32); impl MicromapTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl MicromapTypeEXT { pub const OPACITY_MICROMAP: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CopyMicromapModeEXT(pub(crate) i32); impl CopyMicromapModeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CopyMicromapModeEXT { pub const CLONE: Self = Self(0); pub const SERIALIZE: Self = Self(1); pub const DESERIALIZE: Self = Self(2); pub const COMPACT: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BuildMicromapModeEXT(pub(crate) i32); impl BuildMicromapModeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BuildMicromapModeEXT { pub const BUILD: Self = Self(0); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct OpacityMicromapFormatEXT(pub(crate) i32); impl OpacityMicromapFormatEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl OpacityMicromapFormatEXT { pub const TYPE_2_STATE: Self = Self(1); pub const TYPE_4_STATE: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct OpacityMicromapSpecialIndexEXT(pub(crate) i32); impl OpacityMicromapSpecialIndexEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl OpacityMicromapSpecialIndexEXT { pub const FULLY_TRANSPARENT: Self = Self(-1); pub const FULLY_OPAQUE: Self = Self(-2); pub const FULLY_UNKNOWN_TRANSPARENT: Self = Self(-3); pub const FULLY_UNKNOWN_OPAQUE: Self = Self(-4); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DepthBiasRepresentationEXT(pub(crate) i32); impl DepthBiasRepresentationEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn 
as_raw(self) -> i32 { self.0 } } impl DepthBiasRepresentationEXT { pub const LEAST_REPRESENTABLE_VALUE_FORMAT: Self = Self(0); pub const LEAST_REPRESENTABLE_VALUE_FORCE_UNORM: Self = Self(1); pub const FLOAT: Self = Self(2); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DeviceFaultAddressTypeEXT(pub(crate) i32); impl DeviceFaultAddressTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DeviceFaultAddressTypeEXT { #[doc = "Currently unused"] pub const NONE: Self = Self(0); pub const READ_INVALID: Self = Self(1); pub const WRITE_INVALID: Self = Self(2); pub const EXECUTE_INVALID: Self = Self(3); pub const INSTRUCTION_POINTER_UNKNOWN: Self = Self(4); pub const INSTRUCTION_POINTER_INVALID: Self = Self(5); pub const INSTRUCTION_POINTER_FAULT: Self = Self(6); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DeviceFaultVendorBinaryHeaderVersionEXT(pub(crate) i32); impl DeviceFaultVendorBinaryHeaderVersionEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DeviceFaultVendorBinaryHeaderVersionEXT { pub const ONE: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct DisplacementMicromapFormatNV(pub(crate) i32); impl DisplacementMicromapFormatNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl DisplacementMicromapFormatNV { pub const TYPE_64_TRIANGLES_64_BYTES: Self = Self(1); pub const TYPE_256_TRIANGLES_128_BYTES: Self = Self(2); pub const TYPE_1024_TRIANGLES_128_BYTES: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ShaderCodeTypeEXT(pub(crate) i32); impl ShaderCodeTypeEXT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ShaderCodeTypeEXT { pub const BINARY: Self = Self(0); pub const SPIRV: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ScopeKHR(pub(crate) i32); impl ScopeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ScopeKHR { pub const DEVICE: Self = Self(1); pub const WORKGROUP: Self = Self(2); pub const SUBGROUP: Self = Self(3); pub const QUEUE_FAMILY: Self = Self(5); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct ComponentTypeKHR(pub(crate) i32); impl ComponentTypeKHR { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl ComponentTypeKHR { pub const FLOAT16: Self = Self(0); pub const FLOAT32: Self = Self(1); pub const FLOAT64: Self = Self(2); pub const SINT8: Self = Self(3); pub const SINT16: Self = Self(4); pub const SINT32: Self = Self(5); pub const SINT64: Self = Self(6); pub const UINT8: Self = Self(7); pub const UINT16: Self = Self(8); pub const UINT32: Self = Self(9); pub const UINT64: Self = Self(10); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct CubicFilterWeightsQCOM(pub(crate) i32); impl CubicFilterWeightsQCOM { #[inline] pub 
const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl CubicFilterWeightsQCOM { pub const CATMULL_ROM: Self = Self(0); pub const ZERO_TANGENT_CARDINAL: Self = Self(1); pub const B_SPLINE: Self = Self(2); pub const MITCHELL_NETRAVALI: Self = Self(3); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct BlockMatchWindowCompareModeQCOM(pub(crate) i32); impl BlockMatchWindowCompareModeQCOM { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl BlockMatchWindowCompareModeQCOM { pub const MIN: Self = Self(0); pub const MAX: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct LayeredDriverUnderlyingApiMSFT(pub(crate) i32); impl LayeredDriverUnderlyingApiMSFT { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl LayeredDriverUnderlyingApiMSFT { pub const NONE: Self = Self(0); pub const D3D12: Self = Self(1); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct LatencyMarkerNV(pub(crate) i32); impl LatencyMarkerNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl LatencyMarkerNV { pub const SIMULATION_START: Self = Self(0); pub const SIMULATION_END: Self = Self(1); pub const RENDERSUBMIT_START: Self = Self(2); pub const RENDERSUBMIT_END: Self = Self(3); pub const PRESENT_START: Self = Self(4); pub const PRESENT_END: Self = Self(5); pub const INPUT_SAMPLE: Self = Self(6); pub const TRIGGER_FLASH: Self = Self(7); pub const OUT_OF_BAND_RENDERSUBMIT_START: Self = Self(8); pub const OUT_OF_BAND_RENDERSUBMIT_END: Self = Self(9); pub const OUT_OF_BAND_PRESENT_START: Self = Self(10); pub const OUT_OF_BAND_PRESENT_END: Self = Self(11); } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[repr(transparent)] #[doc = ""] pub struct OutOfBandQueueTypeNV(pub(crate) i32); impl OutOfBandQueueTypeNV { #[inline] pub const fn from_raw(x: i32) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> i32 { self.0 } } impl OutOfBandQueueTypeNV { pub const RENDER: Self = Self(0); pub const PRESENT: Self = Self(1); } impl fmt::Debug for ObjectType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::UNKNOWN => Some("UNKNOWN"), Self::INSTANCE => Some("INSTANCE"), Self::PHYSICAL_DEVICE => Some("PHYSICAL_DEVICE"), Self::DEVICE => Some("DEVICE"), Self::QUEUE => Some("QUEUE"), Self::SEMAPHORE => Some("SEMAPHORE"), Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"), Self::FENCE => Some("FENCE"), Self::DEVICE_MEMORY => Some("DEVICE_MEMORY"), Self::BUFFER => Some("BUFFER"), Self::IMAGE => Some("IMAGE"), Self::EVENT => Some("EVENT"), Self::QUERY_POOL => Some("QUERY_POOL"), Self::BUFFER_VIEW => Some("BUFFER_VIEW"), Self::IMAGE_VIEW => Some("IMAGE_VIEW"), Self::SHADER_MODULE => Some("SHADER_MODULE"), Self::PIPELINE_CACHE => Some("PIPELINE_CACHE"), Self::PIPELINE_LAYOUT => Some("PIPELINE_LAYOUT"), Self::RENDER_PASS => Some("RENDER_PASS"), Self::PIPELINE => Some("PIPELINE"), Self::DESCRIPTOR_SET_LAYOUT => Some("DESCRIPTOR_SET_LAYOUT"), Self::SAMPLER => Some("SAMPLER"), Self::DESCRIPTOR_POOL => Some("DESCRIPTOR_POOL"), Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"), Self::FRAMEBUFFER => Some("FRAMEBUFFER"), 
Self::COMMAND_POOL => Some("COMMAND_POOL"), Self::SURFACE_KHR => Some("SURFACE_KHR"), Self::SWAPCHAIN_KHR => Some("SWAPCHAIN_KHR"), Self::DISPLAY_KHR => Some("DISPLAY_KHR"), Self::DISPLAY_MODE_KHR => Some("DISPLAY_MODE_KHR"), Self::DEBUG_REPORT_CALLBACK_EXT => Some("DEBUG_REPORT_CALLBACK_EXT"), Self::VIDEO_SESSION_KHR => Some("VIDEO_SESSION_KHR"), Self::VIDEO_SESSION_PARAMETERS_KHR => Some("VIDEO_SESSION_PARAMETERS_KHR"), Self::CU_MODULE_NVX => Some("CU_MODULE_NVX"), Self::CU_FUNCTION_NVX => Some("CU_FUNCTION_NVX"), Self::DEBUG_UTILS_MESSENGER_EXT => Some("DEBUG_UTILS_MESSENGER_EXT"), Self::ACCELERATION_STRUCTURE_KHR => Some("ACCELERATION_STRUCTURE_KHR"), Self::VALIDATION_CACHE_EXT => Some("VALIDATION_CACHE_EXT"), Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"), Self::PERFORMANCE_CONFIGURATION_INTEL => Some("PERFORMANCE_CONFIGURATION_INTEL"), Self::DEFERRED_OPERATION_KHR => Some("DEFERRED_OPERATION_KHR"), Self::INDIRECT_COMMANDS_LAYOUT_NV => Some("INDIRECT_COMMANDS_LAYOUT_NV"), Self::CUDA_MODULE_NV => Some("CUDA_MODULE_NV"), Self::CUDA_FUNCTION_NV => Some("CUDA_FUNCTION_NV"), Self::BUFFER_COLLECTION_FUCHSIA => Some("BUFFER_COLLECTION_FUCHSIA"), Self::MICROMAP_EXT => Some("MICROMAP_EXT"), Self::OPTICAL_FLOW_SESSION_NV => Some("OPTICAL_FLOW_SESSION_NV"), Self::SHADER_EXT => Some("SHADER_EXT"), Self::SAMPLER_YCBCR_CONVERSION => Some("SAMPLER_YCBCR_CONVERSION"), Self::DESCRIPTOR_UPDATE_TEMPLATE => Some("DESCRIPTOR_UPDATE_TEMPLATE"), Self::PRIVATE_DATA_SLOT => Some("PRIVATE_DATA_SLOT"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } impl fmt::Debug for Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let name = match *self { Self::SUCCESS => Some("SUCCESS"), Self::NOT_READY => Some("NOT_READY"), Self::TIMEOUT => Some("TIMEOUT"), Self::EVENT_SET => Some("EVENT_SET"), Self::EVENT_RESET => Some("EVENT_RESET"), Self::INCOMPLETE => Some("INCOMPLETE"), Self::ERROR_OUT_OF_HOST_MEMORY => Some("ERROR_OUT_OF_HOST_MEMORY"), Self::ERROR_OUT_OF_DEVICE_MEMORY => Some("ERROR_OUT_OF_DEVICE_MEMORY"), Self::ERROR_INITIALIZATION_FAILED => Some("ERROR_INITIALIZATION_FAILED"), Self::ERROR_DEVICE_LOST => Some("ERROR_DEVICE_LOST"), Self::ERROR_MEMORY_MAP_FAILED => Some("ERROR_MEMORY_MAP_FAILED"), Self::ERROR_LAYER_NOT_PRESENT => Some("ERROR_LAYER_NOT_PRESENT"), Self::ERROR_EXTENSION_NOT_PRESENT => Some("ERROR_EXTENSION_NOT_PRESENT"), Self::ERROR_FEATURE_NOT_PRESENT => Some("ERROR_FEATURE_NOT_PRESENT"), Self::ERROR_INCOMPATIBLE_DRIVER => Some("ERROR_INCOMPATIBLE_DRIVER"), Self::ERROR_TOO_MANY_OBJECTS => Some("ERROR_TOO_MANY_OBJECTS"), Self::ERROR_FORMAT_NOT_SUPPORTED => Some("ERROR_FORMAT_NOT_SUPPORTED"), Self::ERROR_FRAGMENTED_POOL => Some("ERROR_FRAGMENTED_POOL"), Self::ERROR_UNKNOWN => Some("ERROR_UNKNOWN"), Self::ERROR_SURFACE_LOST_KHR => Some("ERROR_SURFACE_LOST_KHR"), Self::ERROR_NATIVE_WINDOW_IN_USE_KHR => Some("ERROR_NATIVE_WINDOW_IN_USE_KHR"), Self::SUBOPTIMAL_KHR => Some("SUBOPTIMAL_KHR"), Self::ERROR_OUT_OF_DATE_KHR => Some("ERROR_OUT_OF_DATE_KHR"), Self::ERROR_INCOMPATIBLE_DISPLAY_KHR => Some("ERROR_INCOMPATIBLE_DISPLAY_KHR"), Self::ERROR_VALIDATION_FAILED_EXT => Some("ERROR_VALIDATION_FAILED_EXT"), Self::ERROR_INVALID_SHADER_NV => Some("ERROR_INVALID_SHADER_NV"), Self::ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR => { Some("ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR") } Self::ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR => { Some("ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR") } Self::ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR => { 
Some("ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR") } Self::ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR => { Some("ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR") } Self::ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR => { Some("ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR") } Self::ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR => { Some("ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR") } Self::ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT => { Some("ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT") } Self::ERROR_NOT_PERMITTED_KHR => Some("ERROR_NOT_PERMITTED_KHR"), Self::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT => { Some("ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT") } Self::THREAD_IDLE_KHR => Some("THREAD_IDLE_KHR"), Self::THREAD_DONE_KHR => Some("THREAD_DONE_KHR"), Self::OPERATION_DEFERRED_KHR => Some("OPERATION_DEFERRED_KHR"), Self::OPERATION_NOT_DEFERRED_KHR => Some("OPERATION_NOT_DEFERRED_KHR"), Self::ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR => { Some("ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR") } Self::ERROR_COMPRESSION_EXHAUSTED_EXT => Some("ERROR_COMPRESSION_EXHAUSTED_EXT"), Self::INCOMPATIBLE_SHADER_BINARY_EXT => Some("INCOMPATIBLE_SHADER_BINARY_EXT"), Self::ERROR_OUT_OF_POOL_MEMORY => Some("ERROR_OUT_OF_POOL_MEMORY"), Self::ERROR_INVALID_EXTERNAL_HANDLE => Some("ERROR_INVALID_EXTERNAL_HANDLE"), Self::ERROR_FRAGMENTATION => Some("ERROR_FRAGMENTATION"), Self::ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS => { Some("ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS") } Self::PIPELINE_COMPILE_REQUIRED => Some("PIPELINE_COMPILE_REQUIRED"), _ => None, }; if let Some(x) = name { f.write_str(x) } else { self.0.fmt(f) } } } ash-0.38.0+1.3.281/src/vk/extensions.rs000064400000000000000000013427001046102023000152600ustar 00000000000000#![allow(unused_qualifications)] use crate::vk::aliases::*; use crate::vk::bitflags::*; use crate::vk::definitions::*; use crate::vk::enums::*; use crate::vk::platform_types::*; use core::ffi::*; #[doc = "Generated from 'VK_KHR_surface'"] impl ObjectType { pub const SURFACE_KHR: Self = Self(1_000_000_000); } #[doc = "Generated from 'VK_KHR_surface'"] impl Result { pub const ERROR_SURFACE_LOST_KHR: Self = Self(-1_000_000_000); pub const ERROR_NATIVE_WINDOW_IN_USE_KHR: Self = Self(-1_000_000_001); } #[doc = "Generated from 'VK_KHR_swapchain'"] impl ImageLayout { pub const PRESENT_SRC_KHR: Self = Self(1_000_001_002); } #[doc = "Generated from 'VK_KHR_swapchain'"] impl ObjectType { pub const SWAPCHAIN_KHR: Self = Self(1_000_001_000); } #[doc = "Generated from 'VK_KHR_swapchain'"] impl Result { pub const SUBOPTIMAL_KHR: Self = Self(1_000_001_003); pub const ERROR_OUT_OF_DATE_KHR: Self = Self(-1_000_001_004); } #[doc = "Generated from 'VK_KHR_swapchain'"] impl StructureType { pub const SWAPCHAIN_CREATE_INFO_KHR: Self = Self(1_000_001_000); pub const PRESENT_INFO_KHR: Self = Self(1_000_001_001); pub const DEVICE_GROUP_PRESENT_CAPABILITIES_KHR: Self = Self(1_000_060_007); pub const IMAGE_SWAPCHAIN_CREATE_INFO_KHR: Self = Self(1_000_060_008); pub const BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: Self = Self(1_000_060_009); pub const ACQUIRE_NEXT_IMAGE_INFO_KHR: Self = Self(1_000_060_010); pub const DEVICE_GROUP_PRESENT_INFO_KHR: Self = Self(1_000_060_011); pub const DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: Self = Self(1_000_060_012); } #[doc = "Generated from 'VK_KHR_swapchain'"] impl SwapchainCreateFlagsKHR { #[doc = "Allow images with VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"] pub const SPLIT_INSTANCE_BIND_REGIONS: Self = Self(0b1); #[doc = "Swapchain is protected"] pub const PROTECTED: Self = 
Self(0b10); } #[doc = "Generated from 'VK_KHR_display'"] impl ObjectType { pub const DISPLAY_KHR: Self = Self(1_000_002_000); pub const DISPLAY_MODE_KHR: Self = Self(1_000_002_001); } #[doc = "Generated from 'VK_KHR_display'"] impl StructureType { pub const DISPLAY_MODE_CREATE_INFO_KHR: Self = Self(1_000_002_000); pub const DISPLAY_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_002_001); } #[doc = "Generated from 'VK_KHR_display_swapchain'"] impl Result { pub const ERROR_INCOMPATIBLE_DISPLAY_KHR: Self = Self(-1_000_003_001); } #[doc = "Generated from 'VK_KHR_display_swapchain'"] impl StructureType { pub const DISPLAY_PRESENT_INFO_KHR: Self = Self(1_000_003_000); } #[doc = "Generated from 'VK_KHR_xlib_surface'"] impl StructureType { pub const XLIB_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_004_000); } #[doc = "Generated from 'VK_KHR_xcb_surface'"] impl StructureType { pub const XCB_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_005_000); } #[doc = "Generated from 'VK_KHR_wayland_surface'"] impl StructureType { pub const WAYLAND_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_006_000); } #[doc = "Generated from 'VK_KHR_android_surface'"] impl StructureType { pub const ANDROID_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_008_000); } #[doc = "Generated from 'VK_KHR_win32_surface'"] impl StructureType { pub const WIN32_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_009_000); } #[doc = "Generated from 'VK_ANDROID_native_buffer'"] impl StructureType { pub const NATIVE_BUFFER_ANDROID: Self = Self(1_000_010_000); pub const SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID: Self = Self(1_000_010_001); pub const PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID: Self = Self(1_000_010_002); } #[doc = "Generated from 'VK_EXT_debug_report'"] impl DebugReportObjectTypeEXT { pub const SAMPLER_YCBCR_CONVERSION: Self = Self(1_000_156_000); pub const DESCRIPTOR_UPDATE_TEMPLATE: Self = Self(1_000_085_000); } #[doc = "Generated from 'VK_EXT_debug_report'"] impl ObjectType { pub const DEBUG_REPORT_CALLBACK_EXT: Self = Self(1_000_011_000); } #[doc = "Generated from 'VK_EXT_debug_report'"] impl Result { pub const ERROR_VALIDATION_FAILED_EXT: Self = Self(-1_000_011_001); } #[doc = "Generated from 'VK_EXT_debug_report'"] impl StructureType { pub const DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: Self = Self(1_000_011_000); } #[doc = "Generated from 'VK_NV_glsl_shader'"] impl Result { pub const ERROR_INVALID_SHADER_NV: Self = Self(-1_000_012_000); } #[doc = "Generated from 'VK_KHR_sampler_mirror_clamp_to_edge'"] impl SamplerAddressMode { #[doc = "Note that this defines what was previously a core enum, and so uses the 'value' attribute rather than 'offset', and does not have a suffix. 
This is a special case, and should not be repeated"] pub const MIRROR_CLAMP_TO_EDGE: Self = Self(4); } #[doc = "Generated from 'VK_IMG_filter_cubic'"] impl Filter { pub const CUBIC_IMG: Self = Self::CUBIC_EXT; } #[doc = "Generated from 'VK_IMG_filter_cubic'"] impl FormatFeatureFlags { #[doc = "Format can be filtered with VK_FILTER_CUBIC_IMG when being sampled"] pub const SAMPLED_IMAGE_FILTER_CUBIC_IMG: Self = Self::SAMPLED_IMAGE_FILTER_CUBIC_EXT; } #[doc = "Generated from 'VK_AMD_rasterization_order'"] impl StructureType { pub const PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: Self = Self(1_000_018_000); } #[doc = "Generated from 'VK_EXT_debug_marker'"] impl StructureType { pub const DEBUG_MARKER_OBJECT_NAME_INFO_EXT: Self = Self(1_000_022_000); pub const DEBUG_MARKER_OBJECT_TAG_INFO_EXT: Self = Self(1_000_022_001); pub const DEBUG_MARKER_MARKER_INFO_EXT: Self = Self(1_000_022_002); } #[doc = "Generated from 'VK_KHR_video_queue'"] impl ObjectType { #[doc = "VkVideoSessionKHR"] pub const VIDEO_SESSION_KHR: Self = Self(1_000_023_000); #[doc = "VkVideoSessionParametersKHR"] pub const VIDEO_SESSION_PARAMETERS_KHR: Self = Self(1_000_023_001); } #[doc = "Generated from 'VK_KHR_video_queue'"] impl QueryResultFlags { pub const WITH_STATUS_KHR: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_KHR_video_queue'"] impl QueryType { pub const RESULT_STATUS_ONLY_KHR: Self = Self(1_000_023_000); } #[doc = "Generated from 'VK_KHR_video_queue'"] impl Result { pub const ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_000); pub const ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_001); pub const ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_002); pub const ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_003); pub const ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_004); pub const ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_005); } #[doc = "Generated from 'VK_KHR_video_queue'"] impl StructureType { pub const VIDEO_PROFILE_INFO_KHR: Self = Self(1_000_023_000); pub const VIDEO_CAPABILITIES_KHR: Self = Self(1_000_023_001); pub const VIDEO_PICTURE_RESOURCE_INFO_KHR: Self = Self(1_000_023_002); pub const VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR: Self = Self(1_000_023_003); pub const BIND_VIDEO_SESSION_MEMORY_INFO_KHR: Self = Self(1_000_023_004); pub const VIDEO_SESSION_CREATE_INFO_KHR: Self = Self(1_000_023_005); pub const VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_023_006); pub const VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR: Self = Self(1_000_023_007); pub const VIDEO_BEGIN_CODING_INFO_KHR: Self = Self(1_000_023_008); pub const VIDEO_END_CODING_INFO_KHR: Self = Self(1_000_023_009); pub const VIDEO_CODING_CONTROL_INFO_KHR: Self = Self(1_000_023_010); pub const VIDEO_REFERENCE_SLOT_INFO_KHR: Self = Self(1_000_023_011); pub const QUEUE_FAMILY_VIDEO_PROPERTIES_KHR: Self = Self(1_000_023_012); pub const VIDEO_PROFILE_LIST_INFO_KHR: Self = Self(1_000_023_013); pub const PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR: Self = Self(1_000_023_014); pub const VIDEO_FORMAT_PROPERTIES_KHR: Self = Self(1_000_023_015); pub const QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR: Self = Self(1_000_023_016); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl AccessFlags2 { pub const VIDEO_DECODE_READ_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000); pub const VIDEO_DECODE_WRITE_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated 
from 'VK_KHR_video_decode_queue'"] impl BufferUsageFlags { pub const VIDEO_DECODE_SRC_KHR: Self = Self(0b10_0000_0000_0000); pub const VIDEO_DECODE_DST_KHR: Self = Self(0b100_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl FormatFeatureFlags { pub const VIDEO_DECODE_OUTPUT_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const VIDEO_DECODE_DPB_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl FormatFeatureFlags2 { pub const VIDEO_DECODE_OUTPUT_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const VIDEO_DECODE_DPB_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl ImageLayout { pub const VIDEO_DECODE_DST_KHR: Self = Self(1_000_024_000); pub const VIDEO_DECODE_SRC_KHR: Self = Self(1_000_024_001); pub const VIDEO_DECODE_DPB_KHR: Self = Self(1_000_024_002); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl ImageUsageFlags { pub const VIDEO_DECODE_DST_KHR: Self = Self(0b100_0000_0000); pub const VIDEO_DECODE_SRC_KHR: Self = Self(0b1000_0000_0000); pub const VIDEO_DECODE_DPB_KHR: Self = Self(0b1_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl PipelineStageFlags2 { pub const VIDEO_DECODE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl QueueFlags { pub const VIDEO_DECODE_KHR: Self = Self(0b10_0000); } #[doc = "Generated from 'VK_KHR_video_decode_queue'"] impl StructureType { pub const VIDEO_DECODE_INFO_KHR: Self = Self(1_000_024_000); pub const VIDEO_DECODE_CAPABILITIES_KHR: Self = Self(1_000_024_001); pub const VIDEO_DECODE_USAGE_INFO_KHR: Self = Self(1_000_024_002); } #[doc = "Generated from 'VK_NV_dedicated_allocation'"] impl StructureType { pub const DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: Self = Self(1_000_026_000); pub const DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: Self = Self(1_000_026_001); pub const DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: Self = Self(1_000_026_002); } #[doc = "Generated from 'VK_EXT_transform_feedback'"] impl AccessFlags { pub const TRANSFORM_FEEDBACK_WRITE_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const TRANSFORM_FEEDBACK_COUNTER_READ_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); pub const TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_transform_feedback'"] impl BufferUsageFlags { pub const TRANSFORM_FEEDBACK_BUFFER_EXT: Self = Self(0b1000_0000_0000); pub const TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT: Self = Self(0b1_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_transform_feedback'"] impl PipelineStageFlags { pub const TRANSFORM_FEEDBACK_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_transform_feedback'"] impl QueryType { pub const TRANSFORM_FEEDBACK_STREAM_EXT: Self = Self(1_000_028_004); } #[doc = "Generated from 'VK_EXT_transform_feedback'"] impl StructureType { pub const PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: Self = Self(1_000_028_000); pub const PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: Self = Self(1_000_028_001); pub const PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: Self = Self(1_000_028_002); } #[doc = "Generated from 'VK_NVX_binary_import'"] impl DebugReportObjectTypeEXT { pub const CU_MODULE_NVX: Self = Self(1_000_029_000); pub const CU_FUNCTION_NVX: Self = Self(1_000_029_001); } #[doc = "Generated 
from 'VK_NVX_binary_import'"] impl ObjectType { pub const CU_MODULE_NVX: Self = Self(1_000_029_000); pub const CU_FUNCTION_NVX: Self = Self(1_000_029_001); } #[doc = "Generated from 'VK_NVX_binary_import'"] impl StructureType { pub const CU_MODULE_CREATE_INFO_NVX: Self = Self(1_000_029_000); pub const CU_FUNCTION_CREATE_INFO_NVX: Self = Self(1_000_029_001); pub const CU_LAUNCH_INFO_NVX: Self = Self(1_000_029_002); } #[doc = "Generated from 'VK_NVX_image_view_handle'"] impl StructureType { pub const IMAGE_VIEW_HANDLE_INFO_NVX: Self = Self(1_000_030_000); pub const IMAGE_VIEW_ADDRESS_PROPERTIES_NVX: Self = Self(1_000_030_001); } #[doc = "Generated from 'VK_KHR_video_encode_h264'"] impl StructureType { pub const VIDEO_ENCODE_H264_CAPABILITIES_KHR: Self = Self(1_000_038_000); pub const VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_038_001); pub const VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: Self = Self(1_000_038_002); pub const VIDEO_ENCODE_H264_PICTURE_INFO_KHR: Self = Self(1_000_038_003); pub const VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR: Self = Self(1_000_038_004); pub const VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR: Self = Self(1_000_038_005); pub const VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR: Self = Self(1_000_038_006); pub const VIDEO_ENCODE_H264_PROFILE_INFO_KHR: Self = Self(1_000_038_007); pub const VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR: Self = Self(1_000_038_008); pub const VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR: Self = Self(1_000_038_009); pub const VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR: Self = Self(1_000_038_010); pub const VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR: Self = Self(1_000_038_011); pub const VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR: Self = Self(1_000_038_012); pub const VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: Self = Self(1_000_038_013); } #[doc = "Generated from 'VK_KHR_video_encode_h264'"] impl VideoCodecOperationFlagsKHR { pub const ENCODE_H264: Self = Self(0b1_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_h265'"] impl StructureType { pub const VIDEO_ENCODE_H265_CAPABILITIES_KHR: Self = Self(1_000_039_000); pub const VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_039_001); pub const VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: Self = Self(1_000_039_002); pub const VIDEO_ENCODE_H265_PICTURE_INFO_KHR: Self = Self(1_000_039_003); pub const VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR: Self = Self(1_000_039_004); pub const VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR: Self = Self(1_000_039_005); pub const VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR: Self = Self(1_000_039_006); pub const VIDEO_ENCODE_H265_PROFILE_INFO_KHR: Self = Self(1_000_039_007); pub const VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR: Self = Self(1_000_039_009); pub const VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR: Self = Self(1_000_039_010); pub const VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR: Self = Self(1_000_039_011); pub const VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR: Self = Self(1_000_039_012); pub const VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR: Self = Self(1_000_039_013); pub const VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: Self = Self(1_000_039_014); } #[doc = "Generated from 'VK_KHR_video_encode_h265'"] impl VideoCodecOperationFlagsKHR { pub const ENCODE_H265: Self = Self(0b10_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_decode_h264'"] impl StructureType { pub const VIDEO_DECODE_H264_CAPABILITIES_KHR: Self = 
Self(1_000_040_000); pub const VIDEO_DECODE_H264_PICTURE_INFO_KHR: Self = Self(1_000_040_001); pub const VIDEO_DECODE_H264_PROFILE_INFO_KHR: Self = Self(1_000_040_003); pub const VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_040_004); pub const VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: Self = Self(1_000_040_005); pub const VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR: Self = Self(1_000_040_006); } #[doc = "Generated from 'VK_KHR_video_decode_h264'"] impl VideoCodecOperationFlagsKHR { pub const DECODE_H264: Self = Self(0b1); } #[doc = "Generated from 'VK_AMD_texture_gather_bias_lod'"] impl StructureType { pub const TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD: Self = Self(1_000_041_000); } #[doc = "Generated from 'VK_KHR_dynamic_rendering'"] impl AttachmentStoreOp { pub const NONE_KHR: Self = Self::NONE; } #[doc = "Generated from 'VK_KHR_dynamic_rendering'"] impl PipelineCreateFlags { pub const RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b10_0000_0000_0000_0000_0000); pub const RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT: Self = Self(0b100_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_dynamic_rendering'"] impl StructureType { pub const RENDERING_INFO_KHR: Self = Self::RENDERING_INFO; pub const RENDERING_ATTACHMENT_INFO_KHR: Self = Self::RENDERING_ATTACHMENT_INFO; pub const PIPELINE_RENDERING_CREATE_INFO_KHR: Self = Self::PIPELINE_RENDERING_CREATE_INFO; pub const PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES; pub const COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR: Self = Self::COMMAND_BUFFER_INHERITANCE_RENDERING_INFO; pub const RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: Self = Self(1_000_044_006); pub const RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT: Self = Self(1_000_044_007); pub const ATTACHMENT_SAMPLE_COUNT_INFO_AMD: Self = Self(1_000_044_008); pub const ATTACHMENT_SAMPLE_COUNT_INFO_NV: Self = Self::ATTACHMENT_SAMPLE_COUNT_INFO_AMD; pub const MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX: Self = Self(1_000_044_009); } #[doc = "Generated from 'VK_GGP_stream_descriptor_surface'"] impl StructureType { pub const STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP: Self = Self(1_000_049_000); } #[doc = "Generated from 'VK_NV_corner_sampled_image'"] impl ImageCreateFlags { pub const CORNER_SAMPLED_NV: Self = Self(0b10_0000_0000_0000); } #[doc = "Generated from 'VK_NV_corner_sampled_image'"] impl StructureType { pub const PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: Self = Self(1_000_050_000); } #[doc = "Generated from 'VK_KHR_multiview'"] impl DependencyFlags { pub const VIEW_LOCAL_KHR: Self = Self::VIEW_LOCAL; } #[doc = "Generated from 'VK_KHR_multiview'"] impl StructureType { pub const RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR: Self = Self::RENDER_PASS_MULTIVIEW_CREATE_INFO; pub const PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_MULTIVIEW_FEATURES; pub const PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES; } #[doc = "Generated from 'VK_IMG_format_pvrtc'"] impl Format { pub const PVRTC1_2BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_000); pub const PVRTC1_4BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_001); pub const PVRTC2_2BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_002); pub const PVRTC2_4BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_003); pub const PVRTC1_2BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_004); pub const PVRTC1_4BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_005); pub const PVRTC2_2BPP_SRGB_BLOCK_IMG: Self = 
Self(1_000_054_006); pub const PVRTC2_4BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_007); } #[doc = "Generated from 'VK_NV_external_memory'"] impl StructureType { pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: Self = Self(1_000_056_000); pub const EXPORT_MEMORY_ALLOCATE_INFO_NV: Self = Self(1_000_056_001); } #[doc = "Generated from 'VK_NV_external_memory_win32'"] impl StructureType { pub const IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: Self = Self(1_000_057_000); pub const EXPORT_MEMORY_WIN32_HANDLE_INFO_NV: Self = Self(1_000_057_001); } #[doc = "Generated from 'VK_NV_win32_keyed_mutex'"] impl StructureType { pub const WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: Self = Self(1_000_058_000); } #[doc = "Generated from 'VK_KHR_get_physical_device_properties2'"] impl StructureType { pub const PHYSICAL_DEVICE_FEATURES_2_KHR: Self = Self::PHYSICAL_DEVICE_FEATURES_2; pub const PHYSICAL_DEVICE_PROPERTIES_2_KHR: Self = Self::PHYSICAL_DEVICE_PROPERTIES_2; pub const FORMAT_PROPERTIES_2_KHR: Self = Self::FORMAT_PROPERTIES_2; pub const IMAGE_FORMAT_PROPERTIES_2_KHR: Self = Self::IMAGE_FORMAT_PROPERTIES_2; pub const PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR: Self = Self::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2; pub const QUEUE_FAMILY_PROPERTIES_2_KHR: Self = Self::QUEUE_FAMILY_PROPERTIES_2; pub const PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR: Self = Self::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2; pub const SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR: Self = Self::SPARSE_IMAGE_FORMAT_PROPERTIES_2; pub const PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR: Self = Self::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2; } #[doc = "Generated from 'VK_KHR_device_group'"] impl DependencyFlags { pub const DEVICE_GROUP_KHR: Self = Self::DEVICE_GROUP; } #[doc = "Generated from 'VK_KHR_device_group'"] impl ImageCreateFlags { pub const SPLIT_INSTANCE_BIND_REGIONS_KHR: Self = Self::SPLIT_INSTANCE_BIND_REGIONS; } #[doc = "Generated from 'VK_KHR_device_group'"] impl MemoryAllocateFlags { pub const DEVICE_MASK_KHR: Self = Self::DEVICE_MASK; } #[doc = "Generated from 'VK_KHR_device_group'"] impl PeerMemoryFeatureFlags { pub const COPY_SRC_KHR: Self = Self::COPY_SRC; pub const COPY_DST_KHR: Self = Self::COPY_DST; pub const GENERIC_SRC_KHR: Self = Self::GENERIC_SRC; pub const GENERIC_DST_KHR: Self = Self::GENERIC_DST; } #[doc = "Generated from 'VK_KHR_device_group'"] impl PipelineCreateFlags { pub const VIEW_INDEX_FROM_DEVICE_INDEX_KHR: Self = Self::VIEW_INDEX_FROM_DEVICE_INDEX; } #[doc = "Generated from 'VK_KHR_device_group'"] impl StructureType { pub const MEMORY_ALLOCATE_FLAGS_INFO_KHR: Self = Self::MEMORY_ALLOCATE_FLAGS_INFO; pub const DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR: Self = Self::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO; pub const DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR: Self = Self::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO; pub const DEVICE_GROUP_SUBMIT_INFO_KHR: Self = Self::DEVICE_GROUP_SUBMIT_INFO; pub const DEVICE_GROUP_BIND_SPARSE_INFO_KHR: Self = Self::DEVICE_GROUP_BIND_SPARSE_INFO; pub const BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR: Self = Self::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO; pub const BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR: Self = Self::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO; } #[doc = "Generated from 'VK_EXT_validation_flags'"] impl StructureType { pub const VALIDATION_FLAGS_EXT: Self = Self(1_000_061_000); } #[doc = "Generated from 'VK_NN_vi_surface'"] impl StructureType { pub const VI_SURFACE_CREATE_INFO_NN: Self = Self(1_000_062_000); } #[doc = "Generated from 'VK_EXT_texture_compression_astc_hdr'"] impl Format { pub const 
ASTC_4X4_SFLOAT_BLOCK_EXT: Self = Self::ASTC_4X4_SFLOAT_BLOCK; pub const ASTC_5X4_SFLOAT_BLOCK_EXT: Self = Self::ASTC_5X4_SFLOAT_BLOCK; pub const ASTC_5X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_5X5_SFLOAT_BLOCK; pub const ASTC_6X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_6X5_SFLOAT_BLOCK; pub const ASTC_6X6_SFLOAT_BLOCK_EXT: Self = Self::ASTC_6X6_SFLOAT_BLOCK; pub const ASTC_8X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_8X5_SFLOAT_BLOCK; pub const ASTC_8X6_SFLOAT_BLOCK_EXT: Self = Self::ASTC_8X6_SFLOAT_BLOCK; pub const ASTC_8X8_SFLOAT_BLOCK_EXT: Self = Self::ASTC_8X8_SFLOAT_BLOCK; pub const ASTC_10X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X5_SFLOAT_BLOCK; pub const ASTC_10X6_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X6_SFLOAT_BLOCK; pub const ASTC_10X8_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X8_SFLOAT_BLOCK; pub const ASTC_10X10_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X10_SFLOAT_BLOCK; pub const ASTC_12X10_SFLOAT_BLOCK_EXT: Self = Self::ASTC_12X10_SFLOAT_BLOCK; pub const ASTC_12X12_SFLOAT_BLOCK_EXT: Self = Self::ASTC_12X12_SFLOAT_BLOCK; } #[doc = "Generated from 'VK_EXT_texture_compression_astc_hdr'"] impl StructureType { pub const PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES; } #[doc = "Generated from 'VK_EXT_astc_decode_mode'"] impl StructureType { pub const IMAGE_VIEW_ASTC_DECODE_MODE_EXT: Self = Self(1_000_067_000); pub const PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: Self = Self(1_000_067_001); } #[doc = "Generated from 'VK_EXT_pipeline_robustness'"] impl StructureType { pub const PIPELINE_ROBUSTNESS_CREATE_INFO_EXT: Self = Self(1_000_068_000); pub const PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT: Self = Self(1_000_068_001); pub const PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT: Self = Self(1_000_068_002); } #[doc = "Generated from 'VK_KHR_maintenance1'"] impl FormatFeatureFlags { pub const TRANSFER_SRC_KHR: Self = Self::TRANSFER_SRC; pub const TRANSFER_DST_KHR: Self = Self::TRANSFER_DST; } #[doc = "Generated from 'VK_KHR_maintenance1'"] impl ImageCreateFlags { pub const TYPE_2D_ARRAY_COMPATIBLE_KHR: Self = Self::TYPE_2D_ARRAY_COMPATIBLE; } #[doc = "Generated from 'VK_KHR_maintenance1'"] impl Result { pub const ERROR_OUT_OF_POOL_MEMORY_KHR: Self = Self::ERROR_OUT_OF_POOL_MEMORY; } #[doc = "Generated from 'VK_KHR_device_group_creation'"] impl MemoryHeapFlags { pub const MULTI_INSTANCE_KHR: Self = Self::MULTI_INSTANCE; } #[doc = "Generated from 'VK_KHR_device_group_creation'"] impl StructureType { pub const PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_GROUP_PROPERTIES; pub const DEVICE_GROUP_DEVICE_CREATE_INFO_KHR: Self = Self::DEVICE_GROUP_DEVICE_CREATE_INFO; } #[doc = "Generated from 'VK_KHR_external_memory_capabilities'"] impl ExternalMemoryFeatureFlags { pub const DEDICATED_ONLY_KHR: Self = Self::DEDICATED_ONLY; pub const EXPORTABLE_KHR: Self = Self::EXPORTABLE; pub const IMPORTABLE_KHR: Self = Self::IMPORTABLE; } #[doc = "Generated from 'VK_KHR_external_memory_capabilities'"] impl ExternalMemoryHandleTypeFlags { pub const OPAQUE_FD_KHR: Self = Self::OPAQUE_FD; pub const OPAQUE_WIN32_KHR: Self = Self::OPAQUE_WIN32; pub const OPAQUE_WIN32_KMT_KHR: Self = Self::OPAQUE_WIN32_KMT; pub const D3D11_TEXTURE_KHR: Self = Self::D3D11_TEXTURE; pub const D3D11_TEXTURE_KMT_KHR: Self = Self::D3D11_TEXTURE_KMT; pub const D3D12_HEAP_KHR: Self = Self::D3D12_HEAP; pub const D3D12_RESOURCE_KHR: Self = Self::D3D12_RESOURCE; } #[doc = "Generated from 'VK_KHR_external_memory_capabilities'"] impl StructureType { 
pub const PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR: Self = Self::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO; pub const EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR: Self = Self::EXTERNAL_IMAGE_FORMAT_PROPERTIES; pub const PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR: Self = Self::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO; pub const EXTERNAL_BUFFER_PROPERTIES_KHR: Self = Self::EXTERNAL_BUFFER_PROPERTIES; pub const PHYSICAL_DEVICE_ID_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_ID_PROPERTIES; } #[doc = "Generated from 'VK_KHR_external_memory'"] impl Result { pub const ERROR_INVALID_EXTERNAL_HANDLE_KHR: Self = Self::ERROR_INVALID_EXTERNAL_HANDLE; } #[doc = "Generated from 'VK_KHR_external_memory'"] impl StructureType { pub const EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR: Self = Self::EXTERNAL_MEMORY_BUFFER_CREATE_INFO; pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR: Self = Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO; pub const EXPORT_MEMORY_ALLOCATE_INFO_KHR: Self = Self::EXPORT_MEMORY_ALLOCATE_INFO; } #[doc = "Generated from 'VK_KHR_external_memory_win32'"] impl StructureType { pub const IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_073_000); pub const EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_073_001); pub const MEMORY_WIN32_HANDLE_PROPERTIES_KHR: Self = Self(1_000_073_002); pub const MEMORY_GET_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_073_003); } #[doc = "Generated from 'VK_KHR_external_memory_fd'"] impl StructureType { pub const IMPORT_MEMORY_FD_INFO_KHR: Self = Self(1_000_074_000); pub const MEMORY_FD_PROPERTIES_KHR: Self = Self(1_000_074_001); pub const MEMORY_GET_FD_INFO_KHR: Self = Self(1_000_074_002); } #[doc = "Generated from 'VK_KHR_win32_keyed_mutex'"] impl StructureType { pub const WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: Self = Self(1_000_075_000); } #[doc = "Generated from 'VK_KHR_external_semaphore_capabilities'"] impl ExternalSemaphoreFeatureFlags { pub const EXPORTABLE_KHR: Self = Self::EXPORTABLE; pub const IMPORTABLE_KHR: Self = Self::IMPORTABLE; } #[doc = "Generated from 'VK_KHR_external_semaphore_capabilities'"] impl ExternalSemaphoreHandleTypeFlags { pub const OPAQUE_FD_KHR: Self = Self::OPAQUE_FD; pub const OPAQUE_WIN32_KHR: Self = Self::OPAQUE_WIN32; pub const OPAQUE_WIN32_KMT_KHR: Self = Self::OPAQUE_WIN32_KMT; pub const D3D12_FENCE_KHR: Self = Self::D3D12_FENCE; pub const SYNC_FD_KHR: Self = Self::SYNC_FD; } #[doc = "Generated from 'VK_KHR_external_semaphore_capabilities'"] impl StructureType { pub const PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR: Self = Self::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO; pub const EXTERNAL_SEMAPHORE_PROPERTIES_KHR: Self = Self::EXTERNAL_SEMAPHORE_PROPERTIES; } #[doc = "Generated from 'VK_KHR_external_semaphore'"] impl SemaphoreImportFlags { pub const TEMPORARY_KHR: Self = Self::TEMPORARY; } #[doc = "Generated from 'VK_KHR_external_semaphore'"] impl StructureType { pub const EXPORT_SEMAPHORE_CREATE_INFO_KHR: Self = Self::EXPORT_SEMAPHORE_CREATE_INFO; } #[doc = "Generated from 'VK_KHR_external_semaphore_win32'"] impl StructureType { pub const IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_078_000); pub const EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_078_001); pub const D3D12_FENCE_SUBMIT_INFO_KHR: Self = Self(1_000_078_002); pub const SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_078_003); } #[doc = "Generated from 'VK_KHR_external_semaphore_fd'"] impl StructureType { pub const IMPORT_SEMAPHORE_FD_INFO_KHR: Self = Self(1_000_079_000); pub const SEMAPHORE_GET_FD_INFO_KHR: Self = 
Self(1_000_079_001); } #[doc = "Generated from 'VK_KHR_push_descriptor'"] impl DescriptorSetLayoutCreateFlags { #[doc = "Descriptors are pushed via flink:vkCmdPushDescriptorSetKHR"] pub const PUSH_DESCRIPTOR_KHR: Self = Self(0b1); } #[doc = "Generated from 'VK_KHR_push_descriptor'"] impl DescriptorUpdateTemplateType { #[doc = "Create descriptor update template for pushed descriptor updates"] pub const PUSH_DESCRIPTORS_KHR: Self = Self(1); } #[doc = "Generated from 'VK_KHR_push_descriptor'"] impl StructureType { pub const PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: Self = Self(1_000_080_000); } #[doc = "Generated from 'VK_EXT_conditional_rendering'"] impl AccessFlags { #[doc = "read access flag for reading conditional rendering predicate"] pub const CONDITIONAL_RENDERING_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_conditional_rendering'"] impl BufferUsageFlags { #[doc = "Specifies the buffer can be used as predicate in conditional rendering"] pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b10_0000_0000); } #[doc = "Generated from 'VK_EXT_conditional_rendering'"] impl PipelineStageFlags { #[doc = "A pipeline stage for conditional rendering predicate fetch"] pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b100_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_conditional_rendering'"] impl StructureType { pub const COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: Self = Self(1_000_081_000); pub const PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: Self = Self(1_000_081_001); pub const CONDITIONAL_RENDERING_BEGIN_INFO_EXT: Self = Self(1_000_081_002); } #[doc = "Generated from 'VK_KHR_shader_float16_int8'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES; pub const PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES; } #[doc = "Generated from 'VK_KHR_16bit_storage'"] impl StructureType { pub const PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES; } #[doc = "Generated from 'VK_KHR_incremental_present'"] impl StructureType { pub const PRESENT_REGIONS_KHR: Self = Self(1_000_084_000); } #[doc = "Generated from 'VK_KHR_descriptor_update_template'"] impl DebugReportObjectTypeEXT { pub const DESCRIPTOR_UPDATE_TEMPLATE_KHR: Self = Self::DESCRIPTOR_UPDATE_TEMPLATE; } #[doc = "Generated from 'VK_KHR_descriptor_update_template'"] impl DescriptorUpdateTemplateType { pub const DESCRIPTOR_SET_KHR: Self = Self::DESCRIPTOR_SET; } #[doc = "Generated from 'VK_KHR_descriptor_update_template'"] impl ObjectType { pub const DESCRIPTOR_UPDATE_TEMPLATE_KHR: Self = Self::DESCRIPTOR_UPDATE_TEMPLATE; } #[doc = "Generated from 'VK_KHR_descriptor_update_template'"] impl StructureType { pub const DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR: Self = Self::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO; } #[doc = "Generated from 'VK_NV_clip_space_w_scaling'"] impl DynamicState { pub const VIEWPORT_W_SCALING_NV: Self = Self(1_000_087_000); } #[doc = "Generated from 'VK_NV_clip_space_w_scaling'"] impl StructureType { pub const PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: Self = Self(1_000_087_000); } #[doc = "Generated from 'VK_EXT_display_surface_counter'"] impl StructureType { pub const SURFACE_CAPABILITIES_2_EXT: Self = Self(1_000_090_000); } #[doc = "Generated from 'VK_EXT_display_control'"] impl StructureType { pub const DISPLAY_POWER_INFO_EXT: Self = Self(1_000_091_000); 
pub const DEVICE_EVENT_INFO_EXT: Self = Self(1_000_091_001); pub const DISPLAY_EVENT_INFO_EXT: Self = Self(1_000_091_002); pub const SWAPCHAIN_COUNTER_CREATE_INFO_EXT: Self = Self(1_000_091_003); } #[doc = "Generated from 'VK_GOOGLE_display_timing'"] impl StructureType { pub const PRESENT_TIMES_INFO_GOOGLE: Self = Self(1_000_092_000); } #[doc = "Generated from 'VK_NVX_multiview_per_view_attributes'"] impl StructureType { pub const PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX: Self = Self(1_000_097_000); } #[doc = "Generated from 'VK_NVX_multiview_per_view_attributes'"] impl SubpassDescriptionFlags { pub const PER_VIEW_ATTRIBUTES_NVX: Self = Self(0b1); pub const PER_VIEW_POSITION_X_ONLY_NVX: Self = Self(0b10); } #[doc = "Generated from 'VK_NV_viewport_swizzle'"] impl StructureType { pub const PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: Self = Self(1_000_098_000); } #[doc = "Generated from 'VK_EXT_discard_rectangles'"] impl DynamicState { pub const DISCARD_RECTANGLE_EXT: Self = Self(1_000_099_000); pub const DISCARD_RECTANGLE_ENABLE_EXT: Self = Self(1_000_099_001); pub const DISCARD_RECTANGLE_MODE_EXT: Self = Self(1_000_099_002); } #[doc = "Generated from 'VK_EXT_discard_rectangles'"] impl StructureType { pub const PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: Self = Self(1_000_099_000); pub const PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: Self = Self(1_000_099_001); } #[doc = "Generated from 'VK_EXT_conservative_rasterization'"] impl StructureType { pub const PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT: Self = Self(1_000_101_000); pub const PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: Self = Self(1_000_101_001); } #[doc = "Generated from 'VK_EXT_depth_clip_enable'"] impl StructureType { pub const PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: Self = Self(1_000_102_000); pub const PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: Self = Self(1_000_102_001); } #[doc = "Generated from 'VK_EXT_swapchain_colorspace'"] impl ColorSpaceKHR { pub const DISPLAY_P3_NONLINEAR_EXT: Self = Self(1_000_104_001); pub const EXTENDED_SRGB_LINEAR_EXT: Self = Self(1_000_104_002); pub const DISPLAY_P3_LINEAR_EXT: Self = Self(1_000_104_003); pub const DCI_P3_NONLINEAR_EXT: Self = Self(1_000_104_004); pub const BT709_LINEAR_EXT: Self = Self(1_000_104_005); pub const BT709_NONLINEAR_EXT: Self = Self(1_000_104_006); pub const BT2020_LINEAR_EXT: Self = Self(1_000_104_007); pub const HDR10_ST2084_EXT: Self = Self(1_000_104_008); pub const DOLBYVISION_EXT: Self = Self(1_000_104_009); pub const HDR10_HLG_EXT: Self = Self(1_000_104_010); pub const ADOBERGB_LINEAR_EXT: Self = Self(1_000_104_011); pub const ADOBERGB_NONLINEAR_EXT: Self = Self(1_000_104_012); pub const PASS_THROUGH_EXT: Self = Self(1_000_104_013); pub const EXTENDED_SRGB_NONLINEAR_EXT: Self = Self(1_000_104_014); } #[doc = "Generated from 'VK_EXT_hdr_metadata'"] impl StructureType { pub const HDR_METADATA_EXT: Self = Self(1_000_105_000); } #[doc = "Generated from 'VK_KHR_imageless_framebuffer'"] impl FramebufferCreateFlags { pub const IMAGELESS_KHR: Self = Self::IMAGELESS; } #[doc = "Generated from 'VK_KHR_imageless_framebuffer'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES; pub const FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR: Self = Self::FRAMEBUFFER_ATTACHMENTS_CREATE_INFO; pub const FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR: Self = Self::FRAMEBUFFER_ATTACHMENT_IMAGE_INFO; pub const 
RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR: Self = Self::RENDER_PASS_ATTACHMENT_BEGIN_INFO; } #[doc = "Generated from 'VK_KHR_create_renderpass2'"] impl StructureType { pub const ATTACHMENT_DESCRIPTION_2_KHR: Self = Self::ATTACHMENT_DESCRIPTION_2; pub const ATTACHMENT_REFERENCE_2_KHR: Self = Self::ATTACHMENT_REFERENCE_2; pub const SUBPASS_DESCRIPTION_2_KHR: Self = Self::SUBPASS_DESCRIPTION_2; pub const SUBPASS_DEPENDENCY_2_KHR: Self = Self::SUBPASS_DEPENDENCY_2; pub const RENDER_PASS_CREATE_INFO_2_KHR: Self = Self::RENDER_PASS_CREATE_INFO_2; pub const SUBPASS_BEGIN_INFO_KHR: Self = Self::SUBPASS_BEGIN_INFO; pub const SUBPASS_END_INFO_KHR: Self = Self::SUBPASS_END_INFO; } #[doc = "Generated from 'VK_IMG_relaxed_line_rasterization'"] impl StructureType { pub const PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG: Self = Self(1_000_110_000); } #[doc = "Generated from 'VK_KHR_shared_presentable_image'"] impl ImageLayout { pub const SHARED_PRESENT_KHR: Self = Self(1_000_111_000); } #[doc = "Generated from 'VK_KHR_shared_presentable_image'"] impl PresentModeKHR { pub const SHARED_DEMAND_REFRESH: Self = Self(1_000_111_000); pub const SHARED_CONTINUOUS_REFRESH: Self = Self(1_000_111_001); } #[doc = "Generated from 'VK_KHR_shared_presentable_image'"] impl StructureType { pub const SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: Self = Self(1_000_111_000); } #[doc = "Generated from 'VK_KHR_external_fence_capabilities'"] impl ExternalFenceFeatureFlags { pub const EXPORTABLE_KHR: Self = Self::EXPORTABLE; pub const IMPORTABLE_KHR: Self = Self::IMPORTABLE; } #[doc = "Generated from 'VK_KHR_external_fence_capabilities'"] impl ExternalFenceHandleTypeFlags { pub const OPAQUE_FD_KHR: Self = Self::OPAQUE_FD; pub const OPAQUE_WIN32_KHR: Self = Self::OPAQUE_WIN32; pub const OPAQUE_WIN32_KMT_KHR: Self = Self::OPAQUE_WIN32_KMT; pub const SYNC_FD_KHR: Self = Self::SYNC_FD; } #[doc = "Generated from 'VK_KHR_external_fence_capabilities'"] impl StructureType { pub const PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR: Self = Self::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO; pub const EXTERNAL_FENCE_PROPERTIES_KHR: Self = Self::EXTERNAL_FENCE_PROPERTIES; } #[doc = "Generated from 'VK_KHR_external_fence'"] impl FenceImportFlags { pub const TEMPORARY_KHR: Self = Self::TEMPORARY; } #[doc = "Generated from 'VK_KHR_external_fence'"] impl StructureType { pub const EXPORT_FENCE_CREATE_INFO_KHR: Self = Self::EXPORT_FENCE_CREATE_INFO; } #[doc = "Generated from 'VK_KHR_external_fence_win32'"] impl StructureType { pub const IMPORT_FENCE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_114_000); pub const EXPORT_FENCE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_114_001); pub const FENCE_GET_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_114_002); } #[doc = "Generated from 'VK_KHR_external_fence_fd'"] impl StructureType { pub const IMPORT_FENCE_FD_INFO_KHR: Self = Self(1_000_115_000); pub const FENCE_GET_FD_INFO_KHR: Self = Self(1_000_115_001); } #[doc = "Generated from 'VK_KHR_performance_query'"] impl QueryType { pub const PERFORMANCE_QUERY_KHR: Self = Self(1_000_116_000); } #[doc = "Generated from 'VK_KHR_performance_query'"] impl StructureType { pub const PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: Self = Self(1_000_116_000); pub const PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR: Self = Self(1_000_116_001); pub const QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR: Self = Self(1_000_116_002); pub const PERFORMANCE_QUERY_SUBMIT_INFO_KHR: Self = Self(1_000_116_003); pub const ACQUIRE_PROFILING_LOCK_INFO_KHR: Self = Self(1_000_116_004); pub const 
PERFORMANCE_COUNTER_KHR: Self = Self(1_000_116_005); pub const PERFORMANCE_COUNTER_DESCRIPTION_KHR: Self = Self(1_000_116_006); } #[doc = "Generated from 'VK_KHR_maintenance2'"] impl ImageCreateFlags { pub const BLOCK_TEXEL_VIEW_COMPATIBLE_KHR: Self = Self::BLOCK_TEXEL_VIEW_COMPATIBLE; pub const EXTENDED_USAGE_KHR: Self = Self::EXTENDED_USAGE; } #[doc = "Generated from 'VK_KHR_maintenance2'"] impl ImageLayout { pub const DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR: Self = Self::DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL; pub const DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR: Self = Self::DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL; } #[doc = "Generated from 'VK_KHR_maintenance2'"] impl PointClippingBehavior { pub const ALL_CLIP_PLANES_KHR: Self = Self::ALL_CLIP_PLANES; pub const USER_CLIP_PLANES_ONLY_KHR: Self = Self::USER_CLIP_PLANES_ONLY; } #[doc = "Generated from 'VK_KHR_maintenance2'"] impl StructureType { pub const PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES; pub const RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR: Self = Self::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO; pub const IMAGE_VIEW_USAGE_CREATE_INFO_KHR: Self = Self::IMAGE_VIEW_USAGE_CREATE_INFO; pub const PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR: Self = Self::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO; } #[doc = "Generated from 'VK_KHR_maintenance2'"] impl TessellationDomainOrigin { pub const UPPER_LEFT_KHR: Self = Self::UPPER_LEFT; pub const LOWER_LEFT_KHR: Self = Self::LOWER_LEFT; } #[doc = "Generated from 'VK_KHR_get_surface_capabilities2'"] impl StructureType { pub const PHYSICAL_DEVICE_SURFACE_INFO_2_KHR: Self = Self(1_000_119_000); pub const SURFACE_CAPABILITIES_2_KHR: Self = Self(1_000_119_001); pub const SURFACE_FORMAT_2_KHR: Self = Self(1_000_119_002); } #[doc = "Generated from 'VK_KHR_variable_pointers'"] impl StructureType { pub const PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES; pub const PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR; } #[doc = "Generated from 'VK_KHR_get_display_properties2'"] impl StructureType { pub const DISPLAY_PROPERTIES_2_KHR: Self = Self(1_000_121_000); pub const DISPLAY_PLANE_PROPERTIES_2_KHR: Self = Self(1_000_121_001); pub const DISPLAY_MODE_PROPERTIES_2_KHR: Self = Self(1_000_121_002); pub const DISPLAY_PLANE_INFO_2_KHR: Self = Self(1_000_121_003); pub const DISPLAY_PLANE_CAPABILITIES_2_KHR: Self = Self(1_000_121_004); } #[doc = "Generated from 'VK_MVK_ios_surface'"] impl StructureType { pub const IOS_SURFACE_CREATE_INFO_MVK: Self = Self(1_000_122_000); } #[doc = "Generated from 'VK_MVK_macos_surface'"] impl StructureType { pub const MACOS_SURFACE_CREATE_INFO_MVK: Self = Self(1_000_123_000); } #[doc = "Generated from 'VK_EXT_external_memory_dma_buf'"] impl ExternalMemoryHandleTypeFlags { pub const DMA_BUF_EXT: Self = Self(0b10_0000_0000); } #[doc = "Generated from 'VK_KHR_dedicated_allocation'"] impl StructureType { pub const MEMORY_DEDICATED_REQUIREMENTS_KHR: Self = Self::MEMORY_DEDICATED_REQUIREMENTS; pub const MEMORY_DEDICATED_ALLOCATE_INFO_KHR: Self = Self::MEMORY_DEDICATED_ALLOCATE_INFO; } #[doc = "Generated from 'VK_EXT_debug_utils'"] impl ObjectType { pub const DEBUG_UTILS_MESSENGER_EXT: Self = Self(1_000_128_000); } #[doc = "Generated from 'VK_EXT_debug_utils'"] impl StructureType { pub const DEBUG_UTILS_OBJECT_NAME_INFO_EXT: Self = Self(1_000_128_000); pub 
const DEBUG_UTILS_OBJECT_TAG_INFO_EXT: Self = Self(1_000_128_001); pub const DEBUG_UTILS_LABEL_EXT: Self = Self(1_000_128_002); pub const DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT: Self = Self(1_000_128_003); pub const DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: Self = Self(1_000_128_004); } #[doc = "Generated from 'VK_ANDROID_external_memory_android_hardware_buffer'"] impl ExternalMemoryHandleTypeFlags { pub const ANDROID_HARDWARE_BUFFER_ANDROID: Self = Self(0b100_0000_0000); } #[doc = "Generated from 'VK_ANDROID_external_memory_android_hardware_buffer'"] impl StructureType { pub const ANDROID_HARDWARE_BUFFER_USAGE_ANDROID: Self = Self(1_000_129_000); pub const ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID: Self = Self(1_000_129_001); pub const ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID: Self = Self(1_000_129_002); pub const IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: Self = Self(1_000_129_003); pub const MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: Self = Self(1_000_129_004); pub const EXTERNAL_FORMAT_ANDROID: Self = Self(1_000_129_005); pub const ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID: Self = Self(1_000_129_006); } #[doc = "Generated from 'VK_EXT_sampler_filter_minmax'"] impl FormatFeatureFlags { pub const SAMPLED_IMAGE_FILTER_MINMAX_EXT: Self = Self::SAMPLED_IMAGE_FILTER_MINMAX; } #[doc = "Generated from 'VK_EXT_sampler_filter_minmax'"] impl SamplerReductionMode { pub const WEIGHTED_AVERAGE_EXT: Self = Self::WEIGHTED_AVERAGE; pub const MIN_EXT: Self = Self::MIN; pub const MAX_EXT: Self = Self::MAX; } #[doc = "Generated from 'VK_EXT_sampler_filter_minmax'"] impl StructureType { pub const PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES; pub const SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT: Self = Self::SAMPLER_REDUCTION_MODE_CREATE_INFO; } #[doc = "Generated from 'VK_AMDX_shader_enqueue'"] impl BufferUsageFlags { pub const EXECUTION_GRAPH_SCRATCH_AMDX: Self = Self(0b10_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_AMDX_shader_enqueue'"] impl BufferUsageFlags2KHR { pub const EXECUTION_GRAPH_SCRATCH_AMDX: Self = Self(0b10_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_AMDX_shader_enqueue'"] impl PipelineBindPoint { pub const EXECUTION_GRAPH_AMDX: Self = Self(1_000_134_000); } #[doc = "Generated from 'VK_AMDX_shader_enqueue'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX: Self = Self(1_000_134_000); pub const PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX: Self = Self(1_000_134_001); pub const EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX: Self = Self(1_000_134_002); pub const EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX: Self = Self(1_000_134_003); pub const PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX: Self = Self(1_000_134_004); } #[doc = "Generated from 'VK_EXT_inline_uniform_block'"] impl DescriptorType { pub const INLINE_UNIFORM_BLOCK_EXT: Self = Self::INLINE_UNIFORM_BLOCK; } #[doc = "Generated from 'VK_EXT_inline_uniform_block'"] impl StructureType { pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES; pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES; pub const WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: Self = Self::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK; pub const DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT: Self = Self::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO; } #[doc = "Generated from 
'VK_EXT_sample_locations'"] impl DynamicState { pub const SAMPLE_LOCATIONS_EXT: Self = Self(1_000_143_000); } #[doc = "Generated from 'VK_EXT_sample_locations'"] impl ImageCreateFlags { pub const SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT: Self = Self(0b1_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_sample_locations'"] impl StructureType { pub const SAMPLE_LOCATIONS_INFO_EXT: Self = Self(1_000_143_000); pub const RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: Self = Self(1_000_143_001); pub const PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: Self = Self(1_000_143_002); pub const PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: Self = Self(1_000_143_003); pub const MULTISAMPLE_PROPERTIES_EXT: Self = Self(1_000_143_004); } #[doc = "Generated from 'VK_KHR_get_memory_requirements2'"] impl StructureType { pub const BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR: Self = Self::BUFFER_MEMORY_REQUIREMENTS_INFO_2; pub const IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR: Self = Self::IMAGE_MEMORY_REQUIREMENTS_INFO_2; pub const IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR: Self = Self::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2; pub const MEMORY_REQUIREMENTS_2_KHR: Self = Self::MEMORY_REQUIREMENTS_2; pub const SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR: Self = Self::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2; } #[doc = "Generated from 'VK_KHR_image_format_list'"] impl StructureType { pub const IMAGE_FORMAT_LIST_CREATE_INFO_KHR: Self = Self::IMAGE_FORMAT_LIST_CREATE_INFO; } #[doc = "Generated from 'VK_EXT_blend_operation_advanced'"] impl AccessFlags { pub const COLOR_ATTACHMENT_READ_NONCOHERENT_EXT: Self = Self(0b1000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_blend_operation_advanced'"] impl BlendOp { pub const ZERO_EXT: Self = Self(1_000_148_000); pub const SRC_EXT: Self = Self(1_000_148_001); pub const DST_EXT: Self = Self(1_000_148_002); pub const SRC_OVER_EXT: Self = Self(1_000_148_003); pub const DST_OVER_EXT: Self = Self(1_000_148_004); pub const SRC_IN_EXT: Self = Self(1_000_148_005); pub const DST_IN_EXT: Self = Self(1_000_148_006); pub const SRC_OUT_EXT: Self = Self(1_000_148_007); pub const DST_OUT_EXT: Self = Self(1_000_148_008); pub const SRC_ATOP_EXT: Self = Self(1_000_148_009); pub const DST_ATOP_EXT: Self = Self(1_000_148_010); pub const XOR_EXT: Self = Self(1_000_148_011); pub const MULTIPLY_EXT: Self = Self(1_000_148_012); pub const SCREEN_EXT: Self = Self(1_000_148_013); pub const OVERLAY_EXT: Self = Self(1_000_148_014); pub const DARKEN_EXT: Self = Self(1_000_148_015); pub const LIGHTEN_EXT: Self = Self(1_000_148_016); pub const COLORDODGE_EXT: Self = Self(1_000_148_017); pub const COLORBURN_EXT: Self = Self(1_000_148_018); pub const HARDLIGHT_EXT: Self = Self(1_000_148_019); pub const SOFTLIGHT_EXT: Self = Self(1_000_148_020); pub const DIFFERENCE_EXT: Self = Self(1_000_148_021); pub const EXCLUSION_EXT: Self = Self(1_000_148_022); pub const INVERT_EXT: Self = Self(1_000_148_023); pub const INVERT_RGB_EXT: Self = Self(1_000_148_024); pub const LINEARDODGE_EXT: Self = Self(1_000_148_025); pub const LINEARBURN_EXT: Self = Self(1_000_148_026); pub const VIVIDLIGHT_EXT: Self = Self(1_000_148_027); pub const LINEARLIGHT_EXT: Self = Self(1_000_148_028); pub const PINLIGHT_EXT: Self = Self(1_000_148_029); pub const HARDMIX_EXT: Self = Self(1_000_148_030); pub const HSL_HUE_EXT: Self = Self(1_000_148_031); pub const HSL_SATURATION_EXT: Self = Self(1_000_148_032); pub const HSL_COLOR_EXT: Self = Self(1_000_148_033); pub const HSL_LUMINOSITY_EXT: Self = Self(1_000_148_034); pub const PLUS_EXT: Self = 
Self(1_000_148_035); pub const PLUS_CLAMPED_EXT: Self = Self(1_000_148_036); pub const PLUS_CLAMPED_ALPHA_EXT: Self = Self(1_000_148_037); pub const PLUS_DARKER_EXT: Self = Self(1_000_148_038); pub const MINUS_EXT: Self = Self(1_000_148_039); pub const MINUS_CLAMPED_EXT: Self = Self(1_000_148_040); pub const CONTRAST_EXT: Self = Self(1_000_148_041); pub const INVERT_OVG_EXT: Self = Self(1_000_148_042); pub const RED_EXT: Self = Self(1_000_148_043); pub const GREEN_EXT: Self = Self(1_000_148_044); pub const BLUE_EXT: Self = Self(1_000_148_045); } #[doc = "Generated from 'VK_EXT_blend_operation_advanced'"] impl StructureType { pub const PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: Self = Self(1_000_148_000); pub const PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT: Self = Self(1_000_148_001); pub const PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: Self = Self(1_000_148_002); } #[doc = "Generated from 'VK_NV_fragment_coverage_to_color'"] impl StructureType { pub const PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: Self = Self(1_000_149_000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl AccessFlags { pub const ACCELERATION_STRUCTURE_READ_KHR: Self = Self(0b10_0000_0000_0000_0000_0000); pub const ACCELERATION_STRUCTURE_WRITE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl BufferUsageFlags { pub const ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR: Self = Self(0b1000_0000_0000_0000_0000); pub const ACCELERATION_STRUCTURE_STORAGE_KHR: Self = Self(0b1_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl DebugReportObjectTypeEXT { pub const ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl DescriptorType { pub const ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl FormatFeatureFlags { pub const ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl FormatFeatureFlags2 { pub const ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl IndexType { pub const NONE_KHR: Self = Self(1_000_165_000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl ObjectType { pub const ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl PipelineStageFlags { pub const ACCELERATION_STRUCTURE_BUILD_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl QueryType { pub const ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR: Self = Self(1_000_150_000); pub const ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR: Self = Self(1_000_150_001); } #[doc = "Generated from 'VK_KHR_acceleration_structure'"] impl StructureType { pub const WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_007); pub const ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR: Self = Self(1_000_150_000); pub const ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR: Self = Self(1_000_150_002); pub const ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR: Self = Self(1_000_150_003); pub const ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR: Self = Self(1_000_150_004); pub const 
ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR: Self = Self(1_000_150_005); pub const ACCELERATION_STRUCTURE_GEOMETRY_KHR: Self = Self(1_000_150_006); pub const ACCELERATION_STRUCTURE_VERSION_INFO_KHR: Self = Self(1_000_150_009); pub const COPY_ACCELERATION_STRUCTURE_INFO_KHR: Self = Self(1_000_150_010); pub const COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR: Self = Self(1_000_150_011); pub const COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR: Self = Self(1_000_150_012); pub const PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR: Self = Self(1_000_150_013); pub const PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR: Self = Self(1_000_150_014); pub const ACCELERATION_STRUCTURE_CREATE_INFO_KHR: Self = Self(1_000_150_017); pub const ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR: Self = Self(1_000_150_020); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl BufferUsageFlags { pub const SHADER_BINDING_TABLE_KHR: Self = Self(0b100_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl DynamicState { pub const RAY_TRACING_PIPELINE_STACK_SIZE_KHR: Self = Self(1_000_347_000); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl PipelineBindPoint { pub const RAY_TRACING_KHR: Self = Self(1_000_165_000); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl PipelineCreateFlags { pub const RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_KHR: Self = Self(0b100_0000_0000_0000); pub const RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_KHR: Self = Self(0b1000_0000_0000_0000); pub const RAY_TRACING_NO_NULL_MISS_SHADERS_KHR: Self = Self(0b1_0000_0000_0000_0000); pub const RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_KHR: Self = Self(0b10_0000_0000_0000_0000); pub const RAY_TRACING_SKIP_TRIANGLES_KHR: Self = Self(0b1_0000_0000_0000); pub const RAY_TRACING_SKIP_AABBS_KHR: Self = Self(0b10_0000_0000_0000); pub const RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_KHR: Self = Self(0b1000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl PipelineStageFlags { pub const RAY_TRACING_SHADER_KHR: Self = Self(0b10_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl ShaderStageFlags { pub const RAYGEN_KHR: Self = Self(0b1_0000_0000); pub const ANY_HIT_KHR: Self = Self(0b10_0000_0000); pub const CLOSEST_HIT_KHR: Self = Self(0b100_0000_0000); pub const MISS_KHR: Self = Self(0b1000_0000_0000); pub const INTERSECTION_KHR: Self = Self(0b1_0000_0000_0000); pub const CALLABLE_KHR: Self = Self(0b10_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"] impl StructureType { pub const PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR: Self = Self(1_000_347_000); pub const PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR: Self = Self(1_000_347_001); pub const RAY_TRACING_PIPELINE_CREATE_INFO_KHR: Self = Self(1_000_150_015); pub const RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR: Self = Self(1_000_150_016); pub const RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR: Self = Self(1_000_150_018); } #[doc = "Generated from 'VK_KHR_ray_query'"] impl StructureType { pub const PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR: Self = Self(1_000_348_013); } #[doc = "Generated from 'VK_NV_framebuffer_mixed_samples'"] impl StructureType { pub const PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: Self = Self(1_000_152_000); } #[doc = "Generated from 'VK_NV_fill_rectangle'"] impl PolygonMode { pub const FILL_RECTANGLE_NV: Self = Self(1_000_153_000); } #[doc = "Generated from 'VK_NV_shader_sm_builtins'"] impl StructureType { 
pub const PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV: Self = Self(1_000_154_000); pub const PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV: Self = Self(1_000_154_001); } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl ChromaLocation { pub const COSITED_EVEN_KHR: Self = Self::COSITED_EVEN; pub const MIDPOINT_KHR: Self = Self::MIDPOINT; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl DebugReportObjectTypeEXT { pub const SAMPLER_YCBCR_CONVERSION_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl Format { pub const G8B8G8R8_422_UNORM_KHR: Self = Self::G8B8G8R8_422_UNORM; pub const B8G8R8G8_422_UNORM_KHR: Self = Self::B8G8R8G8_422_UNORM; pub const G8_B8_R8_3PLANE_420_UNORM_KHR: Self = Self::G8_B8_R8_3PLANE_420_UNORM; pub const G8_B8R8_2PLANE_420_UNORM_KHR: Self = Self::G8_B8R8_2PLANE_420_UNORM; pub const G8_B8_R8_3PLANE_422_UNORM_KHR: Self = Self::G8_B8_R8_3PLANE_422_UNORM; pub const G8_B8R8_2PLANE_422_UNORM_KHR: Self = Self::G8_B8R8_2PLANE_422_UNORM; pub const G8_B8_R8_3PLANE_444_UNORM_KHR: Self = Self::G8_B8_R8_3PLANE_444_UNORM; pub const R10X6_UNORM_PACK16_KHR: Self = Self::R10X6_UNORM_PACK16; pub const R10X6G10X6_UNORM_2PACK16_KHR: Self = Self::R10X6G10X6_UNORM_2PACK16; pub const R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR: Self = Self::R10X6G10X6B10X6A10X6_UNORM_4PACK16; pub const G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR: Self = Self::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16; pub const B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR: Self = Self::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16; pub const G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR: Self = Self::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16; pub const G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR: Self = Self::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16; pub const G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR: Self = Self::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16; pub const G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR: Self = Self::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16; pub const G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR: Self = Self::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16; pub const R12X4_UNORM_PACK16_KHR: Self = Self::R12X4_UNORM_PACK16; pub const R12X4G12X4_UNORM_2PACK16_KHR: Self = Self::R12X4G12X4_UNORM_2PACK16; pub const R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR: Self = Self::R12X4G12X4B12X4A12X4_UNORM_4PACK16; pub const G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR: Self = Self::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16; pub const B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR: Self = Self::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16; pub const G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR: Self = Self::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16; pub const G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR: Self = Self::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16; pub const G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR: Self = Self::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16; pub const G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR: Self = Self::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16; pub const G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR: Self = Self::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16; pub const G16B16G16R16_422_UNORM_KHR: Self = Self::G16B16G16R16_422_UNORM; pub const B16G16R16G16_422_UNORM_KHR: Self = Self::B16G16R16G16_422_UNORM; pub const G16_B16_R16_3PLANE_420_UNORM_KHR: Self = Self::G16_B16_R16_3PLANE_420_UNORM; pub const G16_B16R16_2PLANE_420_UNORM_KHR: Self = Self::G16_B16R16_2PLANE_420_UNORM; pub const 
G16_B16_R16_3PLANE_422_UNORM_KHR: Self = Self::G16_B16_R16_3PLANE_422_UNORM; pub const G16_B16R16_2PLANE_422_UNORM_KHR: Self = Self::G16_B16R16_2PLANE_422_UNORM; pub const G16_B16_R16_3PLANE_444_UNORM_KHR: Self = Self::G16_B16_R16_3PLANE_444_UNORM; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl FormatFeatureFlags { pub const MIDPOINT_CHROMA_SAMPLES_KHR: Self = Self::MIDPOINT_CHROMA_SAMPLES; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT; pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_KHR: Self = Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE; pub const DISJOINT_KHR: Self = Self::DISJOINT; pub const COSITED_CHROMA_SAMPLES_KHR: Self = Self::COSITED_CHROMA_SAMPLES; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl ImageAspectFlags { pub const PLANE_0_KHR: Self = Self::PLANE_0; pub const PLANE_1_KHR: Self = Self::PLANE_1; pub const PLANE_2_KHR: Self = Self::PLANE_2; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl ImageCreateFlags { pub const DISJOINT_KHR: Self = Self::DISJOINT; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl ObjectType { pub const SAMPLER_YCBCR_CONVERSION_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl SamplerYcbcrModelConversion { pub const RGB_IDENTITY_KHR: Self = Self::RGB_IDENTITY; pub const YCBCR_IDENTITY_KHR: Self = Self::YCBCR_IDENTITY; pub const YCBCR_709_KHR: Self = Self::YCBCR_709; pub const YCBCR_601_KHR: Self = Self::YCBCR_601; pub const YCBCR_2020_KHR: Self = Self::YCBCR_2020; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl SamplerYcbcrRange { pub const ITU_FULL_KHR: Self = Self::ITU_FULL; pub const ITU_NARROW_KHR: Self = Self::ITU_NARROW; } #[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"] impl StructureType { pub const SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION_CREATE_INFO; pub const SAMPLER_YCBCR_CONVERSION_INFO_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION_INFO; pub const BIND_IMAGE_PLANE_MEMORY_INFO_KHR: Self = Self::BIND_IMAGE_PLANE_MEMORY_INFO; pub const IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR: Self = Self::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO; pub const PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES; pub const SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES; } #[doc = "Generated from 'VK_KHR_bind_memory2'"] impl ImageCreateFlags { pub const ALIAS_KHR: Self = Self::ALIAS; } #[doc = "Generated from 'VK_KHR_bind_memory2'"] impl StructureType { pub const BIND_BUFFER_MEMORY_INFO_KHR: Self = Self::BIND_BUFFER_MEMORY_INFO; pub const BIND_IMAGE_MEMORY_INFO_KHR: Self = Self::BIND_IMAGE_MEMORY_INFO; } #[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"] impl ImageAspectFlags { pub const MEMORY_PLANE_0_EXT: Self = Self(0b1000_0000); pub const MEMORY_PLANE_1_EXT: Self = Self(0b1_0000_0000); pub const MEMORY_PLANE_2_EXT: Self = Self(0b10_0000_0000); pub const MEMORY_PLANE_3_EXT: Self = 
Self(0b100_0000_0000); } #[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"] impl ImageTiling { pub const DRM_FORMAT_MODIFIER_EXT: Self = Self(1_000_158_000); } #[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"] impl Result { pub const ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: Self = Self(-1_000_158_000); } #[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"] impl StructureType { pub const DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT: Self = Self(1_000_158_000); pub const PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: Self = Self(1_000_158_002); pub const IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: Self = Self(1_000_158_003); pub const IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: Self = Self(1_000_158_004); pub const IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT: Self = Self(1_000_158_005); pub const DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT: Self = Self(1_000_158_006); } #[doc = "Generated from 'VK_EXT_validation_cache'"] impl ObjectType { pub const VALIDATION_CACHE_EXT: Self = Self(1_000_160_000); } #[doc = "Generated from 'VK_EXT_validation_cache'"] impl StructureType { pub const VALIDATION_CACHE_CREATE_INFO_EXT: Self = Self(1_000_160_000); pub const SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: Self = Self(1_000_160_001); } #[doc = "Generated from 'VK_EXT_descriptor_indexing'"] impl DescriptorBindingFlags { pub const UPDATE_AFTER_BIND_EXT: Self = Self::UPDATE_AFTER_BIND; pub const UPDATE_UNUSED_WHILE_PENDING_EXT: Self = Self::UPDATE_UNUSED_WHILE_PENDING; pub const PARTIALLY_BOUND_EXT: Self = Self::PARTIALLY_BOUND; pub const VARIABLE_DESCRIPTOR_COUNT_EXT: Self = Self::VARIABLE_DESCRIPTOR_COUNT; } #[doc = "Generated from 'VK_EXT_descriptor_indexing'"] impl DescriptorPoolCreateFlags { pub const UPDATE_AFTER_BIND_EXT: Self = Self::UPDATE_AFTER_BIND; } #[doc = "Generated from 'VK_EXT_descriptor_indexing'"] impl DescriptorSetLayoutCreateFlags { pub const UPDATE_AFTER_BIND_POOL_EXT: Self = Self::UPDATE_AFTER_BIND_POOL; } #[doc = "Generated from 'VK_EXT_descriptor_indexing'"] impl Result { pub const ERROR_FRAGMENTATION_EXT: Self = Self::ERROR_FRAGMENTATION; } #[doc = "Generated from 'VK_EXT_descriptor_indexing'"] impl StructureType { pub const DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT: Self = Self::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO; pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES; pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES; pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT: Self = Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO; pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT: Self = Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT; } #[doc = "Generated from 'VK_KHR_portability_subset'"] impl StructureType { pub const PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR: Self = Self(1_000_163_000); pub const PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR: Self = Self(1_000_163_001); } #[doc = "Generated from 'VK_NV_shading_rate_image'"] impl AccessFlags { pub const SHADING_RATE_IMAGE_READ_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR; } #[doc = "Generated from 'VK_NV_shading_rate_image'"] impl DynamicState { pub const VIEWPORT_SHADING_RATE_PALETTE_NV: Self = Self(1_000_164_004); pub const VIEWPORT_COARSE_SAMPLE_ORDER_NV: Self = Self(1_000_164_006); } #[doc = "Generated from 'VK_NV_shading_rate_image'"] 
impl ImageLayout { pub const SHADING_RATE_OPTIMAL_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR; } #[doc = "Generated from 'VK_NV_shading_rate_image'"] impl ImageUsageFlags { pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR; } #[doc = "Generated from 'VK_NV_shading_rate_image'"] impl PipelineStageFlags { pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR; } #[doc = "Generated from 'VK_NV_shading_rate_image'"] impl StructureType { pub const PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: Self = Self(1_000_164_000); pub const PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: Self = Self(1_000_164_001); pub const PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV: Self = Self(1_000_164_002); pub const PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: Self = Self(1_000_164_005); } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl AccelerationStructureTypeKHR { pub const TOP_LEVEL_NV: Self = Self::TOP_LEVEL; pub const BOTTOM_LEVEL_NV: Self = Self::BOTTOM_LEVEL; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl AccessFlags { pub const ACCELERATION_STRUCTURE_READ_NV: Self = Self::ACCELERATION_STRUCTURE_READ_KHR; pub const ACCELERATION_STRUCTURE_WRITE_NV: Self = Self::ACCELERATION_STRUCTURE_WRITE_KHR; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl BufferUsageFlags { pub const RAY_TRACING_NV: Self = Self::SHADER_BINDING_TABLE_KHR; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl BuildAccelerationStructureFlagsKHR { pub const ALLOW_UPDATE_NV: Self = Self::ALLOW_UPDATE; pub const ALLOW_COMPACTION_NV: Self = Self::ALLOW_COMPACTION; pub const PREFER_FAST_TRACE_NV: Self = Self::PREFER_FAST_TRACE; pub const PREFER_FAST_BUILD_NV: Self = Self::PREFER_FAST_BUILD; pub const LOW_MEMORY_NV: Self = Self::LOW_MEMORY; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl CopyAccelerationStructureModeKHR { pub const CLONE_NV: Self = Self::CLONE; pub const COMPACT_NV: Self = Self::COMPACT; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl DebugReportObjectTypeEXT { pub const ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_000); } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl DescriptorType { pub const ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_000); } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl GeometryFlagsKHR { pub const OPAQUE_NV: Self = Self::OPAQUE; pub const NO_DUPLICATE_ANY_HIT_INVOCATION_NV: Self = Self::NO_DUPLICATE_ANY_HIT_INVOCATION; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl GeometryInstanceFlagsKHR { pub const TRIANGLE_CULL_DISABLE_NV: Self = Self::TRIANGLE_FACING_CULL_DISABLE; pub const TRIANGLE_FRONT_COUNTERCLOCKWISE_NV: Self = Self::TRIANGLE_FRONT_COUNTERCLOCKWISE; pub const FORCE_OPAQUE_NV: Self = Self::FORCE_OPAQUE; pub const FORCE_NO_OPAQUE_NV: Self = Self::FORCE_NO_OPAQUE; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl GeometryTypeKHR { pub const TRIANGLES_NV: Self = Self::TRIANGLES; pub const AABBS_NV: Self = Self::AABBS; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl IndexType { pub const NONE_NV: Self = Self::NONE_KHR; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl ObjectType { pub const ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_000); } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl PipelineBindPoint { pub const RAY_TRACING_NV: Self = Self::RAY_TRACING_KHR; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl PipelineCreateFlags { pub const DEFER_COMPILE_NV: Self = Self(0b10_0000); } #[doc = "Generated from 
'VK_NV_ray_tracing'"] impl PipelineStageFlags { pub const RAY_TRACING_SHADER_NV: Self = Self::RAY_TRACING_SHADER_KHR; pub const ACCELERATION_STRUCTURE_BUILD_NV: Self = Self::ACCELERATION_STRUCTURE_BUILD_KHR; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl QueryType { pub const ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV: Self = Self(1_000_165_000); } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl RayTracingShaderGroupTypeKHR { pub const GENERAL_NV: Self = Self::GENERAL; pub const TRIANGLES_HIT_GROUP_NV: Self = Self::TRIANGLES_HIT_GROUP; pub const PROCEDURAL_HIT_GROUP_NV: Self = Self::PROCEDURAL_HIT_GROUP; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl ShaderStageFlags { pub const RAYGEN_NV: Self = Self::RAYGEN_KHR; pub const ANY_HIT_NV: Self = Self::ANY_HIT_KHR; pub const CLOSEST_HIT_NV: Self = Self::CLOSEST_HIT_KHR; pub const MISS_NV: Self = Self::MISS_KHR; pub const INTERSECTION_NV: Self = Self::INTERSECTION_KHR; pub const CALLABLE_NV: Self = Self::CALLABLE_KHR; } #[doc = "Generated from 'VK_NV_ray_tracing'"] impl StructureType { pub const RAY_TRACING_PIPELINE_CREATE_INFO_NV: Self = Self(1_000_165_000); pub const ACCELERATION_STRUCTURE_CREATE_INFO_NV: Self = Self(1_000_165_001); pub const GEOMETRY_NV: Self = Self(1_000_165_003); pub const GEOMETRY_TRIANGLES_NV: Self = Self(1_000_165_004); pub const GEOMETRY_AABB_NV: Self = Self(1_000_165_005); pub const BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV: Self = Self(1_000_165_006); pub const WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_007); pub const ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV: Self = Self(1_000_165_008); pub const PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV: Self = Self(1_000_165_009); pub const RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV: Self = Self(1_000_165_011); pub const ACCELERATION_STRUCTURE_INFO_NV: Self = Self(1_000_165_012); } #[doc = "Generated from 'VK_NV_representative_fragment_test'"] impl StructureType { pub const PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: Self = Self(1_000_166_000); pub const PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: Self = Self(1_000_166_001); } #[doc = "Generated from 'VK_KHR_maintenance3'"] impl StructureType { pub const PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES; pub const DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR: Self = Self::DESCRIPTOR_SET_LAYOUT_SUPPORT; } #[doc = "Generated from 'VK_EXT_filter_cubic'"] impl Filter { pub const CUBIC_EXT: Self = Self(1_000_015_000); } #[doc = "Generated from 'VK_EXT_filter_cubic'"] impl FormatFeatureFlags { pub const SAMPLED_IMAGE_FILTER_CUBIC_EXT: Self = Self(0b10_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_filter_cubic'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: Self = Self(1_000_170_000); pub const FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT: Self = Self(1_000_170_001); } #[doc = "Generated from 'VK_QCOM_render_pass_shader_resolve'"] impl SubpassDescriptionFlags { pub const FRAGMENT_REGION_QCOM: Self = Self(0b100); pub const SHADER_RESOLVE_QCOM: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_global_priority'"] impl Result { pub const ERROR_NOT_PERMITTED_EXT: Self = Self::ERROR_NOT_PERMITTED_KHR; } #[doc = "Generated from 'VK_EXT_global_priority'"] impl StructureType { pub const DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT: Self = Self::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR; } #[doc = "Generated from 'VK_KHR_shader_subgroup_extended_types'"] impl StructureType { 
pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES; } #[doc = "Generated from 'VK_KHR_8bit_storage'"] impl StructureType { pub const PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES; } #[doc = "Generated from 'VK_EXT_external_memory_host'"] impl ExternalMemoryHandleTypeFlags { pub const HOST_ALLOCATION_EXT: Self = Self(0b1000_0000); pub const HOST_MAPPED_FOREIGN_MEMORY_EXT: Self = Self(0b1_0000_0000); } #[doc = "Generated from 'VK_EXT_external_memory_host'"] impl StructureType { pub const IMPORT_MEMORY_HOST_POINTER_INFO_EXT: Self = Self(1_000_178_000); pub const MEMORY_HOST_POINTER_PROPERTIES_EXT: Self = Self(1_000_178_001); pub const PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: Self = Self(1_000_178_002); } #[doc = "Generated from 'VK_KHR_shader_atomic_int64'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES; } #[doc = "Generated from 'VK_KHR_shader_clock'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR: Self = Self(1_000_181_000); } #[doc = "Generated from 'VK_AMD_pipeline_compiler_control'"] impl StructureType { pub const PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD: Self = Self(1_000_183_000); } #[doc = "Generated from 'VK_EXT_calibrated_timestamps'"] impl StructureType { pub const CALIBRATED_TIMESTAMP_INFO_EXT: Self = Self::CALIBRATED_TIMESTAMP_INFO_KHR; } #[doc = "Generated from 'VK_EXT_calibrated_timestamps'"] impl TimeDomainKHR { pub const DEVICE_EXT: Self = Self::DEVICE; pub const CLOCK_MONOTONIC_EXT: Self = Self::CLOCK_MONOTONIC; pub const CLOCK_MONOTONIC_RAW_EXT: Self = Self::CLOCK_MONOTONIC_RAW; pub const QUERY_PERFORMANCE_COUNTER_EXT: Self = Self::QUERY_PERFORMANCE_COUNTER; } #[doc = "Generated from 'VK_AMD_shader_core_properties'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD: Self = Self(1_000_185_000); } #[doc = "Generated from 'VK_KHR_video_decode_h265'"] impl StructureType { pub const VIDEO_DECODE_H265_CAPABILITIES_KHR: Self = Self(1_000_187_000); pub const VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_187_001); pub const VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: Self = Self(1_000_187_002); pub const VIDEO_DECODE_H265_PROFILE_INFO_KHR: Self = Self(1_000_187_003); pub const VIDEO_DECODE_H265_PICTURE_INFO_KHR: Self = Self(1_000_187_004); pub const VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR: Self = Self(1_000_187_005); } #[doc = "Generated from 'VK_KHR_video_decode_h265'"] impl VideoCodecOperationFlagsKHR { pub const DECODE_H265: Self = Self(0b10); } #[doc = "Generated from 'VK_KHR_global_priority'"] impl Result { pub const ERROR_NOT_PERMITTED_KHR: Self = Self(-1_000_174_001); } #[doc = "Generated from 'VK_KHR_global_priority'"] impl StructureType { pub const DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR: Self = Self(1_000_174_000); pub const PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR: Self = Self(1_000_388_000); pub const QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR: Self = Self(1_000_388_001); } #[doc = "Generated from 'VK_AMD_memory_overallocation_behavior'"] impl StructureType { pub const DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: Self = Self(1_000_189_000); } #[doc = "Generated from 'VK_EXT_vertex_attribute_divisor'"] impl StructureType { pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: Self = Self(1_000_190_000); pub const 
PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT: Self = Self::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR; pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR; } #[doc = "Generated from 'VK_GGP_frame_token'"] impl StructureType { pub const PRESENT_FRAME_TOKEN_GGP: Self = Self(1_000_191_000); } #[doc = "Generated from 'VK_EXT_pipeline_creation_feedback'"] impl StructureType { pub const PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT: Self = Self::PIPELINE_CREATION_FEEDBACK_CREATE_INFO; } #[doc = "Generated from 'VK_KHR_driver_properties'"] impl DriverId { pub const AMD_PROPRIETARY_KHR: Self = Self::AMD_PROPRIETARY; pub const AMD_OPEN_SOURCE_KHR: Self = Self::AMD_OPEN_SOURCE; pub const MESA_RADV_KHR: Self = Self::MESA_RADV; pub const NVIDIA_PROPRIETARY_KHR: Self = Self::NVIDIA_PROPRIETARY; pub const INTEL_PROPRIETARY_WINDOWS_KHR: Self = Self::INTEL_PROPRIETARY_WINDOWS; pub const INTEL_OPEN_SOURCE_MESA_KHR: Self = Self::INTEL_OPEN_SOURCE_MESA; pub const IMAGINATION_PROPRIETARY_KHR: Self = Self::IMAGINATION_PROPRIETARY; pub const QUALCOMM_PROPRIETARY_KHR: Self = Self::QUALCOMM_PROPRIETARY; pub const ARM_PROPRIETARY_KHR: Self = Self::ARM_PROPRIETARY; pub const GOOGLE_SWIFTSHADER_KHR: Self = Self::GOOGLE_SWIFTSHADER; pub const GGP_PROPRIETARY_KHR: Self = Self::GGP_PROPRIETARY; pub const BROADCOM_PROPRIETARY_KHR: Self = Self::BROADCOM_PROPRIETARY; } #[doc = "Generated from 'VK_KHR_driver_properties'"] impl StructureType { pub const PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_DRIVER_PROPERTIES; } #[doc = "Generated from 'VK_KHR_shader_float_controls'"] impl ShaderFloatControlsIndependence { pub const TYPE_32_ONLY_KHR: Self = Self::TYPE_32_ONLY; pub const ALL_KHR: Self = Self::ALL; pub const NONE_KHR: Self = Self::NONE; } #[doc = "Generated from 'VK_KHR_shader_float_controls'"] impl StructureType { pub const PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES; } #[doc = "Generated from 'VK_NV_shader_subgroup_partitioned'"] impl SubgroupFeatureFlags { pub const PARTITIONED_NV: Self = Self(0b1_0000_0000); } #[doc = "Generated from 'VK_KHR_depth_stencil_resolve'"] impl ResolveModeFlags { pub const NONE_KHR: Self = Self::NONE; pub const SAMPLE_ZERO_KHR: Self = Self::SAMPLE_ZERO; pub const AVERAGE_KHR: Self = Self::AVERAGE; pub const MIN_KHR: Self = Self::MIN; pub const MAX_KHR: Self = Self::MAX; } #[doc = "Generated from 'VK_KHR_depth_stencil_resolve'"] impl StructureType { pub const PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES; pub const SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR: Self = Self::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE; } #[doc = "Generated from 'VK_KHR_swapchain_mutable_format'"] impl SwapchainCreateFlagsKHR { pub const MUTABLE_FORMAT: Self = Self(0b100); } #[doc = "Generated from 'VK_NV_compute_shader_derivatives'"] impl StructureType { pub const PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV: Self = Self(1_000_201_000); } #[doc = "Generated from 'VK_NV_mesh_shader'"] impl PipelineStageFlags { pub const TASK_SHADER_NV: Self = Self::TASK_SHADER_EXT; pub const MESH_SHADER_NV: Self = Self::MESH_SHADER_EXT; } #[doc = "Generated from 'VK_NV_mesh_shader'"] impl ShaderStageFlags { pub const TASK_NV: Self = Self::TASK_EXT; pub const MESH_NV: Self = Self::MESH_EXT; } #[doc = "Generated from 'VK_NV_mesh_shader'"] impl StructureType { pub const 
PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: Self = Self(1_000_202_000); pub const PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV: Self = Self(1_000_202_001); } #[doc = "Generated from 'VK_NV_fragment_shader_barycentric'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV: Self = Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR; } #[doc = "Generated from 'VK_NV_shader_image_footprint'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: Self = Self(1_000_204_000); } #[doc = "Generated from 'VK_NV_scissor_exclusive'"] impl DynamicState { pub const EXCLUSIVE_SCISSOR_ENABLE_NV: Self = Self(1_000_205_000); pub const EXCLUSIVE_SCISSOR_NV: Self = Self(1_000_205_001); } #[doc = "Generated from 'VK_NV_scissor_exclusive'"] impl StructureType { pub const PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: Self = Self(1_000_205_000); pub const PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: Self = Self(1_000_205_002); } #[doc = "Generated from 'VK_NV_device_diagnostic_checkpoints'"] impl StructureType { pub const CHECKPOINT_DATA_NV: Self = Self(1_000_206_000); pub const QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: Self = Self(1_000_206_001); } #[doc = "Generated from 'VK_KHR_timeline_semaphore'"] impl SemaphoreType { pub const BINARY_KHR: Self = Self::BINARY; pub const TIMELINE_KHR: Self = Self::TIMELINE; } #[doc = "Generated from 'VK_KHR_timeline_semaphore'"] impl SemaphoreWaitFlags { pub const ANY_KHR: Self = Self::ANY; } #[doc = "Generated from 'VK_KHR_timeline_semaphore'"] impl StructureType { pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES; pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES; pub const SEMAPHORE_TYPE_CREATE_INFO_KHR: Self = Self::SEMAPHORE_TYPE_CREATE_INFO; pub const TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR: Self = Self::TIMELINE_SEMAPHORE_SUBMIT_INFO; pub const SEMAPHORE_WAIT_INFO_KHR: Self = Self::SEMAPHORE_WAIT_INFO; pub const SEMAPHORE_SIGNAL_INFO_KHR: Self = Self::SEMAPHORE_SIGNAL_INFO; } #[doc = "Generated from 'VK_INTEL_shader_integer_functions2'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: Self = Self(1_000_209_000); } #[doc = "Generated from 'VK_INTEL_performance_query'"] impl ObjectType { pub const PERFORMANCE_CONFIGURATION_INTEL: Self = Self(1_000_210_000); } #[doc = "Generated from 'VK_INTEL_performance_query'"] impl QueryType { pub const PERFORMANCE_QUERY_INTEL: Self = Self(1_000_210_000); } #[doc = "Generated from 'VK_INTEL_performance_query'"] impl StructureType { pub const QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL: Self = Self(1_000_210_000); pub const INITIALIZE_PERFORMANCE_API_INFO_INTEL: Self = Self(1_000_210_001); pub const PERFORMANCE_MARKER_INFO_INTEL: Self = Self(1_000_210_002); pub const PERFORMANCE_STREAM_MARKER_INFO_INTEL: Self = Self(1_000_210_003); pub const PERFORMANCE_OVERRIDE_INFO_INTEL: Self = Self(1_000_210_004); pub const PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL: Self = Self(1_000_210_005); } #[doc = "Generated from 'VK_KHR_vulkan_memory_model'"] impl StructureType { pub const PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES; } #[doc = "Generated from 'VK_EXT_pci_bus_info'"] impl StructureType { pub const PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT: Self = Self(1_000_212_000); } #[doc = "Generated from 'VK_AMD_display_native_hdr'"] 
impl ColorSpaceKHR { pub const DISPLAY_NATIVE_AMD: Self = Self(1_000_213_000); } #[doc = "Generated from 'VK_AMD_display_native_hdr'"] impl StructureType { pub const DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD: Self = Self(1_000_213_000); pub const SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD: Self = Self(1_000_213_001); } #[doc = "Generated from 'VK_FUCHSIA_imagepipe_surface'"] impl StructureType { pub const IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA: Self = Self(1_000_214_000); } #[doc = "Generated from 'VK_KHR_shader_terminate_invocation'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES; } #[doc = "Generated from 'VK_EXT_metal_surface'"] impl StructureType { pub const METAL_SURFACE_CREATE_INFO_EXT: Self = Self(1_000_217_000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl AccessFlags { pub const FRAGMENT_DENSITY_MAP_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl FormatFeatureFlags { pub const FRAGMENT_DENSITY_MAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl FormatFeatureFlags2 { pub const FRAGMENT_DENSITY_MAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl ImageCreateFlags { pub const SUBSAMPLED_EXT: Self = Self(0b100_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl ImageLayout { pub const FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: Self = Self(1_000_218_000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl ImageUsageFlags { pub const FRAGMENT_DENSITY_MAP_EXT: Self = Self(0b10_0000_0000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl ImageViewCreateFlags { pub const FRAGMENT_DENSITY_MAP_DYNAMIC_EXT: Self = Self(0b1); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl PipelineStageFlags { pub const FRAGMENT_DENSITY_PROCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl SamplerCreateFlags { pub const SUBSAMPLED_EXT: Self = Self(0b1); pub const SUBSAMPLED_COARSE_RECONSTRUCTION_EXT: Self = Self(0b10); } #[doc = "Generated from 'VK_EXT_fragment_density_map'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT: Self = Self(1_000_218_000); pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT: Self = Self(1_000_218_001); pub const RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: Self = Self(1_000_218_002); } #[doc = "Generated from 'VK_EXT_scalar_block_layout'"] impl StructureType { pub const PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES; } #[doc = "Generated from 'VK_EXT_subgroup_size_control'"] impl PipelineShaderStageCreateFlags { pub const ALLOW_VARYING_SUBGROUP_SIZE_EXT: Self = Self::ALLOW_VARYING_SUBGROUP_SIZE; pub const REQUIRE_FULL_SUBGROUPS_EXT: Self = Self::REQUIRE_FULL_SUBGROUPS; } #[doc = "Generated from 'VK_EXT_subgroup_size_control'"] impl StructureType { pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES; pub const PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT: Self = Self::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO; pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT: Self = 
Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES; } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl AccessFlags { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl DynamicState { pub const FRAGMENT_SHADING_RATE_KHR: Self = Self(1_000_226_000); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl FormatFeatureFlags { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl FormatFeatureFlags2 { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl ImageLayout { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR: Self = Self(1_000_164_003); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl ImageUsageFlags { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b1_0000_0000); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl PipelineStageFlags { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b100_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_fragment_shading_rate'"] impl StructureType { pub const FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: Self = Self(1_000_226_000); pub const PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR: Self = Self(1_000_226_001); pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR: Self = Self(1_000_226_002); pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR: Self = Self(1_000_226_003); pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR: Self = Self(1_000_226_004); } #[doc = "Generated from 'VK_AMD_shader_core_properties2'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD: Self = Self(1_000_227_000); } #[doc = "Generated from 'VK_AMD_device_coherent_memory'"] impl MemoryPropertyFlags { pub const DEVICE_COHERENT_AMD: Self = Self(0b100_0000); pub const DEVICE_UNCACHED_AMD: Self = Self(0b1000_0000); } #[doc = "Generated from 'VK_AMD_device_coherent_memory'"] impl StructureType { pub const PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: Self = Self(1_000_229_000); } #[doc = "Generated from 'VK_KHR_dynamic_rendering_local_read'"] impl ImageLayout { pub const RENDERING_LOCAL_READ_KHR: Self = Self(1_000_232_000); } #[doc = "Generated from 'VK_KHR_dynamic_rendering_local_read'"] impl StructureType { pub const PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR: Self = Self(1_000_232_000); pub const RENDERING_ATTACHMENT_LOCATION_INFO_KHR: Self = Self(1_000_232_001); pub const RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR: Self = Self(1_000_232_002); } #[doc = "Generated from 'VK_EXT_shader_image_atomic_int64'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT: Self = Self(1_000_234_000); } #[doc = "Generated from 'VK_KHR_shader_quad_control'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR: Self = Self(1_000_235_000); } #[doc = "Generated from 'VK_EXT_memory_budget'"] impl StructureType { pub const PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT: Self = Self(1_000_237_000); } #[doc = "Generated from 'VK_EXT_memory_priority'"] impl StructureType { pub const PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: Self = Self(1_000_238_000); pub const MEMORY_PRIORITY_ALLOCATE_INFO_EXT: Self = Self(1_000_238_001); } #[doc = "Generated from 
'VK_KHR_surface_protected_capabilities'"] impl StructureType { pub const SURFACE_PROTECTED_CAPABILITIES_KHR: Self = Self(1_000_239_000); } #[doc = "Generated from 'VK_NV_dedicated_allocation_image_aliasing'"] impl StructureType { pub const PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: Self = Self(1_000_240_000); } #[doc = "Generated from 'VK_KHR_separate_depth_stencil_layouts'"] impl ImageLayout { pub const DEPTH_ATTACHMENT_OPTIMAL_KHR: Self = Self::DEPTH_ATTACHMENT_OPTIMAL; pub const DEPTH_READ_ONLY_OPTIMAL_KHR: Self = Self::DEPTH_READ_ONLY_OPTIMAL; pub const STENCIL_ATTACHMENT_OPTIMAL_KHR: Self = Self::STENCIL_ATTACHMENT_OPTIMAL; pub const STENCIL_READ_ONLY_OPTIMAL_KHR: Self = Self::STENCIL_READ_ONLY_OPTIMAL; } #[doc = "Generated from 'VK_KHR_separate_depth_stencil_layouts'"] impl StructureType { pub const PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES; pub const ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR: Self = Self::ATTACHMENT_REFERENCE_STENCIL_LAYOUT; pub const ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR: Self = Self::ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT; } #[doc = "Generated from 'VK_EXT_buffer_device_address'"] impl BufferCreateFlags { pub const DEVICE_ADDRESS_CAPTURE_REPLAY_EXT: Self = Self::DEVICE_ADDRESS_CAPTURE_REPLAY; } #[doc = "Generated from 'VK_EXT_buffer_device_address'"] impl BufferUsageFlags { pub const SHADER_DEVICE_ADDRESS_EXT: Self = Self::SHADER_DEVICE_ADDRESS; } #[doc = "Generated from 'VK_EXT_buffer_device_address'"] impl Result { pub const ERROR_INVALID_DEVICE_ADDRESS_EXT: Self = Self::ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS; } #[doc = "Generated from 'VK_EXT_buffer_device_address'"] impl StructureType { pub const PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: Self = Self(1_000_244_000); pub const PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT; pub const BUFFER_DEVICE_ADDRESS_INFO_EXT: Self = Self::BUFFER_DEVICE_ADDRESS_INFO; pub const BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT: Self = Self(1_000_244_002); } #[doc = "Generated from 'VK_EXT_tooling_info'"] impl StructureType { pub const PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_TOOL_PROPERTIES; } #[doc = "Generated from 'VK_EXT_tooling_info'"] impl ToolPurposeFlags { pub const DEBUG_REPORTING_EXT: Self = Self(0b10_0000); pub const DEBUG_MARKERS_EXT: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_EXT_separate_stencil_usage'"] impl StructureType { pub const IMAGE_STENCIL_USAGE_CREATE_INFO_EXT: Self = Self::IMAGE_STENCIL_USAGE_CREATE_INFO; } #[doc = "Generated from 'VK_EXT_validation_features'"] impl StructureType { pub const VALIDATION_FEATURES_EXT: Self = Self(1_000_247_000); } #[doc = "Generated from 'VK_KHR_present_wait'"] impl StructureType { pub const PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR: Self = Self(1_000_248_000); } #[doc = "Generated from 'VK_NV_cooperative_matrix'"] impl ComponentTypeKHR { pub const FLOAT16_NV: Self = Self::FLOAT16; pub const FLOAT32_NV: Self = Self::FLOAT32; pub const FLOAT64_NV: Self = Self::FLOAT64; pub const SINT8_NV: Self = Self::SINT8; pub const SINT16_NV: Self = Self::SINT16; pub const SINT32_NV: Self = Self::SINT32; pub const SINT64_NV: Self = Self::SINT64; pub const UINT8_NV: Self = Self::UINT8; pub const UINT16_NV: Self = Self::UINT16; pub const UINT32_NV: Self = Self::UINT32; pub const UINT64_NV: Self = Self::UINT64; } #[doc = "Generated from 'VK_NV_cooperative_matrix'"] impl ScopeKHR { pub 
const DEVICE_NV: Self = Self::DEVICE; pub const WORKGROUP_NV: Self = Self::WORKGROUP; pub const SUBGROUP_NV: Self = Self::SUBGROUP; pub const QUEUE_FAMILY_NV: Self = Self::QUEUE_FAMILY; } #[doc = "Generated from 'VK_NV_cooperative_matrix'"] impl StructureType { pub const PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV: Self = Self(1_000_249_000); pub const COOPERATIVE_MATRIX_PROPERTIES_NV: Self = Self(1_000_249_001); pub const PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV: Self = Self(1_000_249_002); } #[doc = "Generated from 'VK_NV_coverage_reduction_mode'"] impl StructureType { pub const PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: Self = Self(1_000_250_000); pub const PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV: Self = Self(1_000_250_001); pub const FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV: Self = Self(1_000_250_002); } #[doc = "Generated from 'VK_EXT_fragment_shader_interlock'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT: Self = Self(1_000_251_000); } #[doc = "Generated from 'VK_EXT_ycbcr_image_arrays'"] impl StructureType { pub const PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT: Self = Self(1_000_252_000); } #[doc = "Generated from 'VK_KHR_uniform_buffer_standard_layout'"] impl StructureType { pub const PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES; } #[doc = "Generated from 'VK_EXT_provoking_vertex'"] impl StructureType { pub const PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: Self = Self(1_000_254_000); pub const PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT: Self = Self(1_000_254_001); pub const PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: Self = Self(1_000_254_002); } #[doc = "Generated from 'VK_EXT_full_screen_exclusive'"] impl Result { pub const ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: Self = Self(-1_000_255_000); } #[doc = "Generated from 'VK_EXT_full_screen_exclusive'"] impl StructureType { pub const SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT: Self = Self(1_000_255_000); pub const SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT: Self = Self(1_000_255_002); pub const SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT: Self = Self(1_000_255_001); } #[doc = "Generated from 'VK_EXT_headless_surface'"] impl StructureType { pub const HEADLESS_SURFACE_CREATE_INFO_EXT: Self = Self(1_000_256_000); } #[doc = "Generated from 'VK_KHR_buffer_device_address'"] impl BufferCreateFlags { pub const DEVICE_ADDRESS_CAPTURE_REPLAY_KHR: Self = Self::DEVICE_ADDRESS_CAPTURE_REPLAY; } #[doc = "Generated from 'VK_KHR_buffer_device_address'"] impl BufferUsageFlags { pub const SHADER_DEVICE_ADDRESS_KHR: Self = Self::SHADER_DEVICE_ADDRESS; } #[doc = "Generated from 'VK_KHR_buffer_device_address'"] impl MemoryAllocateFlags { pub const DEVICE_ADDRESS_KHR: Self = Self::DEVICE_ADDRESS; pub const DEVICE_ADDRESS_CAPTURE_REPLAY_KHR: Self = Self::DEVICE_ADDRESS_CAPTURE_REPLAY; } #[doc = "Generated from 'VK_KHR_buffer_device_address'"] impl Result { pub const ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR: Self = Self::ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS; } #[doc = "Generated from 'VK_KHR_buffer_device_address'"] impl StructureType { pub const PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES; pub const BUFFER_DEVICE_ADDRESS_INFO_KHR: Self = Self::BUFFER_DEVICE_ADDRESS_INFO; pub const BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR: Self = Self::BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO; pub const 
MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR: Self = Self::MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO; pub const DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR: Self = Self::DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO; } #[doc = "Generated from 'VK_EXT_line_rasterization'"] impl DynamicState { pub const LINE_STIPPLE_EXT: Self = Self::LINE_STIPPLE_KHR; } #[doc = "Generated from 'VK_EXT_line_rasterization'"] impl StructureType { pub const PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR; pub const PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT: Self = Self::PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR; pub const PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR; } #[doc = "Generated from 'VK_EXT_shader_atomic_float'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT: Self = Self(1_000_260_000); } #[doc = "Generated from 'VK_EXT_host_query_reset'"] impl StructureType { pub const PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES; } #[doc = "Generated from 'VK_EXT_index_type_uint8'"] impl IndexType { pub const UINT8_EXT: Self = Self::UINT8_KHR; } #[doc = "Generated from 'VK_EXT_index_type_uint8'"] impl StructureType { pub const PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR; } #[doc = "Generated from 'VK_EXT_extended_dynamic_state'"] impl DynamicState { pub const CULL_MODE_EXT: Self = Self::CULL_MODE; pub const FRONT_FACE_EXT: Self = Self::FRONT_FACE; pub const PRIMITIVE_TOPOLOGY_EXT: Self = Self::PRIMITIVE_TOPOLOGY; pub const VIEWPORT_WITH_COUNT_EXT: Self = Self::VIEWPORT_WITH_COUNT; pub const SCISSOR_WITH_COUNT_EXT: Self = Self::SCISSOR_WITH_COUNT; pub const VERTEX_INPUT_BINDING_STRIDE_EXT: Self = Self::VERTEX_INPUT_BINDING_STRIDE; pub const DEPTH_TEST_ENABLE_EXT: Self = Self::DEPTH_TEST_ENABLE; pub const DEPTH_WRITE_ENABLE_EXT: Self = Self::DEPTH_WRITE_ENABLE; pub const DEPTH_COMPARE_OP_EXT: Self = Self::DEPTH_COMPARE_OP; pub const DEPTH_BOUNDS_TEST_ENABLE_EXT: Self = Self::DEPTH_BOUNDS_TEST_ENABLE; pub const STENCIL_TEST_ENABLE_EXT: Self = Self::STENCIL_TEST_ENABLE; pub const STENCIL_OP_EXT: Self = Self::STENCIL_OP; } #[doc = "Generated from 'VK_EXT_extended_dynamic_state'"] impl StructureType { #[doc = "Not promoted to 1.3"] pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: Self = Self(1_000_267_000); } #[doc = "Generated from 'VK_KHR_deferred_host_operations'"] impl ObjectType { pub const DEFERRED_OPERATION_KHR: Self = Self(1_000_268_000); } #[doc = "Generated from 'VK_KHR_deferred_host_operations'"] impl Result { pub const THREAD_IDLE_KHR: Self = Self(1_000_268_000); pub const THREAD_DONE_KHR: Self = Self(1_000_268_001); pub const OPERATION_DEFERRED_KHR: Self = Self(1_000_268_002); pub const OPERATION_NOT_DEFERRED_KHR: Self = Self(1_000_268_003); } #[doc = "Generated from 'VK_KHR_pipeline_executable_properties'"] impl PipelineCreateFlags { pub const CAPTURE_STATISTICS_KHR: Self = Self(0b100_0000); pub const CAPTURE_INTERNAL_REPRESENTATIONS_KHR: Self = Self(0b1000_0000); } #[doc = "Generated from 'VK_KHR_pipeline_executable_properties'"] impl StructureType { pub const PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: Self = Self(1_000_269_000); pub const PIPELINE_INFO_KHR: Self = Self(1_000_269_001); pub const PIPELINE_EXECUTABLE_PROPERTIES_KHR: Self = Self(1_000_269_002); pub const 
PIPELINE_EXECUTABLE_INFO_KHR: Self = Self(1_000_269_003); pub const PIPELINE_EXECUTABLE_STATISTIC_KHR: Self = Self(1_000_269_004); pub const PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR: Self = Self(1_000_269_005); } #[doc = "Generated from 'VK_EXT_host_image_copy'"] impl FormatFeatureFlags2 { #[doc = "Host image copies are supported"] pub const HOST_IMAGE_TRANSFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_host_image_copy'"] impl ImageUsageFlags { #[doc = "Can be used with host image copies"] pub const HOST_TRANSFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_host_image_copy'"] impl StructureType { pub const PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT: Self = Self(1_000_270_000); pub const PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT: Self = Self(1_000_270_001); pub const MEMORY_TO_IMAGE_COPY_EXT: Self = Self(1_000_270_002); pub const IMAGE_TO_MEMORY_COPY_EXT: Self = Self(1_000_270_003); pub const COPY_IMAGE_TO_MEMORY_INFO_EXT: Self = Self(1_000_270_004); pub const COPY_MEMORY_TO_IMAGE_INFO_EXT: Self = Self(1_000_270_005); pub const HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT: Self = Self(1_000_270_006); pub const COPY_IMAGE_TO_IMAGE_INFO_EXT: Self = Self(1_000_270_007); pub const SUBRESOURCE_HOST_MEMCPY_SIZE_EXT: Self = Self(1_000_270_008); pub const HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT: Self = Self(1_000_270_009); } #[doc = "Generated from 'VK_KHR_map_memory2'"] impl StructureType { pub const MEMORY_MAP_INFO_KHR: Self = Self(1_000_271_000); pub const MEMORY_UNMAP_INFO_KHR: Self = Self(1_000_271_001); } #[doc = "Generated from 'VK_EXT_map_memory_placed'"] impl MemoryMapFlags { pub const PLACED_EXT: Self = Self(0b1); } #[doc = "Generated from 'VK_EXT_map_memory_placed'"] impl MemoryUnmapFlagsKHR { pub const RESERVE_EXT: Self = Self(0b1); } #[doc = "Generated from 'VK_EXT_map_memory_placed'"] impl StructureType { pub const PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT: Self = Self(1_000_272_000); pub const PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT: Self = Self(1_000_272_001); pub const MEMORY_MAP_PLACED_INFO_EXT: Self = Self(1_000_272_002); } #[doc = "Generated from 'VK_EXT_shader_atomic_float2'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT: Self = Self(1_000_273_000); } #[doc = "Generated from 'VK_EXT_surface_maintenance1'"] impl StructureType { pub const SURFACE_PRESENT_MODE_EXT: Self = Self(1_000_274_000); pub const SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: Self = Self(1_000_274_001); pub const SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: Self = Self(1_000_274_002); } #[doc = "Generated from 'VK_EXT_swapchain_maintenance1'"] impl StructureType { pub const PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT: Self = Self(1_000_275_000); pub const SWAPCHAIN_PRESENT_FENCE_INFO_EXT: Self = Self(1_000_275_001); pub const SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT: Self = Self(1_000_275_002); pub const SWAPCHAIN_PRESENT_MODE_INFO_EXT: Self = Self(1_000_275_003); pub const SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT: Self = Self(1_000_275_004); pub const RELEASE_SWAPCHAIN_IMAGES_INFO_EXT: Self = Self(1_000_275_005); } #[doc = "Generated from 'VK_EXT_swapchain_maintenance1'"] impl SwapchainCreateFlagsKHR { pub const DEFERRED_MEMORY_ALLOCATION_EXT: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_shader_demote_to_helper_invocation'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT: Self = 
Self::PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES; } #[doc = "Generated from 'VK_NV_device_generated_commands'"] impl AccessFlags { pub const COMMAND_PREPROCESS_READ_NV: Self = Self(0b10_0000_0000_0000_0000); pub const COMMAND_PREPROCESS_WRITE_NV: Self = Self(0b100_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_device_generated_commands'"] impl ObjectType { pub const INDIRECT_COMMANDS_LAYOUT_NV: Self = Self(1_000_277_000); } #[doc = "Generated from 'VK_NV_device_generated_commands'"] impl PipelineCreateFlags { pub const INDIRECT_BINDABLE_NV: Self = Self(0b100_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_device_generated_commands'"] impl PipelineStageFlags { pub const COMMAND_PREPROCESS_NV: Self = Self(0b10_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_device_generated_commands'"] impl StructureType { pub const PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV: Self = Self(1_000_277_000); pub const GRAPHICS_SHADER_GROUP_CREATE_INFO_NV: Self = Self(1_000_277_001); pub const GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV: Self = Self(1_000_277_002); pub const INDIRECT_COMMANDS_LAYOUT_TOKEN_NV: Self = Self(1_000_277_003); pub const INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV: Self = Self(1_000_277_004); pub const GENERATED_COMMANDS_INFO_NV: Self = Self(1_000_277_005); pub const GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV: Self = Self(1_000_277_006); pub const PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV: Self = Self(1_000_277_007); } #[doc = "Generated from 'VK_NV_inherited_viewport_scissor'"] impl StructureType { pub const PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV: Self = Self(1_000_278_000); pub const COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV: Self = Self(1_000_278_001); } #[doc = "Generated from 'VK_KHR_shader_integer_dot_product'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES; pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES; } #[doc = "Generated from 'VK_EXT_texel_buffer_alignment'"] impl StructureType { #[doc = "Not promoted to 1.3"] pub const PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: Self = Self(1_000_281_000); pub const PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES; } #[doc = "Generated from 'VK_QCOM_render_pass_transform'"] impl RenderPassCreateFlags { pub const TRANSFORM_QCOM: Self = Self(0b10); } #[doc = "Generated from 'VK_QCOM_render_pass_transform'"] impl StructureType { pub const COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM: Self = Self(1_000_282_000); pub const RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM: Self = Self(1_000_282_001); } #[doc = "Generated from 'VK_EXT_depth_bias_control'"] impl StructureType { pub const PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT: Self = Self(1_000_283_000); pub const DEPTH_BIAS_INFO_EXT: Self = Self(1_000_283_001); pub const DEPTH_BIAS_REPRESENTATION_INFO_EXT: Self = Self(1_000_283_002); } #[doc = "Generated from 'VK_EXT_device_memory_report'"] impl StructureType { pub const PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT: Self = Self(1_000_284_000); pub const DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT: Self = Self(1_000_284_001); pub const DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT: Self = Self(1_000_284_002); } #[doc = "Generated from 'VK_EXT_robustness2'"] impl StructureType { pub 
const PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: Self = Self(1_000_286_000); pub const PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: Self = Self(1_000_286_001); } #[doc = "Generated from 'VK_EXT_custom_border_color'"] impl BorderColor { pub const FLOAT_CUSTOM_EXT: Self = Self(1_000_287_003); pub const INT_CUSTOM_EXT: Self = Self(1_000_287_004); } #[doc = "Generated from 'VK_EXT_custom_border_color'"] impl StructureType { pub const SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT: Self = Self(1_000_287_000); pub const PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: Self = Self(1_000_287_001); pub const PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: Self = Self(1_000_287_002); } #[doc = "Generated from 'VK_KHR_pipeline_library'"] impl PipelineCreateFlags { pub const LIBRARY_KHR: Self = Self(0b1000_0000_0000); } #[doc = "Generated from 'VK_KHR_pipeline_library'"] impl StructureType { pub const PIPELINE_LIBRARY_CREATE_INFO_KHR: Self = Self(1_000_290_000); } #[doc = "Generated from 'VK_NV_present_barrier'"] impl StructureType { pub const PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV: Self = Self(1_000_292_000); pub const SURFACE_CAPABILITIES_PRESENT_BARRIER_NV: Self = Self(1_000_292_001); pub const SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV: Self = Self(1_000_292_002); } #[doc = "Generated from 'VK_KHR_present_id'"] impl StructureType { pub const PRESENT_ID_KHR: Self = Self(1_000_294_000); pub const PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR: Self = Self(1_000_294_001); } #[doc = "Generated from 'VK_EXT_private_data'"] impl ObjectType { pub const PRIVATE_DATA_SLOT_EXT: Self = Self::PRIVATE_DATA_SLOT; } #[doc = "Generated from 'VK_EXT_private_data'"] impl StructureType { pub const PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES; pub const DEVICE_PRIVATE_DATA_CREATE_INFO_EXT: Self = Self::DEVICE_PRIVATE_DATA_CREATE_INFO; pub const PRIVATE_DATA_SLOT_CREATE_INFO_EXT: Self = Self::PRIVATE_DATA_SLOT_CREATE_INFO; } #[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"] impl PipelineCacheCreateFlags { pub const EXTERNALLY_SYNCHRONIZED_EXT: Self = Self::EXTERNALLY_SYNCHRONIZED; } #[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"] impl PipelineCreateFlags { pub const FAIL_ON_PIPELINE_COMPILE_REQUIRED_EXT: Self = Self::FAIL_ON_PIPELINE_COMPILE_REQUIRED; pub const EARLY_RETURN_ON_FAILURE_EXT: Self = Self::EARLY_RETURN_ON_FAILURE; } #[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"] impl Result { pub const PIPELINE_COMPILE_REQUIRED_EXT: Self = Self::PIPELINE_COMPILE_REQUIRED; pub const ERROR_PIPELINE_COMPILE_REQUIRED_EXT: Self = Self::PIPELINE_COMPILE_REQUIRED; } #[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"] impl StructureType { pub const PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES; } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl AccessFlags2 { pub const VIDEO_ENCODE_READ_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const VIDEO_ENCODE_WRITE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl BufferUsageFlags { pub const VIDEO_ENCODE_DST_KHR: Self = Self(0b1000_0000_0000_0000); pub const VIDEO_ENCODE_SRC_KHR: Self = Self(0b1_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl FormatFeatureFlags { pub const VIDEO_ENCODE_INPUT_KHR: Self = 
Self(0b1000_0000_0000_0000_0000_0000_0000); pub const VIDEO_ENCODE_DPB_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl FormatFeatureFlags2 { pub const VIDEO_ENCODE_INPUT_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000); pub const VIDEO_ENCODE_DPB_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl ImageLayout { pub const VIDEO_ENCODE_DST_KHR: Self = Self(1_000_299_000); pub const VIDEO_ENCODE_SRC_KHR: Self = Self(1_000_299_001); pub const VIDEO_ENCODE_DPB_KHR: Self = Self(1_000_299_002); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl ImageUsageFlags { pub const VIDEO_ENCODE_DST_KHR: Self = Self(0b10_0000_0000_0000); pub const VIDEO_ENCODE_SRC_KHR: Self = Self(0b100_0000_0000_0000); pub const VIDEO_ENCODE_DPB_KHR: Self = Self(0b1000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl PipelineStageFlags2 { pub const VIDEO_ENCODE_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl QueryResultStatusKHR { pub const INSUFFICIENTSTREAM_BUFFER_RANGE: Self = Self(-1_000_299_000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl QueryType { pub const VIDEO_ENCODE_FEEDBACK_KHR: Self = Self(1_000_299_000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl QueueFlags { pub const VIDEO_ENCODE_KHR: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl Result { pub const ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR: Self = Self(-1_000_299_000); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl StructureType { pub const VIDEO_ENCODE_INFO_KHR: Self = Self(1_000_299_000); pub const VIDEO_ENCODE_RATE_CONTROL_INFO_KHR: Self = Self(1_000_299_001); pub const VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR: Self = Self(1_000_299_002); pub const VIDEO_ENCODE_CAPABILITIES_KHR: Self = Self(1_000_299_003); pub const VIDEO_ENCODE_USAGE_INFO_KHR: Self = Self(1_000_299_004); pub const QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR: Self = Self(1_000_299_005); pub const PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR: Self = Self(1_000_299_006); pub const VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR: Self = Self(1_000_299_007); pub const VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR: Self = Self(1_000_299_008); pub const VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR: Self = Self(1_000_299_009); pub const VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR: Self = Self(1_000_299_010); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl VideoCodingControlFlagsKHR { pub const ENCODE_RATE_CONTROL: Self = Self(0b10); pub const ENCODE_QUALITY_LEVEL: Self = Self(0b100); } #[doc = "Generated from 'VK_KHR_video_encode_queue'"] impl VideoSessionCreateFlagsKHR { pub const ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS: Self = Self(0b10); } #[doc = "Generated from 'VK_NV_device_diagnostics_config'"] impl StructureType { pub const PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV: Self = Self(1_000_300_000); pub const DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV: Self = Self(1_000_300_001); } #[doc = "Generated from 'VK_QCOM_render_pass_store_ops'"] impl AttachmentStoreOp { pub const NONE_QCOM: Self = Self::NONE; } #[doc = "Generated from 'VK_NV_cuda_kernel_launch'"] impl DebugReportObjectTypeEXT { pub const CUDA_MODULE_NV: Self = Self(1_000_307_000); pub const CUDA_FUNCTION_NV: Self = Self(1_000_307_001); } #[doc = "Generated from 
'VK_NV_cuda_kernel_launch'"] impl ObjectType { pub const CUDA_MODULE_NV: Self = Self(1_000_307_000); pub const CUDA_FUNCTION_NV: Self = Self(1_000_307_001); } #[doc = "Generated from 'VK_NV_cuda_kernel_launch'"] impl StructureType { pub const CUDA_MODULE_CREATE_INFO_NV: Self = Self(1_000_307_000); pub const CUDA_FUNCTION_CREATE_INFO_NV: Self = Self(1_000_307_001); pub const CUDA_LAUNCH_INFO_NV: Self = Self(1_000_307_002); pub const PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV: Self = Self(1_000_307_003); pub const PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV: Self = Self(1_000_307_004); } #[doc = "Generated from 'VK_NV_low_latency'"] impl StructureType { pub const QUERY_LOW_LATENCY_SUPPORT_NV: Self = Self(1_000_310_000); } #[doc = "Generated from 'VK_EXT_metal_objects'"] impl StructureType { pub const EXPORT_METAL_OBJECT_CREATE_INFO_EXT: Self = Self(1_000_311_000); pub const EXPORT_METAL_OBJECTS_INFO_EXT: Self = Self(1_000_311_001); pub const EXPORT_METAL_DEVICE_INFO_EXT: Self = Self(1_000_311_002); pub const EXPORT_METAL_COMMAND_QUEUE_INFO_EXT: Self = Self(1_000_311_003); pub const EXPORT_METAL_BUFFER_INFO_EXT: Self = Self(1_000_311_004); pub const IMPORT_METAL_BUFFER_INFO_EXT: Self = Self(1_000_311_005); pub const EXPORT_METAL_TEXTURE_INFO_EXT: Self = Self(1_000_311_006); pub const IMPORT_METAL_TEXTURE_INFO_EXT: Self = Self(1_000_311_007); pub const EXPORT_METAL_IO_SURFACE_INFO_EXT: Self = Self(1_000_311_008); pub const IMPORT_METAL_IO_SURFACE_INFO_EXT: Self = Self(1_000_311_009); pub const EXPORT_METAL_SHARED_EVENT_INFO_EXT: Self = Self(1_000_311_010); pub const IMPORT_METAL_SHARED_EVENT_INFO_EXT: Self = Self(1_000_311_011); } #[doc = "Generated from 'VK_KHR_synchronization2'"] impl AccessFlags { pub const NONE_KHR: Self = Self::NONE; } #[doc = "Generated from 'VK_KHR_synchronization2'"] impl AccessFlags2 { pub const TRANSFORM_FEEDBACK_WRITE_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const TRANSFORM_FEEDBACK_COUNTER_READ_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); pub const TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000); #[doc = "read access flag for reading conditional rendering predicate"] pub const CONDITIONAL_RENDERING_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000); pub const COMMAND_PREPROCESS_READ_NV: Self = Self(0b10_0000_0000_0000_0000); pub const COMMAND_PREPROCESS_WRITE_NV: Self = Self(0b100_0000_0000_0000_0000); pub const FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const SHADING_RATE_IMAGE_READ_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR; pub const ACCELERATION_STRUCTURE_READ_KHR: Self = Self(0b10_0000_0000_0000_0000_0000); pub const ACCELERATION_STRUCTURE_WRITE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000); pub const ACCELERATION_STRUCTURE_READ_NV: Self = Self::ACCELERATION_STRUCTURE_READ_KHR; pub const ACCELERATION_STRUCTURE_WRITE_NV: Self = Self::ACCELERATION_STRUCTURE_WRITE_KHR; pub const FRAGMENT_DENSITY_MAP_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); pub const COLOR_ATTACHMENT_READ_NONCOHERENT_EXT: Self = Self(0b1000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_synchronization2'"] impl EventCreateFlags { pub const DEVICE_ONLY_KHR: Self = Self::DEVICE_ONLY; } #[doc = "Generated from 'VK_KHR_synchronization2'"] impl ImageLayout { pub const READ_ONLY_OPTIMAL_KHR: Self = Self::READ_ONLY_OPTIMAL; pub const ATTACHMENT_OPTIMAL_KHR: Self = Self::ATTACHMENT_OPTIMAL; } #[doc = "Generated from 
'VK_KHR_synchronization2'"] impl PipelineStageFlags { pub const NONE_KHR: Self = Self::NONE; } #[doc = "Generated from 'VK_KHR_synchronization2'"] impl PipelineStageFlags2 { pub const TRANSFORM_FEEDBACK_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); #[doc = "A pipeline stage for conditional rendering predicate fetch"] pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b100_0000_0000_0000_0000); pub const COMMAND_PREPROCESS_NV: Self = Self(0b10_0000_0000_0000_0000); pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b100_0000_0000_0000_0000_0000); pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR; pub const ACCELERATION_STRUCTURE_BUILD_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const RAY_TRACING_SHADER_KHR: Self = Self(0b10_0000_0000_0000_0000_0000); pub const RAY_TRACING_SHADER_NV: Self = Self::RAY_TRACING_SHADER_KHR; pub const ACCELERATION_STRUCTURE_BUILD_NV: Self = Self::ACCELERATION_STRUCTURE_BUILD_KHR; pub const FRAGMENT_DENSITY_PROCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const TASK_SHADER_NV: Self = Self::TASK_SHADER_EXT; pub const MESH_SHADER_NV: Self = Self::MESH_SHADER_EXT; pub const TASK_SHADER_EXT: Self = Self(0b1000_0000_0000_0000_0000); pub const MESH_SHADER_EXT: Self = Self(0b1_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_synchronization2'"] impl StructureType { pub const MEMORY_BARRIER_2_KHR: Self = Self::MEMORY_BARRIER_2; pub const BUFFER_MEMORY_BARRIER_2_KHR: Self = Self::BUFFER_MEMORY_BARRIER_2; pub const IMAGE_MEMORY_BARRIER_2_KHR: Self = Self::IMAGE_MEMORY_BARRIER_2; pub const DEPENDENCY_INFO_KHR: Self = Self::DEPENDENCY_INFO; pub const SUBMIT_INFO_2_KHR: Self = Self::SUBMIT_INFO_2; pub const SEMAPHORE_SUBMIT_INFO_KHR: Self = Self::SEMAPHORE_SUBMIT_INFO; pub const COMMAND_BUFFER_SUBMIT_INFO_KHR: Self = Self::COMMAND_BUFFER_SUBMIT_INFO; pub const PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES; pub const QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: Self = Self(1_000_314_008); pub const CHECKPOINT_DATA_2_NV: Self = Self(1_000_314_009); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl AccelerationStructureCreateFlagsKHR { pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl AccessFlags2 { pub const DESCRIPTOR_BUFFER_READ_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl BufferCreateFlags { pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b10_0000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl BufferUsageFlags { pub const SAMPLER_DESCRIPTOR_BUFFER_EXT: Self = Self(0b10_0000_0000_0000_0000_0000); pub const RESOURCE_DESCRIPTOR_BUFFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000); pub const PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl DescriptorSetLayoutCreateFlags { pub const DESCRIPTOR_BUFFER_EXT: Self = Self(0b1_0000); pub const EMBEDDED_IMMUTABLE_SAMPLERS_EXT: Self = Self(0b10_0000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl ImageCreateFlags { pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b1_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl ImageViewCreateFlags { pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b100); } #[doc = "Generated from 
'VK_EXT_descriptor_buffer'"] impl PipelineCreateFlags { pub const DESCRIPTOR_BUFFER_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl SamplerCreateFlags { pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_descriptor_buffer'"] impl StructureType { pub const PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT: Self = Self(1_000_316_000); pub const PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT: Self = Self(1_000_316_001); pub const PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT: Self = Self(1_000_316_002); pub const DESCRIPTOR_ADDRESS_INFO_EXT: Self = Self(1_000_316_003); pub const DESCRIPTOR_GET_INFO_EXT: Self = Self(1_000_316_004); pub const BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_005); pub const IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_006); pub const IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_007); pub const SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_008); pub const OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT: Self = Self(1_000_316_010); pub const DESCRIPTOR_BUFFER_BINDING_INFO_EXT: Self = Self(1_000_316_011); pub const DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT: Self = Self(1_000_316_012); pub const ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_009); } #[doc = "Generated from 'VK_EXT_graphics_pipeline_library'"] impl PipelineCreateFlags { pub const RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const LINK_TIME_OPTIMIZATION_EXT: Self = Self(0b100_0000_0000); } #[doc = "Generated from 'VK_EXT_graphics_pipeline_library'"] impl PipelineLayoutCreateFlags { pub const INDEPENDENT_SETS_EXT: Self = Self(0b10); } #[doc = "Generated from 'VK_EXT_graphics_pipeline_library'"] impl StructureType { pub const PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT: Self = Self(1_000_320_000); pub const PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT: Self = Self(1_000_320_001); pub const GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT: Self = Self(1_000_320_002); } #[doc = "Generated from 'VK_AMD_shader_early_and_late_fragment_tests'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD: Self = Self(1_000_321_000); } #[doc = "Generated from 'VK_KHR_fragment_shader_barycentric'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR: Self = Self(1_000_203_000); pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR: Self = Self(1_000_322_000); } #[doc = "Generated from 'VK_KHR_shader_subgroup_uniform_control_flow'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR: Self = Self(1_000_323_000); } #[doc = "Generated from 'VK_KHR_zero_initialize_workgroup_memory'"] impl StructureType { pub const PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES; } #[doc = "Generated from 'VK_NV_fragment_shading_rate_enums'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV: Self = Self(1_000_326_000); pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV: Self = Self(1_000_326_001); pub const PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV: Self = Self(1_000_326_002); } #[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"] 
impl AccelerationStructureCreateFlagsKHR { pub const MOTION_NV: Self = Self(0b100); } #[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"] impl BuildAccelerationStructureFlagsKHR { pub const MOTION_NV: Self = Self(0b10_0000); } #[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"] impl PipelineCreateFlags { pub const RAY_TRACING_ALLOW_MOTION_NV: Self = Self(0b1_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"] impl StructureType { pub const ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV: Self = Self(1_000_327_000); pub const PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV: Self = Self(1_000_327_001); pub const ACCELERATION_STRUCTURE_MOTION_INFO_NV: Self = Self(1_000_327_002); } #[doc = "Generated from 'VK_EXT_mesh_shader'"] impl IndirectCommandsTokenTypeNV { pub const DRAW_MESH_TASKS: Self = Self(1_000_328_000); } #[doc = "Generated from 'VK_EXT_mesh_shader'"] impl PipelineStageFlags { pub const TASK_SHADER_EXT: Self = Self(0b1000_0000_0000_0000_0000); pub const MESH_SHADER_EXT: Self = Self(0b1_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_mesh_shader'"] impl QueryPipelineStatisticFlags { pub const TASK_SHADER_INVOCATIONS_EXT: Self = Self(0b1000_0000_0000); pub const MESH_SHADER_INVOCATIONS_EXT: Self = Self(0b1_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_mesh_shader'"] impl QueryType { pub const MESH_PRIMITIVES_GENERATED_EXT: Self = Self(1_000_328_000); } #[doc = "Generated from 'VK_EXT_mesh_shader'"] impl ShaderStageFlags { pub const TASK_EXT: Self = Self(0b100_0000); pub const MESH_EXT: Self = Self(0b1000_0000); } #[doc = "Generated from 'VK_EXT_mesh_shader'"] impl StructureType { pub const PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT: Self = Self(1_000_328_000); pub const PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT: Self = Self(1_000_328_001); } #[doc = "Generated from 'VK_EXT_ycbcr_2plane_444_formats'"] impl Format { pub const G8_B8R8_2PLANE_444_UNORM_EXT: Self = Self::G8_B8R8_2PLANE_444_UNORM; pub const G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT: Self = Self::G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16; pub const G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT: Self = Self::G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16; pub const G16_B16R16_2PLANE_444_UNORM_EXT: Self = Self::G16_B16R16_2PLANE_444_UNORM; } #[doc = "Generated from 'VK_EXT_ycbcr_2plane_444_formats'"] impl StructureType { pub const PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT: Self = Self(1_000_330_000); } #[doc = "Generated from 'VK_EXT_fragment_density_map2'"] impl ImageViewCreateFlags { pub const FRAGMENT_DENSITY_MAP_DEFERRED_EXT: Self = Self(0b10); } #[doc = "Generated from 'VK_EXT_fragment_density_map2'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT: Self = Self(1_000_332_000); pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT: Self = Self(1_000_332_001); } #[doc = "Generated from 'VK_QCOM_rotated_copy_commands'"] impl StructureType { pub const COPY_COMMAND_TRANSFORM_INFO_QCOM: Self = Self(1_000_333_000); } #[doc = "Generated from 'VK_EXT_image_robustness'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES; } #[doc = "Generated from 'VK_KHR_workgroup_memory_explicit_layout'"] impl StructureType { pub const PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR: Self = Self(1_000_336_000); } #[doc = "Generated from 'VK_KHR_copy_commands2'"] impl StructureType { pub const COPY_BUFFER_INFO_2_KHR: 
Self = Self::COPY_BUFFER_INFO_2; pub const COPY_IMAGE_INFO_2_KHR: Self = Self::COPY_IMAGE_INFO_2; pub const COPY_BUFFER_TO_IMAGE_INFO_2_KHR: Self = Self::COPY_BUFFER_TO_IMAGE_INFO_2; pub const COPY_IMAGE_TO_BUFFER_INFO_2_KHR: Self = Self::COPY_IMAGE_TO_BUFFER_INFO_2; pub const BLIT_IMAGE_INFO_2_KHR: Self = Self::BLIT_IMAGE_INFO_2; pub const RESOLVE_IMAGE_INFO_2_KHR: Self = Self::RESOLVE_IMAGE_INFO_2; pub const BUFFER_COPY_2_KHR: Self = Self::BUFFER_COPY_2; pub const IMAGE_COPY_2_KHR: Self = Self::IMAGE_COPY_2; pub const IMAGE_BLIT_2_KHR: Self = Self::IMAGE_BLIT_2; pub const BUFFER_IMAGE_COPY_2_KHR: Self = Self::BUFFER_IMAGE_COPY_2; pub const IMAGE_RESOLVE_2_KHR: Self = Self::IMAGE_RESOLVE_2; } #[doc = "Generated from 'VK_EXT_image_compression_control'"] impl Result { pub const ERROR_COMPRESSION_EXHAUSTED_EXT: Self = Self(-1_000_338_000); } #[doc = "Generated from 'VK_EXT_image_compression_control'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT: Self = Self(1_000_338_000); pub const IMAGE_COMPRESSION_CONTROL_EXT: Self = Self(1_000_338_001); pub const SUBRESOURCE_LAYOUT_2_EXT: Self = Self::SUBRESOURCE_LAYOUT_2_KHR; pub const IMAGE_SUBRESOURCE_2_EXT: Self = Self::IMAGE_SUBRESOURCE_2_KHR; pub const IMAGE_COMPRESSION_PROPERTIES_EXT: Self = Self(1_000_338_004); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"] impl DependencyFlags { #[doc = "Dependency may be a feedback loop"] pub const FEEDBACK_LOOP_EXT: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"] impl ImageLayout { pub const ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT: Self = Self(1_000_339_000); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"] impl ImageUsageFlags { pub const ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b1000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"] impl PipelineCreateFlags { pub const COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"] impl StructureType { pub const PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT: Self = Self(1_000_339_000); } #[doc = "Generated from 'VK_EXT_4444_formats'"] impl Format { pub const A4R4G4B4_UNORM_PACK16_EXT: Self = Self::A4R4G4B4_UNORM_PACK16; pub const A4B4G4R4_UNORM_PACK16_EXT: Self = Self::A4B4G4R4_UNORM_PACK16; } #[doc = "Generated from 'VK_EXT_4444_formats'"] impl StructureType { pub const PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: Self = Self(1_000_340_000); } #[doc = "Generated from 'VK_EXT_device_fault'"] impl StructureType { pub const PHYSICAL_DEVICE_FAULT_FEATURES_EXT: Self = Self(1_000_341_000); pub const DEVICE_FAULT_COUNTS_EXT: Self = Self(1_000_341_001); pub const DEVICE_FAULT_INFO_EXT: Self = Self(1_000_341_002); } #[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"] impl PipelineColorBlendStateCreateFlags { pub const RASTERIZATION_ORDER_ATTACHMENT_ACCESS_ARM: Self = Self::RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT; } #[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"] impl PipelineDepthStencilStateCreateFlags { pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_ARM: Self = Self::RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT; pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_ARM: Self = Self::RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT; } 
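// ---------------------------------------------------------------------------
// Editorial illustration (not generated from the Vulkan registry): the impl
// blocks in this file only attach extension-defined associated constants to
// the core ash enum and bitflag types, so downstream code uses them exactly
// like core values. A minimal sketch follows, relying only on constants
// defined above and on the `as_raw`/`contains` helpers ash generates for its
// enums and flags; the test name itself is made up for illustration, and the
// test assumes this module's own imports keep `Result`, `Format`, and
// `ImageUsageFlags` in scope, as the surrounding impl blocks already do.
#[cfg(test)]
#[test]
fn extension_constants_behave_like_core_values() {
    // `VK_EXT_image_compression_control` contributes a negative error code.
    assert!(Result::ERROR_COMPRESSION_EXHAUSTED_EXT.as_raw() == -1_000_338_000);
    // `VK_EXT_4444_formats` constants alias the formats promoted to core.
    assert!(Format::A4R4G4B4_UNORM_PACK16_EXT == Format::A4R4G4B4_UNORM_PACK16);
    // `VK_EXT_attachment_feedback_loop_layout` adds an image-usage bit that
    // combines with core usages through the generated bitflag operators.
    let usage =
        ImageUsageFlags::COLOR_ATTACHMENT | ImageUsageFlags::ATTACHMENT_FEEDBACK_LOOP_EXT;
    assert!(usage.contains(ImageUsageFlags::ATTACHMENT_FEEDBACK_LOOP_EXT));
}
// ---------------------------------------------------------------------------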
#[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"] impl StructureType { pub const PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM: Self = Self::PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT; } #[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"] impl SubpassDescriptionFlags { pub const RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_ARM: Self = Self::RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT; pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_ARM: Self = Self::RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT; pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_ARM: Self = Self::RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT; } #[doc = "Generated from 'VK_EXT_rgba10x6_formats'"] impl StructureType { pub const PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT: Self = Self(1_000_344_000); } #[doc = "Generated from 'VK_EXT_directfb_surface'"] impl StructureType { pub const DIRECTFB_SURFACE_CREATE_INFO_EXT: Self = Self(1_000_346_000); } #[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"] impl DescriptorPoolCreateFlags { pub const HOST_ONLY_VALVE: Self = Self::HOST_ONLY_EXT; } #[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"] impl DescriptorSetLayoutCreateFlags { pub const HOST_ONLY_POOL_VALVE: Self = Self::HOST_ONLY_POOL_EXT; } #[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"] impl DescriptorType { pub const MUTABLE_VALVE: Self = Self::MUTABLE_EXT; } #[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"] impl StructureType { pub const PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE: Self = Self::PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT; pub const MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE: Self = Self::MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT; } #[doc = "Generated from 'VK_EXT_vertex_input_dynamic_state'"] impl DynamicState { pub const VERTEX_INPUT_EXT: Self = Self(1_000_352_000); } #[doc = "Generated from 'VK_EXT_vertex_input_dynamic_state'"] impl StructureType { pub const PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT: Self = Self(1_000_352_000); pub const VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT: Self = Self(1_000_352_001); pub const VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT: Self = Self(1_000_352_002); } #[doc = "Generated from 'VK_EXT_physical_device_drm'"] impl StructureType { pub const PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: Self = Self(1_000_353_000); } #[doc = "Generated from 'VK_EXT_device_address_binding_report'"] impl DebugUtilsMessageTypeFlagsEXT { pub const DEVICE_ADDRESS_BINDING: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_device_address_binding_report'"] impl StructureType { pub const PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT: Self = Self(1_000_354_000); pub const DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT: Self = Self(1_000_354_001); } #[doc = "Generated from 'VK_EXT_depth_clip_control'"] impl StructureType { pub const PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT: Self = Self(1_000_355_000); pub const PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT: Self = Self(1_000_355_001); } #[doc = "Generated from 'VK_EXT_primitive_topology_list_restart'"] impl StructureType { pub const PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: Self = Self(1_000_356_000); } #[doc = "Generated from 'VK_KHR_format_feature_flags2'"] impl StructureType { pub const FORMAT_PROPERTIES_3_KHR: Self = Self::FORMAT_PROPERTIES_3; } #[doc = "Generated from 'VK_FUCHSIA_external_memory'"] impl ExternalMemoryHandleTypeFlags { pub 
const ZIRCON_VMO_FUCHSIA: Self = Self(0b1000_0000_0000); } #[doc = "Generated from 'VK_FUCHSIA_external_memory'"] impl StructureType { pub const IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_364_000); pub const MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA: Self = Self(1_000_364_001); pub const MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_364_002); } #[doc = "Generated from 'VK_FUCHSIA_external_semaphore'"] impl ExternalSemaphoreHandleTypeFlags { pub const ZIRCON_EVENT_FUCHSIA: Self = Self(0b1000_0000); } #[doc = "Generated from 'VK_FUCHSIA_external_semaphore'"] impl StructureType { pub const IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_365_000); pub const SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_365_001); } #[doc = "Generated from 'VK_FUCHSIA_buffer_collection'"] impl DebugReportObjectTypeEXT { pub const BUFFER_COLLECTION_FUCHSIA: Self = Self(1_000_366_000); } #[doc = "Generated from 'VK_FUCHSIA_buffer_collection'"] impl ObjectType { #[doc = "VkBufferCollectionFUCHSIA"] pub const BUFFER_COLLECTION_FUCHSIA: Self = Self(1_000_366_000); } #[doc = "Generated from 'VK_FUCHSIA_buffer_collection'"] impl StructureType { pub const BUFFER_COLLECTION_CREATE_INFO_FUCHSIA: Self = Self(1_000_366_000); pub const IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA: Self = Self(1_000_366_001); pub const BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA: Self = Self(1_000_366_002); pub const BUFFER_COLLECTION_PROPERTIES_FUCHSIA: Self = Self(1_000_366_003); pub const BUFFER_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_004); pub const BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA: Self = Self(1_000_366_005); pub const IMAGE_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_006); pub const IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_007); pub const SYSMEM_COLOR_SPACE_FUCHSIA: Self = Self(1_000_366_008); pub const BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_009); } #[doc = "Generated from 'VK_HUAWEI_subpass_shading'"] impl PipelineBindPoint { pub const SUBPASS_SHADING_HUAWEI: Self = Self(1_000_369_003); } #[doc = "Generated from 'VK_HUAWEI_subpass_shading'"] impl PipelineStageFlags2 { pub const SUBPASS_SHADER_HUAWEI: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_subpass_shading'"] impl ShaderStageFlags { pub const SUBPASS_SHADING_HUAWEI: Self = Self(0b100_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_subpass_shading'"] impl StructureType { pub const SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI: Self = Self(1_000_369_000); pub const PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI: Self = Self(1_000_369_001); pub const PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI: Self = Self(1_000_369_002); } #[doc = "Generated from 'VK_HUAWEI_invocation_mask'"] impl AccessFlags2 { pub const INVOCATION_MASK_READ_HUAWEI: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_invocation_mask'"] impl ImageUsageFlags { pub const INVOCATION_MASK_HUAWEI: Self = Self(0b100_0000_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_invocation_mask'"] impl PipelineStageFlags2 { pub const INVOCATION_MASK_HUAWEI: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_invocation_mask'"] impl StructureType { pub const PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI: Self = Self(1_000_370_000); } #[doc = "Generated from 'VK_NV_external_memory_rdma'"] impl ExternalMemoryHandleTypeFlags { pub const 
RDMA_ADDRESS_NV: Self = Self(0b1_0000_0000_0000); } #[doc = "Generated from 'VK_NV_external_memory_rdma'"] impl MemoryPropertyFlags { pub const RDMA_CAPABLE_NV: Self = Self(0b1_0000_0000); } #[doc = "Generated from 'VK_NV_external_memory_rdma'"] impl StructureType { pub const MEMORY_GET_REMOTE_ADDRESS_INFO_NV: Self = Self(1_000_371_000); pub const PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV: Self = Self(1_000_371_001); } #[doc = "Generated from 'VK_EXT_pipeline_properties'"] impl StructureType { pub const PIPELINE_PROPERTIES_IDENTIFIER_EXT: Self = Self(1_000_372_000); pub const PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT: Self = Self(1_000_372_001); pub const PIPELINE_INFO_EXT: Self = Self::PIPELINE_INFO_KHR; } #[doc = "Generated from 'VK_EXT_frame_boundary'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT: Self = Self(1_000_375_000); pub const FRAME_BOUNDARY_EXT: Self = Self(1_000_375_001); } #[doc = "Generated from 'VK_EXT_multisampled_render_to_single_sampled'"] impl ImageCreateFlags { pub const MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXT: Self = Self(0b100_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_multisampled_render_to_single_sampled'"] impl StructureType { pub const PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT: Self = Self(1_000_376_000); pub const SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT: Self = Self(1_000_376_001); pub const MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT: Self = Self(1_000_376_002); } #[doc = "Generated from 'VK_EXT_extended_dynamic_state2'"] impl DynamicState { #[doc = "Not promoted to 1.3"] pub const PATCH_CONTROL_POINTS_EXT: Self = Self(1_000_377_000); pub const RASTERIZER_DISCARD_ENABLE_EXT: Self = Self::RASTERIZER_DISCARD_ENABLE; pub const DEPTH_BIAS_ENABLE_EXT: Self = Self::DEPTH_BIAS_ENABLE; #[doc = "Not promoted to 1.3"] pub const LOGIC_OP_EXT: Self = Self(1_000_377_003); pub const PRIMITIVE_RESTART_ENABLE_EXT: Self = Self::PRIMITIVE_RESTART_ENABLE; } #[doc = "Generated from 'VK_EXT_extended_dynamic_state2'"] impl StructureType { #[doc = "Not promoted to 1.3"] pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: Self = Self(1_000_377_000); } #[doc = "Generated from 'VK_QNX_screen_surface'"] impl StructureType { pub const SCREEN_SURFACE_CREATE_INFO_QNX: Self = Self(1_000_378_000); } #[doc = "Generated from 'VK_EXT_color_write_enable'"] impl DynamicState { pub const COLOR_WRITE_ENABLE_EXT: Self = Self(1_000_381_000); } #[doc = "Generated from 'VK_EXT_color_write_enable'"] impl StructureType { pub const PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: Self = Self(1_000_381_000); pub const PIPELINE_COLOR_WRITE_CREATE_INFO_EXT: Self = Self(1_000_381_001); } #[doc = "Generated from 'VK_EXT_primitives_generated_query'"] impl QueryType { pub const PRIMITIVES_GENERATED_EXT: Self = Self(1_000_382_000); } #[doc = "Generated from 'VK_EXT_primitives_generated_query'"] impl StructureType { pub const PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT: Self = Self(1_000_382_000); } #[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"] impl AccessFlags2 { pub const SHADER_BINDING_TABLE_READ_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"] impl PipelineStageFlags2 { pub const ACCELERATION_STRUCTURE_COPY_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"] impl QueryType { pub const 
ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR: Self = Self(1_000_386_000); pub const ACCELERATION_STRUCTURE_SIZE_KHR: Self = Self(1_000_386_001); } #[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"] impl StructureType { pub const PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR: Self = Self(1_000_386_000); } #[doc = "Generated from 'VK_EXT_global_priority_query'"] impl StructureType { pub const PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT: Self = Self::PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR; pub const QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT: Self = Self::QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR; } #[doc = "Generated from 'VK_EXT_image_view_min_lod'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT: Self = Self(1_000_391_000); pub const IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT: Self = Self(1_000_391_001); } #[doc = "Generated from 'VK_EXT_multi_draw'"] impl StructureType { pub const PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT: Self = Self(1_000_392_000); pub const PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT: Self = Self(1_000_392_001); } #[doc = "Generated from 'VK_EXT_image_2d_view_of_3d'"] impl ImageCreateFlags { #[doc = "Image is created with a layout where individual slices are capable of being used as 2D images"] pub const TYPE_2D_VIEW_COMPATIBLE_EXT: Self = Self(0b10_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_image_2d_view_of_3d'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT: Self = Self(1_000_393_000); } #[doc = "Generated from 'VK_KHR_portability_enumeration'"] impl InstanceCreateFlags { pub const ENUMERATE_PORTABILITY_KHR: Self = Self(0b1); } #[doc = "Generated from 'VK_EXT_shader_tile_image'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT: Self = Self(1_000_395_000); pub const PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT: Self = Self(1_000_395_001); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl AccessFlags2 { pub const MICROMAP_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const MICROMAP_WRITE_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl BufferUsageFlags { pub const MICROMAP_BUILD_INPUT_READ_ONLY_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const MICROMAP_STORAGE_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl BuildAccelerationStructureFlagsKHR { pub const ALLOW_OPACITY_MICROMAP_UPDATE_EXT: Self = Self(0b100_0000); pub const ALLOW_DISABLE_OPACITY_MICROMAPS_EXT: Self = Self(0b1000_0000); pub const ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT: Self = Self(0b1_0000_0000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl GeometryInstanceFlagsKHR { pub const FORCE_OPACITY_MICROMAP_2_STATE_EXT: Self = Self(0b1_0000); pub const DISABLE_OPACITY_MICROMAPS_EXT: Self = Self(0b10_0000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl ObjectType { pub const MICROMAP_EXT: Self = Self(1_000_396_000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl PipelineCreateFlags { pub const RAY_TRACING_OPACITY_MICROMAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl PipelineStageFlags2 { pub const MICROMAP_BUILD_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl QueryType { pub 
const MICROMAP_SERIALIZATION_SIZE_EXT: Self = Self(1_000_396_000); pub const MICROMAP_COMPACTED_SIZE_EXT: Self = Self(1_000_396_001); } #[doc = "Generated from 'VK_EXT_opacity_micromap'"] impl StructureType { pub const MICROMAP_BUILD_INFO_EXT: Self = Self(1_000_396_000); pub const MICROMAP_VERSION_INFO_EXT: Self = Self(1_000_396_001); pub const COPY_MICROMAP_INFO_EXT: Self = Self(1_000_396_002); pub const COPY_MICROMAP_TO_MEMORY_INFO_EXT: Self = Self(1_000_396_003); pub const COPY_MEMORY_TO_MICROMAP_INFO_EXT: Self = Self(1_000_396_004); pub const PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT: Self = Self(1_000_396_005); pub const PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT: Self = Self(1_000_396_006); pub const MICROMAP_CREATE_INFO_EXT: Self = Self(1_000_396_007); pub const MICROMAP_BUILD_SIZES_INFO_EXT: Self = Self(1_000_396_008); pub const ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT: Self = Self(1_000_396_009); } #[doc = "Generated from 'VK_NV_displacement_micromap'"] impl BuildAccelerationStructureFlagsKHR { pub const ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV: Self = Self(0b10_0000_0000); } #[doc = "Generated from 'VK_NV_displacement_micromap'"] impl MicromapTypeEXT { pub const DISPLACEMENT_MICROMAP_NV: Self = Self(1_000_397_000); } #[doc = "Generated from 'VK_NV_displacement_micromap'"] impl PipelineCreateFlags { pub const RAY_TRACING_DISPLACEMENT_MICROMAP_NV: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_displacement_micromap'"] impl StructureType { pub const PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV: Self = Self(1_000_397_000); pub const PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV: Self = Self(1_000_397_001); pub const ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV: Self = Self(1_000_397_002); } #[doc = "Generated from 'VK_EXT_load_store_op_none'"] impl AttachmentLoadOp { pub const NONE_EXT: Self = Self::NONE_KHR; } #[doc = "Generated from 'VK_EXT_load_store_op_none'"] impl AttachmentStoreOp { pub const NONE_EXT: Self = Self::NONE; } #[doc = "Generated from 'VK_HUAWEI_cluster_culling_shader'"] impl PipelineStageFlags2 { pub const CLUSTER_CULLING_SHADER_HUAWEI: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_cluster_culling_shader'"] impl QueryPipelineStatisticFlags { pub const CLUSTER_CULLING_SHADER_INVOCATIONS_HUAWEI: Self = Self(0b10_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_cluster_culling_shader'"] impl ShaderStageFlags { pub const CLUSTER_CULLING_HUAWEI: Self = Self(0b1000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_HUAWEI_cluster_culling_shader'"] impl StructureType { pub const PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI: Self = Self(1_000_404_000); pub const PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI: Self = Self(1_000_404_001); pub const PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI: Self = Self(1_000_404_002); } #[doc = "Generated from 'VK_EXT_border_color_swizzle'"] impl StructureType { pub const PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT: Self = Self(1_000_411_000); pub const SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT: Self = Self(1_000_411_001); } #[doc = "Generated from 'VK_EXT_pageable_device_local_memory'"] impl StructureType { pub const PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT: Self = Self(1_000_412_000); } #[doc = "Generated from 'VK_KHR_maintenance4'"] impl ImageAspectFlags { pub const NONE_KHR: Self = Self::NONE; } #[doc = "Generated from 
'VK_KHR_maintenance4'"] impl StructureType { pub const PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR: Self = Self::PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES; pub const PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES; pub const DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR: Self = Self::DEVICE_BUFFER_MEMORY_REQUIREMENTS; pub const DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR: Self = Self::DEVICE_IMAGE_MEMORY_REQUIREMENTS; } #[doc = "Generated from 'VK_ARM_shader_core_properties'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM: Self = Self(1_000_415_000); } #[doc = "Generated from 'VK_KHR_shader_subgroup_rotate'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR: Self = Self(1_000_416_000); } #[doc = "Generated from 'VK_KHR_shader_subgroup_rotate'"] impl SubgroupFeatureFlags { pub const ROTATE_KHR: Self = Self(0b10_0000_0000); pub const ROTATE_CLUSTERED_KHR: Self = Self(0b100_0000_0000); } #[doc = "Generated from 'VK_ARM_scheduling_controls'"] impl StructureType { pub const DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM: Self = Self(1_000_417_000); pub const PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM: Self = Self(1_000_417_001); pub const PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM: Self = Self(1_000_417_002); } #[doc = "Generated from 'VK_EXT_image_sliced_view_of_3d'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT: Self = Self(1_000_418_000); pub const IMAGE_VIEW_SLICED_CREATE_INFO_EXT: Self = Self(1_000_418_001); } #[doc = "Generated from 'VK_VALVE_descriptor_set_host_mapping'"] impl StructureType { pub const PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE: Self = Self(1_000_420_000); pub const DESCRIPTOR_SET_BINDING_REFERENCE_VALVE: Self = Self(1_000_420_001); pub const DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE: Self = Self(1_000_420_002); } #[doc = "Generated from 'VK_EXT_depth_clamp_zero_one'"] impl StructureType { pub const PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT: Self = Self(1_000_421_000); } #[doc = "Generated from 'VK_EXT_non_seamless_cube_map'"] impl SamplerCreateFlags { pub const NON_SEAMLESS_CUBE_MAP_EXT: Self = Self(0b100); } #[doc = "Generated from 'VK_EXT_non_seamless_cube_map'"] impl StructureType { pub const PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT: Self = Self(1_000_422_000); } #[doc = "Generated from 'VK_ARM_render_pass_striped'"] impl StructureType { pub const PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM: Self = Self(1_000_424_000); pub const PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM: Self = Self(1_000_424_001); pub const RENDER_PASS_STRIPE_BEGIN_INFO_ARM: Self = Self(1_000_424_002); pub const RENDER_PASS_STRIPE_INFO_ARM: Self = Self(1_000_424_003); pub const RENDER_PASS_STRIPE_SUBMIT_INFO_ARM: Self = Self(1_000_424_004); } #[doc = "Generated from 'VK_QCOM_fragment_density_map_offset'"] impl ImageCreateFlags { pub const FRAGMENT_DENSITY_MAP_OFFSET_QCOM: Self = Self(0b1000_0000_0000_0000); } #[doc = "Generated from 'VK_QCOM_fragment_density_map_offset'"] impl StructureType { pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM: Self = Self(1_000_425_000); pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM: Self = Self(1_000_425_001); pub const SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM: Self = Self(1_000_425_002); } #[doc = "Generated from 'VK_NV_copy_memory_indirect'"] impl StructureType { pub const 
PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV: Self = Self(1_000_426_000); pub const PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV: Self = Self(1_000_426_001); } #[doc = "Generated from 'VK_NV_memory_decompression'"] impl StructureType { pub const PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV: Self = Self(1_000_427_000); pub const PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV: Self = Self(1_000_427_001); } #[doc = "Generated from 'VK_NV_device_generated_commands_compute'"] impl DescriptorSetLayoutCreateFlags { pub const INDIRECT_BINDABLE_NV: Self = Self(0b1000_0000); } #[doc = "Generated from 'VK_NV_device_generated_commands_compute'"] impl IndirectCommandsTokenTypeNV { pub const PIPELINE: Self = Self(1_000_428_003); pub const DISPATCH: Self = Self(1_000_428_004); } #[doc = "Generated from 'VK_NV_device_generated_commands_compute'"] impl StructureType { pub const PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV: Self = Self(1_000_428_000); pub const COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV: Self = Self(1_000_428_001); pub const PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV: Self = Self(1_000_428_002); } #[doc = "Generated from 'VK_NV_linear_color_attachment'"] impl FormatFeatureFlags2 { #[doc = "Format support linear image as render target, it cannot be mixed with non linear attachment"] pub const LINEAR_COLOR_ATTACHMENT_NV: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_linear_color_attachment'"] impl StructureType { pub const PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV: Self = Self(1_000_430_000); } #[doc = "Generated from 'VK_KHR_shader_maximal_reconvergence'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR: Self = Self(1_000_434_000); } #[doc = "Generated from 'VK_EXT_image_compression_control_swapchain'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT: Self = Self(1_000_437_000); } #[doc = "Generated from 'VK_QCOM_image_processing'"] impl DescriptorType { pub const SAMPLE_WEIGHT_IMAGE_QCOM: Self = Self(1_000_440_000); pub const BLOCK_MATCH_IMAGE_QCOM: Self = Self(1_000_440_001); } #[doc = "Generated from 'VK_QCOM_image_processing'"] impl FormatFeatureFlags2 { pub const WEIGHT_IMAGE_QCOM: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000); pub const WEIGHT_SAMPLED_IMAGE_QCOM: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000); pub const BLOCK_MATCHING_QCOM: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const BOX_FILTER_SAMPLED_QCOM: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_QCOM_image_processing'"] impl ImageUsageFlags { pub const SAMPLE_WEIGHT_QCOM: Self = Self(0b1_0000_0000_0000_0000_0000); pub const SAMPLE_BLOCK_MATCH_QCOM: Self = Self(0b10_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_QCOM_image_processing'"] impl SamplerCreateFlags { pub const IMAGE_PROCESSING_QCOM: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_QCOM_image_processing'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM: Self = Self(1_000_440_000); pub const PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM: Self = Self(1_000_440_001); pub const IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM: Self = Self(1_000_440_002); } #[doc = "Generated from 'VK_EXT_nested_command_buffer'"] impl RenderingFlags { pub const CONTENTS_INLINE_EXT: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_EXT_nested_command_buffer'"] impl 
StructureType { pub const PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT: Self = Self(1_000_451_000); pub const PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT: Self = Self(1_000_451_001); } #[doc = "Generated from 'VK_EXT_nested_command_buffer'"] impl SubpassContents { pub const INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT: Self = Self(1_000_451_000); } #[doc = "Generated from 'VK_EXT_external_memory_acquire_unmodified'"] impl StructureType { pub const EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT: Self = Self(1_000_453_000); } #[doc = "Generated from 'VK_EXT_extended_dynamic_state3'"] impl DynamicState { pub const DEPTH_CLAMP_ENABLE_EXT: Self = Self(1_000_455_003); pub const POLYGON_MODE_EXT: Self = Self(1_000_455_004); pub const RASTERIZATION_SAMPLES_EXT: Self = Self(1_000_455_005); pub const SAMPLE_MASK_EXT: Self = Self(1_000_455_006); pub const ALPHA_TO_COVERAGE_ENABLE_EXT: Self = Self(1_000_455_007); pub const ALPHA_TO_ONE_ENABLE_EXT: Self = Self(1_000_455_008); pub const LOGIC_OP_ENABLE_EXT: Self = Self(1_000_455_009); pub const COLOR_BLEND_ENABLE_EXT: Self = Self(1_000_455_010); pub const COLOR_BLEND_EQUATION_EXT: Self = Self(1_000_455_011); pub const COLOR_WRITE_MASK_EXT: Self = Self(1_000_455_012); pub const TESSELLATION_DOMAIN_ORIGIN_EXT: Self = Self(1_000_455_002); pub const RASTERIZATION_STREAM_EXT: Self = Self(1_000_455_013); pub const CONSERVATIVE_RASTERIZATION_MODE_EXT: Self = Self(1_000_455_014); pub const EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT: Self = Self(1_000_455_015); pub const DEPTH_CLIP_ENABLE_EXT: Self = Self(1_000_455_016); pub const SAMPLE_LOCATIONS_ENABLE_EXT: Self = Self(1_000_455_017); pub const COLOR_BLEND_ADVANCED_EXT: Self = Self(1_000_455_018); pub const PROVOKING_VERTEX_MODE_EXT: Self = Self(1_000_455_019); pub const LINE_RASTERIZATION_MODE_EXT: Self = Self(1_000_455_020); pub const LINE_STIPPLE_ENABLE_EXT: Self = Self(1_000_455_021); pub const DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT: Self = Self(1_000_455_022); pub const VIEWPORT_W_SCALING_ENABLE_NV: Self = Self(1_000_455_023); pub const VIEWPORT_SWIZZLE_NV: Self = Self(1_000_455_024); pub const COVERAGE_TO_COLOR_ENABLE_NV: Self = Self(1_000_455_025); pub const COVERAGE_TO_COLOR_LOCATION_NV: Self = Self(1_000_455_026); pub const COVERAGE_MODULATION_MODE_NV: Self = Self(1_000_455_027); pub const COVERAGE_MODULATION_TABLE_ENABLE_NV: Self = Self(1_000_455_028); pub const COVERAGE_MODULATION_TABLE_NV: Self = Self(1_000_455_029); pub const SHADING_RATE_IMAGE_ENABLE_NV: Self = Self(1_000_455_030); pub const REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV: Self = Self(1_000_455_031); pub const COVERAGE_REDUCTION_MODE_NV: Self = Self(1_000_455_032); } #[doc = "Generated from 'VK_EXT_extended_dynamic_state3'"] impl StructureType { pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT: Self = Self(1_000_455_000); pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT: Self = Self(1_000_455_001); } #[doc = "Generated from 'VK_EXT_subpass_merge_feedback'"] impl StructureType { pub const PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT: Self = Self(1_000_458_000); pub const RENDER_PASS_CREATION_CONTROL_EXT: Self = Self(1_000_458_001); pub const RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT: Self = Self(1_000_458_002); pub const RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT: Self = Self(1_000_458_003); } #[doc = "Generated from 'VK_LUNARG_direct_driver_loading'"] impl StructureType { pub const DIRECT_DRIVER_LOADING_INFO_LUNARG: Self = Self(1_000_459_000); pub const DIRECT_DRIVER_LOADING_LIST_LUNARG: Self = 
Self(1_000_459_001); } #[doc = "Generated from 'VK_EXT_shader_module_identifier'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT: Self = Self(1_000_462_000); pub const PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT: Self = Self(1_000_462_001); pub const PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT: Self = Self(1_000_462_002); pub const SHADER_MODULE_IDENTIFIER_EXT: Self = Self(1_000_462_003); } #[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"] impl PipelineColorBlendStateCreateFlags { pub const RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT: Self = Self(0b1); } #[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"] impl PipelineDepthStencilStateCreateFlags { pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT: Self = Self(0b1); pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT: Self = Self(0b10); } #[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"] impl StructureType { pub const PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT: Self = Self(1_000_342_000); } #[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"] impl SubpassDescriptionFlags { pub const RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT: Self = Self(0b1_0000); pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT: Self = Self(0b10_0000); pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl AccessFlags2 { pub const OPTICAL_FLOW_READ_NV: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const OPTICAL_FLOW_WRITE_NV: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl Format { pub const R16G16_S10_5_NV: Self = Self(1_000_464_000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl FormatFeatureFlags2 { pub const OPTICAL_FLOW_IMAGE_NV: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const OPTICAL_FLOW_VECTOR_NV: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); pub const OPTICAL_FLOW_COST_NV: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl ObjectType { pub const OPTICAL_FLOW_SESSION_NV: Self = Self(1_000_464_000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl PipelineStageFlags2 { pub const OPTICAL_FLOW_NV: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl QueueFlags { pub const OPTICAL_FLOW_NV: Self = Self(0b1_0000_0000); } #[doc = "Generated from 'VK_NV_optical_flow'"] impl StructureType { pub const PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV: Self = Self(1_000_464_000); pub const PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV: Self = Self(1_000_464_001); pub const OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV: Self = Self(1_000_464_002); pub const OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV: Self = Self(1_000_464_003); pub const OPTICAL_FLOW_SESSION_CREATE_INFO_NV: Self = Self(1_000_464_004); pub const OPTICAL_FLOW_EXECUTE_INFO_NV: Self = Self(1_000_464_005); pub const OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV: Self = Self(1_000_464_010); } #[doc = "Generated from 'VK_EXT_legacy_dithering'"] impl RenderingFlags { pub const ENABLE_LEGACY_DITHERING_EXT: Self = Self(0b1000); } #[doc = "Generated from 'VK_EXT_legacy_dithering'"] impl StructureType { pub const PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT: Self = 
Self(1_000_465_000); } #[doc = "Generated from 'VK_EXT_legacy_dithering'"] impl SubpassDescriptionFlags { pub const ENABLE_LEGACY_DITHERING_EXT: Self = Self(0b1000_0000); } #[doc = "Generated from 'VK_EXT_pipeline_protected_access'"] impl PipelineCreateFlags { pub const NO_PROTECTED_ACCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000); pub const PROTECTED_ACCESS_ONLY_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_EXT_pipeline_protected_access'"] impl StructureType { pub const PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT: Self = Self(1_000_466_000); } #[doc = "Generated from 'VK_ANDROID_external_format_resolve'"] impl ResolveModeFlags { pub const EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_ANDROID_external_format_resolve'"] impl StructureType { pub const PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID: Self = Self(1_000_468_000); pub const PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID: Self = Self(1_000_468_001); pub const ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID: Self = Self(1_000_468_002); } #[doc = "Generated from 'VK_KHR_maintenance5'"] impl BufferUsageFlags2KHR { pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b10_0000_0000); pub const SHADER_BINDING_TABLE: Self = Self(0b100_0000_0000); pub const RAY_TRACING_NV: Self = Self::SHADER_BINDING_TABLE; pub const TRANSFORM_FEEDBACK_BUFFER_EXT: Self = Self(0b1000_0000_0000); pub const TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT: Self = Self(0b1_0000_0000_0000); pub const VIDEO_DECODE_SRC: Self = Self(0b10_0000_0000_0000); pub const VIDEO_DECODE_DST: Self = Self(0b100_0000_0000_0000); pub const VIDEO_ENCODE_DST: Self = Self(0b1000_0000_0000_0000); pub const VIDEO_ENCODE_SRC: Self = Self(0b1_0000_0000_0000_0000); pub const SHADER_DEVICE_ADDRESS: Self = Self(0b10_0000_0000_0000_0000); pub const ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY: Self = Self(0b1000_0000_0000_0000_0000); pub const ACCELERATION_STRUCTURE_STORAGE: Self = Self(0b1_0000_0000_0000_0000_0000); pub const SAMPLER_DESCRIPTOR_BUFFER_EXT: Self = Self(0b10_0000_0000_0000_0000_0000); pub const RESOURCE_DESCRIPTOR_BUFFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000); pub const PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); pub const MICROMAP_BUILD_INPUT_READ_ONLY_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const MICROMAP_STORAGE_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_maintenance5'"] impl Format { pub const A1B5G5R5_UNORM_PACK16_KHR: Self = Self(1_000_470_000); pub const A8_UNORM_KHR: Self = Self(1_000_470_001); } #[doc = "Generated from 'VK_KHR_maintenance5'"] impl PipelineCreateFlags2KHR { pub const VIEW_INDEX_FROM_DEVICE_INDEX: Self = Self(0b1000); pub const DISPATCH_BASE: Self = Self(0b1_0000); pub const DEFER_COMPILE_NV: Self = Self(0b10_0000); pub const CAPTURE_STATISTICS: Self = Self(0b100_0000); pub const CAPTURE_INTERNAL_REPRESENTATIONS: Self = Self(0b1000_0000); pub const FAIL_ON_PIPELINE_COMPILE_REQUIRED: Self = Self(0b1_0000_0000); pub const EARLY_RETURN_ON_FAILURE: Self = Self(0b10_0000_0000); pub const LINK_TIME_OPTIMIZATION_EXT: Self = Self(0b100_0000_0000); pub const RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000); pub const LIBRARY: Self = Self(0b1000_0000_0000); pub const RAY_TRACING_SKIP_TRIANGLES: Self = Self(0b1_0000_0000_0000); pub const RAY_TRACING_SKIP_AABBS: Self = 
Self(0b10_0000_0000_0000); pub const RAY_TRACING_NO_NULL_ANY_HIT_SHADERS: Self = Self(0b100_0000_0000_0000); pub const RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS: Self = Self(0b1000_0000_0000_0000); pub const RAY_TRACING_NO_NULL_MISS_SHADERS: Self = Self(0b1_0000_0000_0000_0000); pub const RAY_TRACING_NO_NULL_INTERSECTION_SHADERS: Self = Self(0b10_0000_0000_0000_0000); pub const RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY: Self = Self(0b1000_0000_0000_0000_0000); pub const INDIRECT_BINDABLE_NV: Self = Self(0b100_0000_0000_0000_0000); pub const RAY_TRACING_ALLOW_MOTION_NV: Self = Self(0b1_0000_0000_0000_0000_0000); pub const RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT: Self = Self(0b10_0000_0000_0000_0000_0000); pub const RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT: Self = Self(0b100_0000_0000_0000_0000_0000); pub const RAY_TRACING_OPACITY_MICROMAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000); pub const COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000); pub const DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000); pub const NO_PROTECTED_ACCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000); pub const PROTECTED_ACCESS_ONLY_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000); pub const RAY_TRACING_DISPLACEMENT_MICROMAP_NV: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000); pub const DESCRIPTOR_BUFFER_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_maintenance5'"] impl StructureType { pub const PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR: Self = Self(1_000_470_000); pub const PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR: Self = Self(1_000_470_001); pub const RENDERING_AREA_INFO_KHR: Self = Self(1_000_470_003); pub const DEVICE_IMAGE_SUBRESOURCE_INFO_KHR: Self = Self(1_000_470_004); pub const SUBRESOURCE_LAYOUT_2_KHR: Self = Self(1_000_338_002); pub const IMAGE_SUBRESOURCE_2_KHR: Self = Self(1_000_338_003); pub const PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR: Self = Self(1_000_470_005); pub const BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR: Self = Self(1_000_470_006); } #[doc = "Generated from 'VK_KHR_ray_tracing_position_fetch'"] impl BuildAccelerationStructureFlagsKHR { pub const ALLOW_DATA_ACCESS: Self = Self(0b1000_0000_0000); } #[doc = "Generated from 'VK_KHR_ray_tracing_position_fetch'"] impl StructureType { pub const PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR: Self = Self(1_000_481_000); } #[doc = "Generated from 'VK_EXT_shader_object'"] impl ObjectType { pub const SHADER_EXT: Self = Self(1_000_482_000); } #[doc = "Generated from 'VK_EXT_shader_object'"] impl Result { pub const INCOMPATIBLE_SHADER_BINARY_EXT: Self = Self(1_000_482_000); } #[doc = "Generated from 'VK_EXT_shader_object'"] impl ShaderCreateFlagsEXT { pub const ALLOW_VARYING_SUBGROUP_SIZE: Self = Self(0b10); pub const REQUIRE_FULL_SUBGROUPS: Self = Self(0b100); pub const NO_TASK_SHADER: Self = Self(0b1000); pub const DISPATCH_BASE: Self = Self(0b1_0000); pub const FRAGMENT_SHADING_RATE_ATTACHMENT: Self = Self(0b10_0000); pub const FRAGMENT_DENSITY_MAP_ATTACHMENT: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_EXT_shader_object'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT: Self = Self(1_000_482_000); pub const PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT: Self = Self(1_000_482_001); pub const SHADER_CREATE_INFO_EXT: Self = Self(1_000_482_002); pub const SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT: Self = 
Self::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO; } #[doc = "Generated from 'VK_QCOM_tile_properties'"] impl StructureType { pub const PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM: Self = Self(1_000_484_000); pub const TILE_PROPERTIES_QCOM: Self = Self(1_000_484_001); } #[doc = "Generated from 'VK_SEC_amigo_profiling'"] impl StructureType { pub const PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC: Self = Self(1_000_485_000); pub const AMIGO_PROFILING_SUBMIT_INFO_SEC: Self = Self(1_000_485_001); } #[doc = "Generated from 'VK_QCOM_multiview_per_view_viewports'"] impl StructureType { pub const PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM: Self = Self(1_000_488_000); } #[doc = "Generated from 'VK_NV_ray_tracing_invocation_reorder'"] impl StructureType { pub const PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV: Self = Self(1_000_490_000); pub const PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV: Self = Self(1_000_490_001); } #[doc = "Generated from 'VK_NV_extended_sparse_address_space'"] impl StructureType { pub const PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV: Self = Self(1_000_492_000); pub const PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV: Self = Self(1_000_492_001); } #[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"] impl DescriptorPoolCreateFlags { pub const HOST_ONLY_EXT: Self = Self(0b100); } #[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"] impl DescriptorSetLayoutCreateFlags { pub const HOST_ONLY_POOL_EXT: Self = Self(0b100); } #[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"] impl DescriptorType { pub const MUTABLE_EXT: Self = Self(1_000_351_000); } #[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"] impl StructureType { pub const PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT: Self = Self(1_000_351_000); pub const MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT: Self = Self(1_000_351_002); } #[doc = "Generated from 'VK_EXT_layer_settings'"] impl StructureType { pub const LAYER_SETTINGS_CREATE_INFO_EXT: Self = Self(1_000_496_000); } #[doc = "Generated from 'VK_ARM_shader_core_builtins'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM: Self = Self(1_000_497_000); pub const PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM: Self = Self(1_000_497_001); } #[doc = "Generated from 'VK_EXT_pipeline_library_group_handles'"] impl StructureType { pub const PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT: Self = Self(1_000_498_000); } #[doc = "Generated from 'VK_EXT_dynamic_rendering_unused_attachments'"] impl StructureType { pub const PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT: Self = Self(1_000_499_000); } #[doc = "Generated from 'VK_NV_low_latency2'"] impl StructureType { pub const LATENCY_SLEEP_MODE_INFO_NV: Self = Self(1_000_505_000); pub const LATENCY_SLEEP_INFO_NV: Self = Self(1_000_505_001); pub const SET_LATENCY_MARKER_INFO_NV: Self = Self(1_000_505_002); pub const GET_LATENCY_MARKER_INFO_NV: Self = Self(1_000_505_003); pub const LATENCY_TIMINGS_FRAME_REPORT_NV: Self = Self(1_000_505_004); pub const LATENCY_SUBMISSION_PRESENT_ID_NV: Self = Self(1_000_505_005); pub const OUT_OF_BAND_QUEUE_TYPE_INFO_NV: Self = Self(1_000_505_006); pub const SWAPCHAIN_LATENCY_CREATE_INFO_NV: Self = Self(1_000_505_007); pub const LATENCY_SURFACE_CAPABILITIES_NV: Self = Self(1_000_505_008); } #[doc = "Generated from 'VK_KHR_cooperative_matrix'"] impl StructureType { pub const PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR: 
Self = Self(1_000_506_000); pub const COOPERATIVE_MATRIX_PROPERTIES_KHR: Self = Self(1_000_506_001); pub const PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR: Self = Self(1_000_506_002); } #[doc = "Generated from 'VK_QCOM_multiview_per_view_render_areas'"] impl StructureType { pub const PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM: Self = Self(1_000_510_000); pub const MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM: Self = Self(1_000_510_001); } #[doc = "Generated from 'VK_KHR_video_decode_av1'"] impl StructureType { pub const VIDEO_DECODE_AV1_CAPABILITIES_KHR: Self = Self(1_000_512_000); pub const VIDEO_DECODE_AV1_PICTURE_INFO_KHR: Self = Self(1_000_512_001); pub const VIDEO_DECODE_AV1_PROFILE_INFO_KHR: Self = Self(1_000_512_003); pub const VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_512_004); pub const VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR: Self = Self(1_000_512_005); } #[doc = "Generated from 'VK_KHR_video_decode_av1'"] impl VideoCodecOperationFlagsKHR { pub const DECODE_AV1: Self = Self(0b100); } #[doc = "Generated from 'VK_KHR_video_maintenance1'"] impl BufferCreateFlags { pub const VIDEO_PROFILE_INDEPENDENT_KHR: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_KHR_video_maintenance1'"] impl ImageCreateFlags { pub const VIDEO_PROFILE_INDEPENDENT_KHR: Self = Self(0b1_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_KHR_video_maintenance1'"] impl StructureType { pub const PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR: Self = Self(1_000_515_000); pub const VIDEO_INLINE_QUERY_INFO_KHR: Self = Self(1_000_515_001); } #[doc = "Generated from 'VK_KHR_video_maintenance1'"] impl VideoSessionCreateFlagsKHR { pub const INLINE_QUERIES: Self = Self(0b100); } #[doc = "Generated from 'VK_NV_per_stage_descriptor_set'"] impl DescriptorSetLayoutCreateFlags { pub const PER_STAGE_NV: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_NV_per_stage_descriptor_set'"] impl StructureType { pub const PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV: Self = Self(1_000_516_000); } #[doc = "Generated from 'VK_QCOM_image_processing2'"] impl StructureType { pub const PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM: Self = Self(1_000_518_000); pub const PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM: Self = Self(1_000_518_001); pub const SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM: Self = Self(1_000_518_002); } #[doc = "Generated from 'VK_QCOM_filter_cubic_weights'"] impl StructureType { pub const SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM: Self = Self(1_000_519_000); pub const PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM: Self = Self(1_000_519_001); pub const BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM: Self = Self(1_000_519_002); } #[doc = "Generated from 'VK_QCOM_ycbcr_degamma'"] impl StructureType { pub const PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM: Self = Self(1_000_520_000); pub const SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM: Self = Self(1_000_520_001); } #[doc = "Generated from 'VK_QCOM_filter_cubic_clamp'"] impl SamplerReductionMode { pub const WEIGHTED_AVERAGE_RANGECLAMP_QCOM: Self = Self(1_000_521_000); } #[doc = "Generated from 'VK_QCOM_filter_cubic_clamp'"] impl StructureType { pub const PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM: Self = Self(1_000_521_000); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_dynamic_state'"] impl DynamicState { pub const ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT: Self = Self(1_000_524_000); } #[doc = "Generated from 'VK_EXT_attachment_feedback_loop_dynamic_state'"] impl 
StructureType { pub const PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT: Self = Self(1_000_524_000); } #[doc = "Generated from 'VK_KHR_vertex_attribute_divisor'"] impl StructureType { pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR: Self = Self(1_000_525_000); pub const PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR: Self = Self(1_000_190_001); pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR: Self = Self(1_000_190_002); } #[doc = "Generated from 'VK_KHR_load_store_op_none'"] impl AttachmentLoadOp { pub const NONE_KHR: Self = Self(1_000_400_000); } #[doc = "Generated from 'VK_KHR_shader_float_controls2'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR: Self = Self(1_000_528_000); } #[doc = "Generated from 'VK_QNX_external_memory_screen_buffer'"] impl ExternalMemoryHandleTypeFlags { pub const SCREEN_BUFFER_QNX: Self = Self(0b100_0000_0000_0000); } #[doc = "Generated from 'VK_QNX_external_memory_screen_buffer'"] impl StructureType { pub const SCREEN_BUFFER_PROPERTIES_QNX: Self = Self(1_000_529_000); pub const SCREEN_BUFFER_FORMAT_PROPERTIES_QNX: Self = Self(1_000_529_001); pub const IMPORT_SCREEN_BUFFER_INFO_QNX: Self = Self(1_000_529_002); pub const EXTERNAL_FORMAT_QNX: Self = Self(1_000_529_003); pub const PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX: Self = Self(1_000_529_004); } #[doc = "Generated from 'VK_MSFT_layered_driver'"] impl StructureType { pub const PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT: Self = Self(1_000_530_000); } #[doc = "Generated from 'VK_KHR_index_type_uint8'"] impl IndexType { pub const UINT8_KHR: Self = Self(1_000_265_000); } #[doc = "Generated from 'VK_KHR_index_type_uint8'"] impl StructureType { pub const PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR: Self = Self(1_000_265_000); } #[doc = "Generated from 'VK_KHR_line_rasterization'"] impl DynamicState { pub const LINE_STIPPLE_KHR: Self = Self(1_000_259_000); } #[doc = "Generated from 'VK_KHR_line_rasterization'"] impl StructureType { pub const PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR: Self = Self(1_000_259_000); pub const PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR: Self = Self(1_000_259_001); pub const PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR: Self = Self(1_000_259_002); } #[doc = "Generated from 'VK_KHR_calibrated_timestamps'"] impl StructureType { pub const CALIBRATED_TIMESTAMP_INFO_KHR: Self = Self(1_000_184_000); } #[doc = "Generated from 'VK_KHR_shader_expect_assume'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR: Self = Self(1_000_544_000); } #[doc = "Generated from 'VK_KHR_maintenance6'"] impl StructureType { pub const PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR: Self = Self(1_000_545_000); pub const PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR: Self = Self(1_000_545_001); pub const BIND_MEMORY_STATUS_KHR: Self = Self(1_000_545_002); pub const BIND_DESCRIPTOR_SETS_INFO_KHR: Self = Self(1_000_545_003); pub const PUSH_CONSTANTS_INFO_KHR: Self = Self(1_000_545_004); pub const PUSH_DESCRIPTOR_SET_INFO_KHR: Self = Self(1_000_545_005); pub const PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR: Self = Self(1_000_545_006); pub const SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT: Self = Self(1_000_545_007); pub const BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT: Self = Self(1_000_545_008); } #[doc = "Generated from 'VK_NV_descriptor_pool_overallocation'"] impl DescriptorPoolCreateFlags { pub const ALLOW_OVERALLOCATION_SETS_NV: Self = Self(0b1000); pub 
const ALLOW_OVERALLOCATION_POOLS_NV: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_NV_descriptor_pool_overallocation'"] impl StructureType { pub const PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV: Self = Self(1_000_546_000); } #[doc = "Generated from 'VK_NV_raw_access_chains'"] impl StructureType { pub const PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV: Self = Self(1_000_555_000); } #[doc = "Generated from 'VK_NV_shader_atomic_float16_vector'"] impl StructureType { pub const PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV: Self = Self(1_000_563_000); } #[doc = "Generated from 'VK_NV_ray_tracing_validation'"] impl StructureType { pub const PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV: Self = Self(1_000_568_000); } pub const KHR_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_surface\0") }; pub const KHR_SURFACE_SPEC_VERSION: u32 = 25u32; #[allow(non_camel_case_types)] pub type PFN_vkDestroySurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, surface: SurfaceKHR, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfaceSupportKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, surface: SurfaceKHR, p_supported: *mut Bool32, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, surface: SurfaceKHR, p_surface_capabilities: *mut SurfaceCapabilitiesKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfaceFormatsKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, surface: SurfaceKHR, p_surface_format_count: *mut u32, p_surface_formats: *mut SurfaceFormatKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfacePresentModesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, surface: SurfaceKHR, p_present_mode_count: *mut u32, p_present_modes: *mut PresentModeKHR, ) -> Result; pub const KHR_SWAPCHAIN_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_swapchain\0") }; pub const KHR_SWAPCHAIN_SPEC_VERSION: u32 = 70u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDevicePresentRectanglesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, surface: SurfaceKHR, p_rect_count: *mut u32, p_rects: *mut Rect2D, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateSwapchainKHR = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const SwapchainCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_swapchain: *mut SwapchainKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroySwapchainKHR = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetSwapchainImagesKHR = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_swapchain_image_count: *mut u32, p_swapchain_images: *mut Image, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAcquireNextImageKHR = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, timeout: u64, semaphore: Semaphore, fence: Fence, p_image_index: *mut u32, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkQueuePresentKHR = unsafe extern "system" fn(queue: Queue, p_present_info: *const PresentInfoKHR<'_>) -> Result; 
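// ---------------------------------------------------------------------------
// Illustrative, hand-written sketch (not part of the generated bindings):
// shows how the raw `PFN_*` surface/swapchain function pointers declared above
// might be invoked once they have been resolved through vkGetInstanceProcAddr /
// vkGetDeviceProcAddr (or ash's higher-level extension wrappers). The function
// names and parameter choices below are assumptions made purely for this
// example; only the `PFN_*` types, the handle types, `Bool32` and `Result`
// come from this module.
#[allow(dead_code)]
unsafe fn query_surface_support_example(
    get_support: PFN_vkGetPhysicalDeviceSurfaceSupportKHR,
    physical_device: PhysicalDevice,
    queue_family_index: u32,
    surface: SurfaceKHR,
) -> core::result::Result<bool, Result> {
    let mut supported: Bool32 = 0;
    // The entry point writes VK_TRUE/VK_FALSE through the out-pointer and
    // reports success or failure through the returned VkResult.
    let status = get_support(physical_device, queue_family_index, surface, &mut supported);
    if status == Result::SUCCESS {
        Ok(supported != 0)
    } else {
        Err(status)
    }
}

#[allow(dead_code)]
unsafe fn acquire_next_image_example(
    acquire: PFN_vkAcquireNextImageKHR,
    device: crate::vk::Device,
    swapchain: SwapchainKHR,
    semaphore: Semaphore,
    fence: Fence,
) -> core::result::Result<u32, Result> {
    let mut image_index = 0u32;
    // u64::MAX requests an unbounded wait; SUBOPTIMAL_KHR still delivers a
    // usable image index, so it is treated as success here.
    let status = acquire(device, swapchain, u64::MAX, semaphore, fence, &mut image_index);
    if status == Result::SUCCESS || status == Result::SUBOPTIMAL_KHR {
        Ok(image_index)
    } else {
        Err(status)
    }
}
// ---------------------------------------------------------------------------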
#[allow(non_camel_case_types)] pub type PFN_vkGetDeviceGroupPresentCapabilitiesKHR = unsafe extern "system" fn( device: crate::vk::Device, p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceGroupSurfacePresentModesKHR = unsafe extern "system" fn( device: crate::vk::Device, surface: SurfaceKHR, p_modes: *mut DeviceGroupPresentModeFlagsKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAcquireNextImage2KHR = unsafe extern "system" fn( device: crate::vk::Device, p_acquire_info: *const AcquireNextImageInfoKHR<'_>, p_image_index: *mut u32, ) -> Result; pub const KHR_DISPLAY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_display\0") }; pub const KHR_DISPLAY_SPEC_VERSION: u32 = 23u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceDisplayPropertiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut DisplayPropertiesKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut DisplayPlanePropertiesKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDisplayPlaneSupportedDisplaysKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, plane_index: u32, p_display_count: *mut u32, p_displays: *mut DisplayKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDisplayModePropertiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, display: DisplayKHR, p_property_count: *mut u32, p_properties: *mut DisplayModePropertiesKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateDisplayModeKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, display: DisplayKHR, p_create_info: *const DisplayModeCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_mode: *mut DisplayModeKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDisplayPlaneCapabilitiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, mode: DisplayModeKHR, plane_index: u32, p_capabilities: *mut DisplayPlaneCapabilitiesKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateDisplayPlaneSurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const DisplaySurfaceCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const KHR_DISPLAY_SWAPCHAIN_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_display_swapchain\0") }; pub const KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION: u32 = 10u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateSharedSwapchainsKHR = unsafe extern "system" fn( device: crate::vk::Device, swapchain_count: u32, p_create_infos: *const SwapchainCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_swapchains: *mut SwapchainKHR, ) -> Result; pub const KHR_XLIB_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_xlib_surface\0") }; pub const KHR_XLIB_SURFACE_SPEC_VERSION: u32 = 6u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateXlibSurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const XlibSurfaceCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; #[allow(non_camel_case_types)] pub type 
PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, dpy: *mut Display, visual_id: VisualID, ) -> Bool32; pub const KHR_XCB_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_xcb_surface\0") }; pub const KHR_XCB_SURFACE_SPEC_VERSION: u32 = 6u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateXcbSurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const XcbSurfaceCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, connection: *mut xcb_connection_t, visual_id: xcb_visualid_t, ) -> Bool32; pub const KHR_WAYLAND_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_wayland_surface\0") }; pub const KHR_WAYLAND_SURFACE_SPEC_VERSION: u32 = 6u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateWaylandSurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const WaylandSurfaceCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, display: *mut wl_display, ) -> Bool32; pub const KHR_ANDROID_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_android_surface\0") }; pub const KHR_ANDROID_SURFACE_SPEC_VERSION: u32 = 6u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateAndroidSurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const AndroidSurfaceCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const KHR_WIN32_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_win32_surface\0") }; pub const KHR_WIN32_SURFACE_SPEC_VERSION: u32 = 6u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateWin32SurfaceKHR = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const Win32SurfaceCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR = unsafe extern "system" fn(physical_device: PhysicalDevice, queue_family_index: u32) -> Bool32; pub const ANDROID_NATIVE_BUFFER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ANDROID_native_buffer\0") }; pub const ANDROID_NATIVE_BUFFER_SPEC_VERSION: u32 = 8u32; #[allow(non_camel_case_types)] pub type PFN_vkGetSwapchainGrallocUsageANDROID = unsafe extern "system" fn( device: crate::vk::Device, format: Format, image_usage: ImageUsageFlags, gralloc_usage: *mut c_int, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAcquireImageANDROID = unsafe extern "system" fn( device: crate::vk::Device, image: Image, native_fence_fd: c_int, semaphore: Semaphore, fence: Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkQueueSignalReleaseImageANDROID = unsafe extern "system" fn( queue: Queue, wait_semaphore_count: u32, p_wait_semaphores: *const Semaphore, image: Image, p_native_fence_fd: *mut c_int, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetSwapchainGrallocUsage2ANDROID = unsafe 
extern "system" fn( device: crate::vk::Device, format: Format, image_usage: ImageUsageFlags, swapchain_image_usage: SwapchainImageUsageFlagsANDROID, gralloc_consumer_usage: *mut u64, gralloc_producer_usage: *mut u64, ) -> Result; pub const EXT_DEBUG_REPORT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_debug_report\0") }; pub const EXT_DEBUG_REPORT_SPEC_VERSION: u32 = 10u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateDebugReportCallbackEXT = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const DebugReportCallbackCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_callback: *mut DebugReportCallbackEXT, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyDebugReportCallbackEXT = unsafe extern "system" fn( instance: crate::vk::Instance, callback: DebugReportCallbackEXT, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkDebugReportMessageEXT = unsafe extern "system" fn( instance: crate::vk::Instance, flags: DebugReportFlagsEXT, object_type: DebugReportObjectTypeEXT, object: u64, location: usize, message_code: i32, p_layer_prefix: *const c_char, p_message: *const c_char, ); pub const NV_GLSL_SHADER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_glsl_shader\0") }; pub const NV_GLSL_SHADER_SPEC_VERSION: u32 = 1u32; pub const EXT_DEPTH_RANGE_UNRESTRICTED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_range_unrestricted\0") }; pub const EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION: u32 = 1u32; pub const KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_sampler_mirror_clamp_to_edge\0") }; pub const KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION: u32 = 3u32; pub const IMG_FILTER_CUBIC_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_IMG_filter_cubic\0") }; pub const IMG_FILTER_CUBIC_SPEC_VERSION: u32 = 1u32; pub const AMD_RASTERIZATION_ORDER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_rasterization_order\0") }; pub const AMD_RASTERIZATION_ORDER_SPEC_VERSION: u32 = 1u32; pub const AMD_SHADER_TRINARY_MINMAX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_trinary_minmax\0") }; pub const AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION: u32 = 1u32; pub const AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_explicit_vertex_parameter\0") }; pub const AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION: u32 = 1u32; pub const EXT_DEBUG_MARKER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_debug_marker\0") }; pub const EXT_DEBUG_MARKER_SPEC_VERSION: u32 = 4u32; #[allow(non_camel_case_types)] pub type PFN_vkDebugMarkerSetObjectTagEXT = unsafe extern "system" fn( device: crate::vk::Device, p_tag_info: *const DebugMarkerObjectTagInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDebugMarkerSetObjectNameEXT = unsafe extern "system" fn( device: crate::vk::Device, p_name_info: *const DebugMarkerObjectNameInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdDebugMarkerBeginEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDebugMarkerEndEXT = unsafe extern "system" fn(command_buffer: CommandBuffer); #[allow(non_camel_case_types)] pub type PFN_vkCmdDebugMarkerInsertEXT = unsafe extern "system" fn( 
command_buffer: CommandBuffer, p_marker_info: *const DebugMarkerMarkerInfoEXT<'_>, ); pub const KHR_VIDEO_QUEUE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_queue\0") }; pub const KHR_VIDEO_QUEUE_SPEC_VERSION: u32 = 8u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_video_profile: *const VideoProfileInfoKHR<'_>, p_capabilities: *mut VideoCapabilitiesKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_video_format_info: *const PhysicalDeviceVideoFormatInfoKHR<'_>, p_video_format_property_count: *mut u32, p_video_format_properties: *mut VideoFormatPropertiesKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateVideoSessionKHR = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const VideoSessionCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_video_session: *mut VideoSessionKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyVideoSessionKHR = unsafe extern "system" fn( device: crate::vk::Device, video_session: VideoSessionKHR, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetVideoSessionMemoryRequirementsKHR = unsafe extern "system" fn( device: crate::vk::Device, video_session: VideoSessionKHR, p_memory_requirements_count: *mut u32, p_memory_requirements: *mut VideoSessionMemoryRequirementsKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkBindVideoSessionMemoryKHR = unsafe extern "system" fn( device: crate::vk::Device, video_session: VideoSessionKHR, bind_session_memory_info_count: u32, p_bind_session_memory_infos: *const BindVideoSessionMemoryInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateVideoSessionParametersKHR = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const VideoSessionParametersCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_video_session_parameters: *mut VideoSessionParametersKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkUpdateVideoSessionParametersKHR = unsafe extern "system" fn( device: crate::vk::Device, video_session_parameters: VideoSessionParametersKHR, p_update_info: *const VideoSessionParametersUpdateInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyVideoSessionParametersKHR = unsafe extern "system" fn( device: crate::vk::Device, video_session_parameters: VideoSessionParametersKHR, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginVideoCodingKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_begin_info: *const VideoBeginCodingInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndVideoCodingKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_end_coding_info: *const VideoEndCodingInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdControlVideoCodingKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_coding_control_info: *const VideoCodingControlInfoKHR<'_>, ); pub const KHR_VIDEO_DECODE_QUEUE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_queue\0") }; pub const KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION: u32 = 8u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDecodeVideoKHR = unsafe extern "system" fn( 
command_buffer: CommandBuffer, p_decode_info: *const VideoDecodeInfoKHR<'_>, ); pub const AMD_GCN_SHADER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_gcn_shader\0") }; pub const AMD_GCN_SHADER_SPEC_VERSION: u32 = 1u32; pub const NV_DEDICATED_ALLOCATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_dedicated_allocation\0") }; pub const NV_DEDICATED_ALLOCATION_SPEC_VERSION: u32 = 1u32; pub const EXT_TRANSFORM_FEEDBACK_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_transform_feedback\0") }; pub const EXT_TRANSFORM_FEEDBACK_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindTransformFeedbackBuffersEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_binding: u32, binding_count: u32, p_buffers: *const Buffer, p_offsets: *const DeviceSize, p_sizes: *const DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginTransformFeedbackEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_counter_buffer: u32, counter_buffer_count: u32, p_counter_buffers: *const Buffer, p_counter_buffer_offsets: *const DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndTransformFeedbackEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_counter_buffer: u32, counter_buffer_count: u32, p_counter_buffers: *const Buffer, p_counter_buffer_offsets: *const DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginQueryIndexedEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, query_pool: QueryPool, query: u32, flags: QueryControlFlags, index: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndQueryIndexedEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, query_pool: QueryPool, query: u32, index: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawIndirectByteCountEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, instance_count: u32, first_instance: u32, counter_buffer: Buffer, counter_buffer_offset: DeviceSize, counter_offset: u32, vertex_stride: u32, ); pub const NVX_BINARY_IMPORT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NVX_binary_import\0") }; pub const NVX_BINARY_IMPORT_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateCuModuleNVX = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const CuModuleCreateInfoNVX<'_>, p_allocator: *const AllocationCallbacks<'_>, p_module: *mut CuModuleNVX, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateCuFunctionNVX = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const CuFunctionCreateInfoNVX<'_>, p_allocator: *const AllocationCallbacks<'_>, p_function: *mut CuFunctionNVX, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyCuModuleNVX = unsafe extern "system" fn( device: crate::vk::Device, module: CuModuleNVX, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkDestroyCuFunctionNVX = unsafe extern "system" fn( device: crate::vk::Device, function: CuFunctionNVX, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCuLaunchKernelNVX = unsafe extern "system" fn( command_buffer: CommandBuffer, p_launch_info: *const CuLaunchInfoNVX<'_>, ); pub const NVX_IMAGE_VIEW_HANDLE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NVX_image_view_handle\0") }; pub const NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub 
type PFN_vkGetImageViewHandleNVX = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const ImageViewHandleInfoNVX<'_>, ) -> u32; #[allow(non_camel_case_types)] pub type PFN_vkGetImageViewAddressNVX = unsafe extern "system" fn( device: crate::vk::Device, image_view: ImageView, p_properties: *mut ImageViewAddressPropertiesNVX<'_>, ) -> Result; pub const AMD_DRAW_INDIRECT_COUNT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_draw_indirect_count\0") }; pub const AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawIndirectCount = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, count_buffer: Buffer, count_buffer_offset: DeviceSize, max_draw_count: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawIndexedIndirectCount = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, count_buffer: Buffer, count_buffer_offset: DeviceSize, max_draw_count: u32, stride: u32, ); pub const AMD_NEGATIVE_VIEWPORT_HEIGHT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_negative_viewport_height\0") }; pub const AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION: u32 = 1u32; pub const AMD_GPU_SHADER_HALF_FLOAT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_gpu_shader_half_float\0") }; pub const AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION: u32 = 2u32; pub const AMD_SHADER_BALLOT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_ballot\0") }; pub const AMD_SHADER_BALLOT_SPEC_VERSION: u32 = 1u32; pub const KHR_VIDEO_ENCODE_H264_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_encode_h264\0") }; pub const KHR_VIDEO_ENCODE_H264_SPEC_VERSION: u32 = 14u32; pub const KHR_VIDEO_ENCODE_H265_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_encode_h265\0") }; pub const KHR_VIDEO_ENCODE_H265_SPEC_VERSION: u32 = 14u32; pub const KHR_VIDEO_DECODE_H264_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_h264\0") }; pub const KHR_VIDEO_DECODE_H264_SPEC_VERSION: u32 = 9u32; pub const AMD_TEXTURE_GATHER_BIAS_LOD_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_texture_gather_bias_lod\0") }; pub const AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION: u32 = 1u32; pub const AMD_SHADER_INFO_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_info\0") }; pub const AMD_SHADER_INFO_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetShaderInfoAMD = unsafe extern "system" fn( device: crate::vk::Device, pipeline: Pipeline, shader_stage: ShaderStageFlags, info_type: ShaderInfoTypeAMD, p_info_size: *mut usize, p_info: *mut c_void, ) -> Result; pub const KHR_DYNAMIC_RENDERING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_dynamic_rendering\0") }; pub const KHR_DYNAMIC_RENDERING_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginRendering = unsafe extern "system" fn( command_buffer: CommandBuffer, p_rendering_info: *const RenderingInfo<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndRendering = unsafe extern "system" fn(command_buffer: CommandBuffer); pub const AMD_SHADER_IMAGE_LOAD_STORE_LOD_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_image_load_store_lod\0") }; pub const AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION: u32 = 1u32; pub const GGP_STREAM_DESCRIPTOR_SURFACE_NAME: &CStr = 
unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GGP_stream_descriptor_surface\0") }; pub const GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateStreamDescriptorSurfaceGGP = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const StreamDescriptorSurfaceCreateInfoGGP<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const NV_CORNER_SAMPLED_IMAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_corner_sampled_image\0") }; pub const NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION: u32 = 2u32; pub const KHR_MULTIVIEW_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_multiview\0") }; pub const KHR_MULTIVIEW_SPEC_VERSION: u32 = 1u32; pub const IMG_FORMAT_PVRTC_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_IMG_format_pvrtc\0") }; pub const IMG_FORMAT_PVRTC_SPEC_VERSION: u32 = 1u32; pub const NV_EXTERNAL_MEMORY_CAPABILITIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory_capabilities\0") }; pub const NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV = unsafe extern "system" fn( physical_device: PhysicalDevice, format: Format, ty: ImageType, tiling: ImageTiling, usage: ImageUsageFlags, flags: ImageCreateFlags, external_handle_type: ExternalMemoryHandleTypeFlagsNV, p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV, ) -> Result; pub const NV_EXTERNAL_MEMORY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory\0") }; pub const NV_EXTERNAL_MEMORY_SPEC_VERSION: u32 = 1u32; pub const NV_EXTERNAL_MEMORY_WIN32_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory_win32\0") }; pub const NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryWin32HandleNV = unsafe extern "system" fn( device: crate::vk::Device, memory: DeviceMemory, handle_type: ExternalMemoryHandleTypeFlagsNV, p_handle: *mut HANDLE, ) -> Result; pub const NV_WIN32_KEYED_MUTEX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_win32_keyed_mutex\0") }; pub const NV_WIN32_KEYED_MUTEX_SPEC_VERSION: u32 = 2u32; pub const KHR_GET_PHYSICAL_DEVICE_PROPERTIES2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_physical_device_properties2\0") }; pub const KHR_GET_PHYSICAL_DEVICE_PROPERTIES2_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceFeatures2 = unsafe extern "system" fn( physical_device: PhysicalDevice, p_features: *mut PhysicalDeviceFeatures2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceProperties2 = unsafe extern "system" fn( physical_device: PhysicalDevice, p_properties: *mut PhysicalDeviceProperties2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceFormatProperties2 = unsafe extern "system" fn( physical_device: PhysicalDevice, format: Format, p_format_properties: *mut FormatProperties2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceImageFormatProperties2 = unsafe extern "system" fn( physical_device: PhysicalDevice, p_image_format_info: *const PhysicalDeviceImageFormatInfo2<'_>, p_image_format_properties: *mut ImageFormatProperties2<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceQueueFamilyProperties2 = unsafe extern "system" 
fn( physical_device: PhysicalDevice, p_queue_family_property_count: *mut u32, p_queue_family_properties: *mut QueueFamilyProperties2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceMemoryProperties2 = unsafe extern "system" fn( physical_device: PhysicalDevice, p_memory_properties: *mut PhysicalDeviceMemoryProperties2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 = unsafe extern "system" fn( physical_device: PhysicalDevice, p_format_info: *const PhysicalDeviceSparseImageFormatInfo2<'_>, p_property_count: *mut u32, p_properties: *mut SparseImageFormatProperties2<'_>, ); pub const KHR_DEVICE_GROUP_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_device_group\0") }; pub const KHR_DEVICE_GROUP_SPEC_VERSION: u32 = 4u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceGroupPeerMemoryFeatures = unsafe extern "system" fn( device: crate::vk::Device, heap_index: u32, local_device_index: u32, remote_device_index: u32, p_peer_memory_features: *mut PeerMemoryFeatureFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDeviceMask = unsafe extern "system" fn(command_buffer: CommandBuffer, device_mask: u32); #[allow(non_camel_case_types)] pub type PFN_vkCmdDispatchBase = unsafe extern "system" fn( command_buffer: CommandBuffer, base_group_x: u32, base_group_y: u32, base_group_z: u32, group_count_x: u32, group_count_y: u32, group_count_z: u32, ); pub const EXT_VALIDATION_FLAGS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_validation_flags\0") }; pub const EXT_VALIDATION_FLAGS_SPEC_VERSION: u32 = 3u32; pub const NN_VI_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NN_vi_surface\0") }; pub const NN_VI_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateViSurfaceNN = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const ViSurfaceCreateInfoNN<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const KHR_SHADER_DRAW_PARAMETERS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_draw_parameters\0") }; pub const KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_SUBGROUP_BALLOT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_subgroup_ballot\0") }; pub const EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_SUBGROUP_VOTE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_subgroup_vote\0") }; pub const EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION: u32 = 1u32; pub const EXT_TEXTURE_COMPRESSION_ASTC_HDR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_texture_compression_astc_hdr\0") }; pub const EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION: u32 = 1u32; pub const EXT_ASTC_DECODE_MODE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_astc_decode_mode\0") }; pub const EXT_ASTC_DECODE_MODE_SPEC_VERSION: u32 = 1u32; pub const EXT_PIPELINE_ROBUSTNESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_robustness\0") }; pub const EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION: u32 = 1u32; pub const KHR_MAINTENANCE1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance1\0") }; pub const KHR_MAINTENANCE1_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkTrimCommandPool = unsafe extern "system" fn( device: crate::vk::Device, command_pool: CommandPool, flags: 
CommandPoolTrimFlags, ); pub const KHR_DEVICE_GROUP_CREATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_device_group_creation\0") }; pub const KHR_DEVICE_GROUP_CREATION_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkEnumeratePhysicalDeviceGroups = unsafe extern "system" fn( instance: crate::vk::Instance, p_physical_device_group_count: *mut u32, p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties<'_>, ) -> Result; pub const KHR_EXTERNAL_MEMORY_CAPABILITIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory_capabilities\0") }; pub const KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceExternalBufferProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo<'_>, p_external_buffer_properties: *mut ExternalBufferProperties<'_>, ); pub const KHR_EXTERNAL_MEMORY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory\0") }; pub const KHR_EXTERNAL_MEMORY_SPEC_VERSION: u32 = 1u32; pub const KHR_EXTERNAL_MEMORY_WIN32_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory_win32\0") }; pub const KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryWin32HandleKHR = unsafe extern "system" fn( device: crate::vk::Device, p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR<'_>, p_handle: *mut HANDLE, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryWin32HandlePropertiesKHR = unsafe extern "system" fn( device: crate::vk::Device, handle_type: ExternalMemoryHandleTypeFlags, handle: HANDLE, p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR<'_>, ) -> Result; pub const KHR_EXTERNAL_MEMORY_FD_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory_fd\0") }; pub const KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryFdKHR = unsafe extern "system" fn( device: crate::vk::Device, p_get_fd_info: *const MemoryGetFdInfoKHR<'_>, p_fd: *mut c_int, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryFdPropertiesKHR = unsafe extern "system" fn( device: crate::vk::Device, handle_type: ExternalMemoryHandleTypeFlags, fd: c_int, p_memory_fd_properties: *mut MemoryFdPropertiesKHR<'_>, ) -> Result; pub const KHR_WIN32_KEYED_MUTEX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_win32_keyed_mutex\0") }; pub const KHR_WIN32_KEYED_MUTEX_SPEC_VERSION: u32 = 1u32; pub const KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore_capabilities\0") }; pub const KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceExternalSemaphoreProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo<'_>, p_external_semaphore_properties: *mut ExternalSemaphoreProperties<'_>, ); pub const KHR_EXTERNAL_SEMAPHORE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore\0") }; pub const KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION: u32 = 1u32; pub const KHR_EXTERNAL_SEMAPHORE_WIN32_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore_win32\0") }; pub const 
KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkImportSemaphoreWin32HandleKHR = unsafe extern "system" fn( device: crate::vk::Device, p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetSemaphoreWin32HandleKHR = unsafe extern "system" fn( device: crate::vk::Device, p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR<'_>, p_handle: *mut HANDLE, ) -> Result; pub const KHR_EXTERNAL_SEMAPHORE_FD_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore_fd\0") }; pub const KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkImportSemaphoreFdKHR = unsafe extern "system" fn( device: crate::vk::Device, p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetSemaphoreFdKHR = unsafe extern "system" fn( device: crate::vk::Device, p_get_fd_info: *const SemaphoreGetFdInfoKHR<'_>, p_fd: *mut c_int, ) -> Result; pub const KHR_PUSH_DESCRIPTOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_push_descriptor\0") }; pub const KHR_PUSH_DESCRIPTOR_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdPushDescriptorSetKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, layout: PipelineLayout, set: u32, descriptor_write_count: u32, p_descriptor_writes: *const WriteDescriptorSet<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPushDescriptorSetWithTemplateKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, descriptor_update_template: DescriptorUpdateTemplate, layout: PipelineLayout, set: u32, p_data: *const c_void, ); pub const EXT_CONDITIONAL_RENDERING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_conditional_rendering\0") }; pub const EXT_CONDITIONAL_RENDERING_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginConditionalRenderingEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndConditionalRenderingEXT = unsafe extern "system" fn(command_buffer: CommandBuffer); pub const KHR_SHADER_FLOAT16_INT8_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_float16_int8\0") }; pub const KHR_SHADER_FLOAT16_INT8_SPEC_VERSION: u32 = 1u32; pub const KHR_16BIT_STORAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_16bit_storage\0") }; pub const KHR_16BIT_STORAGE_SPEC_VERSION: u32 = 1u32; pub const KHR_INCREMENTAL_PRESENT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_incremental_present\0") }; pub const KHR_INCREMENTAL_PRESENT_SPEC_VERSION: u32 = 2u32; pub const KHR_DESCRIPTOR_UPDATE_TEMPLATE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_descriptor_update_template\0") }; pub const KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateDescriptorUpdateTemplate = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const DescriptorUpdateTemplateCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_descriptor_update_template: *mut DescriptorUpdateTemplate, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyDescriptorUpdateTemplate = unsafe extern 
"system" fn( device: crate::vk::Device, descriptor_update_template: DescriptorUpdateTemplate, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkUpdateDescriptorSetWithTemplate = unsafe extern "system" fn( device: crate::vk::Device, descriptor_set: DescriptorSet, descriptor_update_template: DescriptorUpdateTemplate, p_data: *const c_void, ); pub const NV_CLIP_SPACE_W_SCALING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_clip_space_w_scaling\0") }; pub const NV_CLIP_SPACE_W_SCALING_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetViewportWScalingNV = unsafe extern "system" fn( command_buffer: CommandBuffer, first_viewport: u32, viewport_count: u32, p_viewport_w_scalings: *const ViewportWScalingNV, ); pub const EXT_DIRECT_MODE_DISPLAY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_direct_mode_display\0") }; pub const EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkReleaseDisplayEXT = unsafe extern "system" fn(physical_device: PhysicalDevice, display: DisplayKHR) -> Result; pub const EXT_ACQUIRE_XLIB_DISPLAY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_acquire_xlib_display\0") }; pub const EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkAcquireXlibDisplayEXT = unsafe extern "system" fn( physical_device: PhysicalDevice, dpy: *mut Display, display: DisplayKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetRandROutputDisplayEXT = unsafe extern "system" fn( physical_device: PhysicalDevice, dpy: *mut Display, rr_output: RROutput, p_display: *mut DisplayKHR, ) -> Result; pub const EXT_DISPLAY_SURFACE_COUNTER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_display_surface_counter\0") }; pub const EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT = unsafe extern "system" fn( physical_device: PhysicalDevice, surface: SurfaceKHR, p_surface_capabilities: *mut SurfaceCapabilities2EXT<'_>, ) -> Result; pub const EXT_DISPLAY_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_display_control\0") }; pub const EXT_DISPLAY_CONTROL_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkDisplayPowerControlEXT = unsafe extern "system" fn( device: crate::vk::Device, display: DisplayKHR, p_display_power_info: *const DisplayPowerInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkRegisterDeviceEventEXT = unsafe extern "system" fn( device: crate::vk::Device, p_device_event_info: *const DeviceEventInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_fence: *mut Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkRegisterDisplayEventEXT = unsafe extern "system" fn( device: crate::vk::Device, display: DisplayKHR, p_display_event_info: *const DisplayEventInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_fence: *mut Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetSwapchainCounterEXT = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, counter: SurfaceCounterFlagsEXT, p_counter_value: *mut u64, ) -> Result; pub const GOOGLE_DISPLAY_TIMING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_display_timing\0") }; pub const GOOGLE_DISPLAY_TIMING_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type 
PFN_vkGetRefreshCycleDurationGOOGLE = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_display_timing_properties: *mut RefreshCycleDurationGOOGLE, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPastPresentationTimingGOOGLE = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_presentation_timing_count: *mut u32, p_presentation_timings: *mut PastPresentationTimingGOOGLE, ) -> Result; pub const NV_SAMPLE_MASK_OVERRIDE_COVERAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_sample_mask_override_coverage\0") }; pub const NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION: u32 = 1u32; pub const NV_GEOMETRY_SHADER_PASSTHROUGH_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_geometry_shader_passthrough\0") }; pub const NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION: u32 = 1u32; pub const NV_VIEWPORT_ARRAY2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_viewport_array2\0") }; pub const NV_VIEWPORT_ARRAY2_SPEC_VERSION: u32 = 1u32; pub const NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NVX_multiview_per_view_attributes\0") }; pub const NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION: u32 = 1u32; pub const NV_VIEWPORT_SWIZZLE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_viewport_swizzle\0") }; pub const NV_VIEWPORT_SWIZZLE_SPEC_VERSION: u32 = 1u32; pub const EXT_DISCARD_RECTANGLES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_discard_rectangles\0") }; pub const EXT_DISCARD_RECTANGLES_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDiscardRectangleEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_discard_rectangle: u32, discard_rectangle_count: u32, p_discard_rectangles: *const Rect2D, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDiscardRectangleEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, discard_rectangle_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDiscardRectangleModeEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, discard_rectangle_mode: DiscardRectangleModeEXT, ); pub const EXT_CONSERVATIVE_RASTERIZATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_conservative_rasterization\0") }; pub const EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION: u32 = 1u32; pub const EXT_DEPTH_CLIP_ENABLE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_clip_enable\0") }; pub const EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION: u32 = 1u32; pub const EXT_SWAPCHAIN_COLORSPACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_swapchain_colorspace\0") }; pub const EXT_SWAPCHAIN_COLORSPACE_SPEC_VERSION: u32 = 4u32; pub const EXT_HDR_METADATA_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_hdr_metadata\0") }; pub const EXT_HDR_METADATA_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkSetHdrMetadataEXT = unsafe extern "system" fn( device: crate::vk::Device, swapchain_count: u32, p_swapchains: *const SwapchainKHR, p_metadata: *const HdrMetadataEXT<'_>, ); pub const KHR_IMAGELESS_FRAMEBUFFER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_imageless_framebuffer\0") }; pub const KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION: u32 = 1u32; pub const KHR_CREATE_RENDERPASS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_create_renderpass2\0") }; pub const 
KHR_CREATE_RENDERPASS2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateRenderPass2 = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const RenderPassCreateInfo2<'_>, p_allocator: *const AllocationCallbacks<'_>, p_render_pass: *mut RenderPass, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginRenderPass2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_render_pass_begin: *const RenderPassBeginInfo<'_>, p_subpass_begin_info: *const SubpassBeginInfo<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdNextSubpass2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_subpass_begin_info: *const SubpassBeginInfo<'_>, p_subpass_end_info: *const SubpassEndInfo<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndRenderPass2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_subpass_end_info: *const SubpassEndInfo<'_>, ); pub const IMG_RELAXED_LINE_RASTERIZATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_IMG_relaxed_line_rasterization\0") }; pub const IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION: u32 = 1u32; pub const KHR_SHARED_PRESENTABLE_IMAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shared_presentable_image\0") }; pub const KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetSwapchainStatusKHR = unsafe extern "system" fn(device: crate::vk::Device, swapchain: SwapchainKHR) -> Result; pub const KHR_EXTERNAL_FENCE_CAPABILITIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence_capabilities\0") }; pub const KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceExternalFenceProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_external_fence_info: *const PhysicalDeviceExternalFenceInfo<'_>, p_external_fence_properties: *mut ExternalFenceProperties<'_>, ); pub const KHR_EXTERNAL_FENCE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence\0") }; pub const KHR_EXTERNAL_FENCE_SPEC_VERSION: u32 = 1u32; pub const KHR_EXTERNAL_FENCE_WIN32_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence_win32\0") }; pub const KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkImportFenceWin32HandleKHR = unsafe extern "system" fn( device: crate::vk::Device, p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetFenceWin32HandleKHR = unsafe extern "system" fn( device: crate::vk::Device, p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR<'_>, p_handle: *mut HANDLE, ) -> Result; pub const KHR_EXTERNAL_FENCE_FD_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence_fd\0") }; pub const KHR_EXTERNAL_FENCE_FD_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkImportFenceFdKHR = unsafe extern "system" fn( device: crate::vk::Device, p_import_fence_fd_info: *const ImportFenceFdInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetFenceFdKHR = unsafe extern "system" fn( device: crate::vk::Device, p_get_fd_info: *const FenceGetFdInfoKHR<'_>, p_fd: *mut c_int, ) -> Result; pub const KHR_PERFORMANCE_QUERY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_performance_query\0") }; pub const KHR_PERFORMANCE_QUERY_SPEC_VERSION: u32 = 
1u32; #[allow(non_camel_case_types)] pub type PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, p_counter_count: *mut u32, p_counters: *mut PerformanceCounterKHR<'_>, p_counter_descriptions: *mut PerformanceCounterDescriptionKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_performance_query_create_info: *const QueryPoolPerformanceCreateInfoKHR<'_>, p_num_passes: *mut u32, ); #[allow(non_camel_case_types)] pub type PFN_vkAcquireProfilingLockKHR = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const AcquireProfilingLockInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkReleaseProfilingLockKHR = unsafe extern "system" fn(device: crate::vk::Device); pub const KHR_MAINTENANCE2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance2\0") }; pub const KHR_MAINTENANCE2_SPEC_VERSION: u32 = 1u32; pub const KHR_GET_SURFACE_CAPABILITIES2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_surface_capabilities2\0") }; pub const KHR_GET_SURFACE_CAPABILITIES2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, p_surface_capabilities: *mut SurfaceCapabilities2KHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfaceFormats2KHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, p_surface_format_count: *mut u32, p_surface_formats: *mut SurfaceFormat2KHR<'_>, ) -> Result; pub const KHR_VARIABLE_POINTERS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_variable_pointers\0") }; pub const KHR_VARIABLE_POINTERS_SPEC_VERSION: u32 = 1u32; pub const KHR_GET_DISPLAY_PROPERTIES2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_display_properties2\0") }; pub const KHR_GET_DISPLAY_PROPERTIES2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceDisplayProperties2KHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut DisplayProperties2KHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut DisplayPlaneProperties2KHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDisplayModeProperties2KHR = unsafe extern "system" fn( physical_device: PhysicalDevice, display: DisplayKHR, p_property_count: *mut u32, p_properties: *mut DisplayModeProperties2KHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDisplayPlaneCapabilities2KHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_display_plane_info: *const DisplayPlaneInfo2KHR<'_>, p_capabilities: *mut DisplayPlaneCapabilities2KHR<'_>, ) -> Result; pub const MVK_IOS_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_MVK_ios_surface\0") }; pub const MVK_IOS_SURFACE_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateIOSSurfaceMVK = unsafe extern "system" fn( instance: crate::vk::Instance, 
p_create_info: *const IOSSurfaceCreateInfoMVK<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const MVK_MACOS_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_MVK_macos_surface\0") }; pub const MVK_MACOS_SURFACE_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateMacOSSurfaceMVK = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const MacOSSurfaceCreateInfoMVK<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const EXT_EXTERNAL_MEMORY_DMA_BUF_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_external_memory_dma_buf\0") }; pub const EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION: u32 = 1u32; pub const EXT_QUEUE_FAMILY_FOREIGN_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_queue_family_foreign\0") }; pub const EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION: u32 = 1u32; pub const KHR_DEDICATED_ALLOCATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_dedicated_allocation\0") }; pub const KHR_DEDICATED_ALLOCATION_SPEC_VERSION: u32 = 3u32; pub const EXT_DEBUG_UTILS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_debug_utils\0") }; pub const EXT_DEBUG_UTILS_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateDebugUtilsMessengerEXT = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const DebugUtilsMessengerCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_messenger: *mut DebugUtilsMessengerEXT, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyDebugUtilsMessengerEXT = unsafe extern "system" fn( instance: crate::vk::Instance, messenger: DebugUtilsMessengerEXT, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkSubmitDebugUtilsMessageEXT = unsafe extern "system" fn( instance: crate::vk::Instance, message_severity: DebugUtilsMessageSeverityFlagsEXT, message_types: DebugUtilsMessageTypeFlagsEXT, p_callback_data: *const DebugUtilsMessengerCallbackDataEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkSetDebugUtilsObjectNameEXT = unsafe extern "system" fn( device: crate::vk::Device, p_name_info: *const DebugUtilsObjectNameInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkSetDebugUtilsObjectTagEXT = unsafe extern "system" fn( device: crate::vk::Device, p_tag_info: *const DebugUtilsObjectTagInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkQueueBeginDebugUtilsLabelEXT = unsafe extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT<'_>); #[allow(non_camel_case_types)] pub type PFN_vkQueueEndDebugUtilsLabelEXT = unsafe extern "system" fn(queue: Queue); #[allow(non_camel_case_types)] pub type PFN_vkQueueInsertDebugUtilsLabelEXT = unsafe extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT<'_>); #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginDebugUtilsLabelEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_label_info: *const DebugUtilsLabelEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndDebugUtilsLabelEXT = unsafe extern "system" fn(command_buffer: CommandBuffer); #[allow(non_camel_case_types)] pub type PFN_vkCmdInsertDebugUtilsLabelEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_label_info: *const DebugUtilsLabelEXT<'_>, ); pub const ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_NAME: &CStr = unsafe { 
CStr::from_bytes_with_nul_unchecked(b"VK_ANDROID_external_memory_android_hardware_buffer\0") }; pub const ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION: u32 = 5u32; #[allow(non_camel_case_types)] pub type PFN_vkGetAndroidHardwareBufferPropertiesANDROID = unsafe extern "system" fn( device: crate::vk::Device, buffer: *const AHardwareBuffer, p_properties: *mut AndroidHardwareBufferPropertiesANDROID<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryAndroidHardwareBufferANDROID = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID<'_>, p_buffer: *mut *mut AHardwareBuffer, ) -> Result; pub const EXT_SAMPLER_FILTER_MINMAX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_sampler_filter_minmax\0") }; pub const EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION: u32 = 2u32; pub const KHR_STORAGE_BUFFER_STORAGE_CLASS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_storage_buffer_storage_class\0") }; pub const KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION: u32 = 1u32; pub const AMD_GPU_SHADER_INT16_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_gpu_shader_int16\0") }; pub const AMD_GPU_SHADER_INT16_SPEC_VERSION: u32 = 2u32; pub const AMDX_SHADER_ENQUEUE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMDX_shader_enqueue\0") }; pub const AMDX_SHADER_ENQUEUE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateExecutionGraphPipelinesAMDX = unsafe extern "system" fn( device: crate::vk::Device, pipeline_cache: PipelineCache, create_info_count: u32, p_create_infos: *const ExecutionGraphPipelineCreateInfoAMDX<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipelines: *mut Pipeline, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetExecutionGraphPipelineScratchSizeAMDX = unsafe extern "system" fn( device: crate::vk::Device, execution_graph: Pipeline, p_size_info: *mut ExecutionGraphPipelineScratchSizeAMDX<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetExecutionGraphPipelineNodeIndexAMDX = unsafe extern "system" fn( device: crate::vk::Device, execution_graph: Pipeline, p_node_info: *const PipelineShaderStageNodeCreateInfoAMDX<'_>, p_node_index: *mut u32, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdInitializeGraphScratchMemoryAMDX = unsafe extern "system" fn(command_buffer: CommandBuffer, scratch: DeviceAddress); #[allow(non_camel_case_types)] pub type PFN_vkCmdDispatchGraphAMDX = unsafe extern "system" fn( command_buffer: CommandBuffer, scratch: DeviceAddress, p_count_info: *const DispatchGraphCountInfoAMDX, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDispatchGraphIndirectAMDX = unsafe extern "system" fn( command_buffer: CommandBuffer, scratch: DeviceAddress, p_count_info: *const DispatchGraphCountInfoAMDX, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDispatchGraphIndirectCountAMDX = unsafe extern "system" fn( command_buffer: CommandBuffer, scratch: DeviceAddress, count_info: DeviceAddress, ); pub const AMD_MIXED_ATTACHMENT_SAMPLES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_mixed_attachment_samples\0") }; pub const AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION: u32 = 1u32; pub const AMD_SHADER_FRAGMENT_MASK_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_fragment_mask\0") }; pub const AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION: u32 = 1u32; pub const EXT_INLINE_UNIFORM_BLOCK_NAME: &CStr = unsafe { 
CStr::from_bytes_with_nul_unchecked(b"VK_EXT_inline_uniform_block\0") }; pub const EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_STENCIL_EXPORT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_stencil_export\0") }; pub const EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION: u32 = 1u32; pub const EXT_SAMPLE_LOCATIONS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_sample_locations\0") }; pub const EXT_SAMPLE_LOCATIONS_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT = unsafe extern "system" fn( physical_device: PhysicalDevice, samples: SampleCountFlags, p_multisample_properties: *mut MultisamplePropertiesEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetSampleLocationsEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_sample_locations_info: *const SampleLocationsInfoEXT<'_>, ); pub const KHR_RELAXED_BLOCK_LAYOUT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_relaxed_block_layout\0") }; pub const KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION: u32 = 1u32; pub const KHR_GET_MEMORY_REQUIREMENTS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_memory_requirements2\0") }; pub const KHR_GET_MEMORY_REQUIREMENTS2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetImageMemoryRequirements2 = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const ImageMemoryRequirementsInfo2<'_>, p_memory_requirements: *mut MemoryRequirements2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetBufferMemoryRequirements2 = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const BufferMemoryRequirementsInfo2<'_>, p_memory_requirements: *mut MemoryRequirements2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetImageSparseMemoryRequirements2 = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const ImageSparseMemoryRequirementsInfo2<'_>, p_sparse_memory_requirement_count: *mut u32, p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>, ); pub const KHR_IMAGE_FORMAT_LIST_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_image_format_list\0") }; pub const KHR_IMAGE_FORMAT_LIST_SPEC_VERSION: u32 = 1u32; pub const EXT_BLEND_OPERATION_ADVANCED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_blend_operation_advanced\0") }; pub const EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION: u32 = 2u32; pub const NV_FRAGMENT_COVERAGE_TO_COLOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_fragment_coverage_to_color\0") }; pub const NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION: u32 = 1u32; pub const KHR_ACCELERATION_STRUCTURE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_acceleration_structure\0") }; pub const KHR_ACCELERATION_STRUCTURE_SPEC_VERSION: u32 = 13u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateAccelerationStructureKHR = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const AccelerationStructureCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_acceleration_structure: *mut AccelerationStructureKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyAccelerationStructureKHR = unsafe extern "system" fn( device: crate::vk::Device, acceleration_structure: AccelerationStructureKHR, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBuildAccelerationStructuresKHR = unsafe extern 
"system" fn( command_buffer: CommandBuffer, info_count: u32, p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>, pp_build_range_infos: *const *const AccelerationStructureBuildRangeInfoKHR, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBuildAccelerationStructuresIndirectKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, info_count: u32, p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>, p_indirect_device_addresses: *const DeviceAddress, p_indirect_strides: *const u32, pp_max_primitive_counts: *const *const u32, ); #[allow(non_camel_case_types)] pub type PFN_vkBuildAccelerationStructuresKHR = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, info_count: u32, p_infos: *const AccelerationStructureBuildGeometryInfoKHR<'_>, pp_build_range_infos: *const *const AccelerationStructureBuildRangeInfoKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyAccelerationStructureKHR = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, p_info: *const CopyAccelerationStructureInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyAccelerationStructureToMemoryKHR = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, p_info: *const CopyAccelerationStructureToMemoryInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyMemoryToAccelerationStructureKHR = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, p_info: *const CopyMemoryToAccelerationStructureInfoKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkWriteAccelerationStructuresPropertiesKHR = unsafe extern "system" fn( device: crate::vk::Device, acceleration_structure_count: u32, p_acceleration_structures: *const AccelerationStructureKHR, query_type: QueryType, data_size: usize, p_data: *mut c_void, stride: usize, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyAccelerationStructureKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const CopyAccelerationStructureInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyAccelerationStructureToMemoryKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const CopyAccelerationStructureToMemoryInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyMemoryToAccelerationStructureKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const CopyMemoryToAccelerationStructureInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetAccelerationStructureDeviceAddressKHR = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const AccelerationStructureDeviceAddressInfoKHR<'_>, ) -> DeviceAddress; #[allow(non_camel_case_types)] pub type PFN_vkCmdWriteAccelerationStructuresPropertiesKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, acceleration_structure_count: u32, p_acceleration_structures: *const AccelerationStructureKHR, query_type: QueryType, query_pool: QueryPool, first_query: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceAccelerationStructureCompatibilityKHR = unsafe extern "system" fn( device: crate::vk::Device, p_version_info: *const AccelerationStructureVersionInfoKHR<'_>, p_compatibility: *mut AccelerationStructureCompatibilityKHR, ); #[allow(non_camel_case_types)] pub type PFN_vkGetAccelerationStructureBuildSizesKHR = unsafe extern "system" fn( device: crate::vk::Device, 
build_type: AccelerationStructureBuildTypeKHR, p_build_info: *const AccelerationStructureBuildGeometryInfoKHR<'_>, p_max_primitive_counts: *const u32, p_size_info: *mut AccelerationStructureBuildSizesInfoKHR<'_>, ); pub const KHR_RAY_TRACING_PIPELINE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_tracing_pipeline\0") }; pub const KHR_RAY_TRACING_PIPELINE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdTraceRaysKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR, p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR, p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR, p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR, width: u32, height: u32, depth: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateRayTracingPipelinesKHR = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, pipeline_cache: PipelineCache, create_info_count: u32, p_create_infos: *const RayTracingPipelineCreateInfoKHR<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipelines: *mut Pipeline, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetRayTracingShaderGroupHandlesKHR = unsafe extern "system" fn( device: crate::vk::Device, pipeline: Pipeline, first_group: u32, group_count: u32, data_size: usize, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = unsafe extern "system" fn( device: crate::vk::Device, pipeline: Pipeline, first_group: u32, group_count: u32, data_size: usize, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdTraceRaysIndirectKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR, p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR, p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR, p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR, indirect_device_address: DeviceAddress, ); #[allow(non_camel_case_types)] pub type PFN_vkGetRayTracingShaderGroupStackSizeKHR = unsafe extern "system" fn( device: crate::vk::Device, pipeline: Pipeline, group: u32, group_shader: ShaderGroupShaderKHR, ) -> DeviceSize; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRayTracingPipelineStackSizeKHR = unsafe extern "system" fn(command_buffer: CommandBuffer, pipeline_stack_size: u32); pub const KHR_RAY_QUERY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_query\0") }; pub const KHR_RAY_QUERY_SPEC_VERSION: u32 = 1u32; pub const NV_FRAMEBUFFER_MIXED_SAMPLES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_framebuffer_mixed_samples\0") }; pub const NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION: u32 = 1u32; pub const NV_FILL_RECTANGLE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_fill_rectangle\0") }; pub const NV_FILL_RECTANGLE_SPEC_VERSION: u32 = 1u32; pub const NV_SHADER_SM_BUILTINS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_sm_builtins\0") }; pub const NV_SHADER_SM_BUILTINS_SPEC_VERSION: u32 = 1u32; pub const EXT_POST_DEPTH_COVERAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_post_depth_coverage\0") }; pub const EXT_POST_DEPTH_COVERAGE_SPEC_VERSION: u32 = 1u32; pub const KHR_SAMPLER_YCBCR_CONVERSION_NAME: &CStr = unsafe { 
CStr::from_bytes_with_nul_unchecked(b"VK_KHR_sampler_ycbcr_conversion\0") }; pub const KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION: u32 = 14u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateSamplerYcbcrConversion = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const SamplerYcbcrConversionCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_ycbcr_conversion: *mut SamplerYcbcrConversion, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroySamplerYcbcrConversion = unsafe extern "system" fn( device: crate::vk::Device, ycbcr_conversion: SamplerYcbcrConversion, p_allocator: *const AllocationCallbacks<'_>, ); pub const KHR_BIND_MEMORY2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_bind_memory2\0") }; pub const KHR_BIND_MEMORY2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkBindBufferMemory2 = unsafe extern "system" fn( device: crate::vk::Device, bind_info_count: u32, p_bind_infos: *const BindBufferMemoryInfo<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkBindImageMemory2 = unsafe extern "system" fn( device: crate::vk::Device, bind_info_count: u32, p_bind_infos: *const BindImageMemoryInfo<'_>, ) -> Result; pub const EXT_IMAGE_DRM_FORMAT_MODIFIER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_drm_format_modifier\0") }; pub const EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetImageDrmFormatModifierPropertiesEXT = unsafe extern "system" fn( device: crate::vk::Device, image: Image, p_properties: *mut ImageDrmFormatModifierPropertiesEXT<'_>, ) -> Result; pub const EXT_VALIDATION_CACHE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_validation_cache\0") }; pub const EXT_VALIDATION_CACHE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateValidationCacheEXT = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const ValidationCacheCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_validation_cache: *mut ValidationCacheEXT, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyValidationCacheEXT = unsafe extern "system" fn( device: crate::vk::Device, validation_cache: ValidationCacheEXT, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkMergeValidationCachesEXT = unsafe extern "system" fn( device: crate::vk::Device, dst_cache: ValidationCacheEXT, src_cache_count: u32, p_src_caches: *const ValidationCacheEXT, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetValidationCacheDataEXT = unsafe extern "system" fn( device: crate::vk::Device, validation_cache: ValidationCacheEXT, p_data_size: *mut usize, p_data: *mut c_void, ) -> Result; pub const EXT_DESCRIPTOR_INDEXING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_descriptor_indexing\0") }; pub const EXT_DESCRIPTOR_INDEXING_SPEC_VERSION: u32 = 2u32; pub const EXT_SHADER_VIEWPORT_INDEX_LAYER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_viewport_index_layer\0") }; pub const EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION: u32 = 1u32; pub const KHR_PORTABILITY_SUBSET_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_portability_subset\0") }; pub const KHR_PORTABILITY_SUBSET_SPEC_VERSION: u32 = 1u32; pub const NV_SHADING_RATE_IMAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_shading_rate_image\0") }; pub const 
NV_SHADING_RATE_IMAGE_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindShadingRateImageNV = unsafe extern "system" fn( command_buffer: CommandBuffer, image_view: ImageView, image_layout: ImageLayout, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetViewportShadingRatePaletteNV = unsafe extern "system" fn( command_buffer: CommandBuffer, first_viewport: u32, viewport_count: u32, p_shading_rate_palettes: *const ShadingRatePaletteNV<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoarseSampleOrderNV = unsafe extern "system" fn( command_buffer: CommandBuffer, sample_order_type: CoarseSampleOrderTypeNV, custom_sample_order_count: u32, p_custom_sample_orders: *const CoarseSampleOrderCustomNV<'_>, ); pub const NV_RAY_TRACING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_ray_tracing\0") }; pub const NV_RAY_TRACING_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateAccelerationStructureNV = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const AccelerationStructureCreateInfoNV<'_>, p_allocator: *const AllocationCallbacks<'_>, p_acceleration_structure: *mut AccelerationStructureNV, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyAccelerationStructureNV = unsafe extern "system" fn( device: crate::vk::Device, acceleration_structure: AccelerationStructureNV, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetAccelerationStructureMemoryRequirementsNV = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const AccelerationStructureMemoryRequirementsInfoNV<'_>, p_memory_requirements: *mut MemoryRequirements2KHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkBindAccelerationStructureMemoryNV = unsafe extern "system" fn( device: crate::vk::Device, bind_info_count: u32, p_bind_infos: *const BindAccelerationStructureMemoryInfoNV<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdBuildAccelerationStructureNV = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const AccelerationStructureInfoNV<'_>, instance_data: Buffer, instance_offset: DeviceSize, update: Bool32, dst: AccelerationStructureNV, src: AccelerationStructureNV, scratch: Buffer, scratch_offset: DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyAccelerationStructureNV = unsafe extern "system" fn( command_buffer: CommandBuffer, dst: AccelerationStructureNV, src: AccelerationStructureNV, mode: CopyAccelerationStructureModeKHR, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdTraceRaysNV = unsafe extern "system" fn( command_buffer: CommandBuffer, raygen_shader_binding_table_buffer: Buffer, raygen_shader_binding_offset: DeviceSize, miss_shader_binding_table_buffer: Buffer, miss_shader_binding_offset: DeviceSize, miss_shader_binding_stride: DeviceSize, hit_shader_binding_table_buffer: Buffer, hit_shader_binding_offset: DeviceSize, hit_shader_binding_stride: DeviceSize, callable_shader_binding_table_buffer: Buffer, callable_shader_binding_offset: DeviceSize, callable_shader_binding_stride: DeviceSize, width: u32, height: u32, depth: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateRayTracingPipelinesNV = unsafe extern "system" fn( device: crate::vk::Device, pipeline_cache: PipelineCache, create_info_count: u32, p_create_infos: *const RayTracingPipelineCreateInfoNV<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipelines: *mut Pipeline, ) -> Result; #[allow(non_camel_case_types)] pub type 
PFN_vkGetAccelerationStructureHandleNV = unsafe extern "system" fn( device: crate::vk::Device, acceleration_structure: AccelerationStructureNV, data_size: usize, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdWriteAccelerationStructuresPropertiesNV = unsafe extern "system" fn( command_buffer: CommandBuffer, acceleration_structure_count: u32, p_acceleration_structures: *const AccelerationStructureNV, query_type: QueryType, query_pool: QueryPool, first_query: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCompileDeferredNV = unsafe extern "system" fn(device: crate::vk::Device, pipeline: Pipeline, shader: u32) -> Result; pub const NV_REPRESENTATIVE_FRAGMENT_TEST_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_representative_fragment_test\0") }; pub const NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION: u32 = 2u32; pub const KHR_MAINTENANCE3_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance3\0") }; pub const KHR_MAINTENANCE3_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDescriptorSetLayoutSupport = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const DescriptorSetLayoutCreateInfo<'_>, p_support: *mut DescriptorSetLayoutSupport<'_>, ); pub const KHR_DRAW_INDIRECT_COUNT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_draw_indirect_count\0") }; pub const KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION: u32 = 1u32; pub const EXT_FILTER_CUBIC_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_filter_cubic\0") }; pub const EXT_FILTER_CUBIC_SPEC_VERSION: u32 = 3u32; pub const QCOM_RENDER_PASS_SHADER_RESOLVE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_render_pass_shader_resolve\0") }; pub const QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION: u32 = 4u32; pub const EXT_GLOBAL_PRIORITY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_global_priority\0") }; pub const EXT_GLOBAL_PRIORITY_SPEC_VERSION: u32 = 2u32; pub const KHR_SHADER_SUBGROUP_EXTENDED_TYPES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_subgroup_extended_types\0") }; pub const KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION: u32 = 1u32; pub const KHR_8BIT_STORAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_8bit_storage\0") }; pub const KHR_8BIT_STORAGE_SPEC_VERSION: u32 = 1u32; pub const EXT_EXTERNAL_MEMORY_HOST_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_external_memory_host\0") }; pub const EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryHostPointerPropertiesEXT = unsafe extern "system" fn( device: crate::vk::Device, handle_type: ExternalMemoryHandleTypeFlags, p_host_pointer: *const c_void, p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT<'_>, ) -> Result; pub const AMD_BUFFER_MARKER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_buffer_marker\0") }; pub const AMD_BUFFER_MARKER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdWriteBufferMarkerAMD = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_stage: PipelineStageFlags, dst_buffer: Buffer, dst_offset: DeviceSize, marker: u32, ); pub const KHR_SHADER_ATOMIC_INT64_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_atomic_int64\0") }; pub const KHR_SHADER_ATOMIC_INT64_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_CLOCK_NAME: &CStr = unsafe { 
CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_clock\0") }; pub const KHR_SHADER_CLOCK_SPEC_VERSION: u32 = 1u32; pub const AMD_PIPELINE_COMPILER_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_pipeline_compiler_control\0") }; pub const AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION: u32 = 1u32; pub const EXT_CALIBRATED_TIMESTAMPS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_calibrated_timestamps\0") }; pub const EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_time_domain_count: *mut u32, p_time_domains: *mut TimeDomainKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetCalibratedTimestampsKHR = unsafe extern "system" fn( device: crate::vk::Device, timestamp_count: u32, p_timestamp_infos: *const CalibratedTimestampInfoKHR<'_>, p_timestamps: *mut u64, p_max_deviation: *mut u64, ) -> Result; pub const AMD_SHADER_CORE_PROPERTIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_core_properties\0") }; pub const AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION: u32 = 2u32; pub const KHR_VIDEO_DECODE_H265_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_h265\0") }; pub const KHR_VIDEO_DECODE_H265_SPEC_VERSION: u32 = 8u32; pub const KHR_GLOBAL_PRIORITY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_global_priority\0") }; pub const KHR_GLOBAL_PRIORITY_SPEC_VERSION: u32 = 1u32; pub const AMD_MEMORY_OVERALLOCATION_BEHAVIOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_memory_overallocation_behavior\0") }; pub const AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION: u32 = 1u32; pub const EXT_VERTEX_ATTRIBUTE_DIVISOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_vertex_attribute_divisor\0") }; pub const EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION: u32 = 3u32; pub const GGP_FRAME_TOKEN_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GGP_frame_token\0") }; pub const GGP_FRAME_TOKEN_SPEC_VERSION: u32 = 1u32; pub const EXT_PIPELINE_CREATION_FEEDBACK_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_creation_feedback\0") }; pub const EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION: u32 = 1u32; pub const KHR_DRIVER_PROPERTIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_driver_properties\0") }; pub const KHR_DRIVER_PROPERTIES_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_FLOAT_CONTROLS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_float_controls\0") }; pub const KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION: u32 = 4u32; pub const NV_SHADER_SUBGROUP_PARTITIONED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_subgroup_partitioned\0") }; pub const NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION: u32 = 1u32; pub const KHR_DEPTH_STENCIL_RESOLVE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_depth_stencil_resolve\0") }; pub const KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION: u32 = 1u32; pub const KHR_SWAPCHAIN_MUTABLE_FORMAT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_swapchain_mutable_format\0") }; pub const KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION: u32 = 1u32; pub const NV_COMPUTE_SHADER_DERIVATIVES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_compute_shader_derivatives\0") }; pub const 
NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION: u32 = 1u32; pub const NV_MESH_SHADER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_mesh_shader\0") }; pub const NV_MESH_SHADER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMeshTasksNV = unsafe extern "system" fn(command_buffer: CommandBuffer, task_count: u32, first_task: u32); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMeshTasksIndirectNV = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, draw_count: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMeshTasksIndirectCountNV = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, count_buffer: Buffer, count_buffer_offset: DeviceSize, max_draw_count: u32, stride: u32, ); pub const NV_FRAGMENT_SHADER_BARYCENTRIC_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_fragment_shader_barycentric\0") }; pub const NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION: u32 = 1u32; pub const NV_SHADER_IMAGE_FOOTPRINT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_image_footprint\0") }; pub const NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION: u32 = 2u32; pub const NV_SCISSOR_EXCLUSIVE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_scissor_exclusive\0") }; pub const NV_SCISSOR_EXCLUSIVE_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetExclusiveScissorEnableNV = unsafe extern "system" fn( command_buffer: CommandBuffer, first_exclusive_scissor: u32, exclusive_scissor_count: u32, p_exclusive_scissor_enables: *const Bool32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetExclusiveScissorNV = unsafe extern "system" fn( command_buffer: CommandBuffer, first_exclusive_scissor: u32, exclusive_scissor_count: u32, p_exclusive_scissors: *const Rect2D, ); pub const NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_device_diagnostic_checkpoints\0") }; pub const NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCheckpointNV = unsafe extern "system" fn(command_buffer: CommandBuffer, p_checkpoint_marker: *const c_void); #[allow(non_camel_case_types)] pub type PFN_vkGetQueueCheckpointDataNV = unsafe extern "system" fn( queue: Queue, p_checkpoint_data_count: *mut u32, p_checkpoint_data: *mut CheckpointDataNV<'_>, ); pub const KHR_TIMELINE_SEMAPHORE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_timeline_semaphore\0") }; pub const KHR_TIMELINE_SEMAPHORE_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetSemaphoreCounterValue = unsafe extern "system" fn( device: crate::vk::Device, semaphore: Semaphore, p_value: *mut u64, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkWaitSemaphores = unsafe extern "system" fn( device: crate::vk::Device, p_wait_info: *const SemaphoreWaitInfo<'_>, timeout: u64, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkSignalSemaphore = unsafe extern "system" fn( device: crate::vk::Device, p_signal_info: *const SemaphoreSignalInfo<'_>, ) -> Result; pub const INTEL_SHADER_INTEGER_FUNCTIONS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_shader_integer_functions2\0") }; pub const INTEL_SHADER_INTEGER_FUNCTIONS2_SPEC_VERSION: u32 = 1u32; pub const INTEL_PERFORMANCE_QUERY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_performance_query\0") }; 
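// Illustrative sketch only, not part of the generated bindings (helper name is
// made up for illustration): invoking a loaded `PFN_vkGetSemaphoreCounterValue`
// through the raw alias declared above. The pointer is assumed to come from
// `vkGetDeviceProcAddr` on a device created with Vulkan 1.2 or
// VK_KHR_timeline_semaphore enabled; keeping `device` and `semaphore` valid
// remains the caller's contract.
#[allow(dead_code)]
unsafe fn read_timeline_semaphore_value(
    get_counter_value: PFN_vkGetSemaphoreCounterValue,
    device: crate::vk::Device,
    semaphore: Semaphore,
) -> core::result::Result<u64, Result> {
    let mut value = 0u64;
    // The alias is a plain `unsafe extern "system" fn`, so it is called directly;
    // any non-SUCCESS code is forwarded to the caller unchanged.
    let code = get_counter_value(device, semaphore, &mut value);
    if code == Result::SUCCESS {
        Ok(value)
    } else {
        Err(code)
    }
}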
pub const INTEL_PERFORMANCE_QUERY_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkInitializePerformanceApiINTEL = unsafe extern "system" fn( device: crate::vk::Device, p_initialize_info: *const InitializePerformanceApiInfoINTEL<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkUninitializePerformanceApiINTEL = unsafe extern "system" fn(device: crate::vk::Device); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPerformanceMarkerINTEL = unsafe extern "system" fn( command_buffer: CommandBuffer, p_marker_info: *const PerformanceMarkerInfoINTEL<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPerformanceStreamMarkerINTEL = unsafe extern "system" fn( command_buffer: CommandBuffer, p_marker_info: *const PerformanceStreamMarkerInfoINTEL<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPerformanceOverrideINTEL = unsafe extern "system" fn( command_buffer: CommandBuffer, p_override_info: *const PerformanceOverrideInfoINTEL<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAcquirePerformanceConfigurationINTEL = unsafe extern "system" fn( device: crate::vk::Device, p_acquire_info: *const PerformanceConfigurationAcquireInfoINTEL<'_>, p_configuration: *mut PerformanceConfigurationINTEL, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkReleasePerformanceConfigurationINTEL = unsafe extern "system" fn( device: crate::vk::Device, configuration: PerformanceConfigurationINTEL, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkQueueSetPerformanceConfigurationINTEL = unsafe extern "system" fn(queue: Queue, configuration: PerformanceConfigurationINTEL) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPerformanceParameterINTEL = unsafe extern "system" fn( device: crate::vk::Device, parameter: PerformanceParameterTypeINTEL, p_value: *mut PerformanceValueINTEL, ) -> Result; pub const KHR_VULKAN_MEMORY_MODEL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_vulkan_memory_model\0") }; pub const KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION: u32 = 3u32; pub const EXT_PCI_BUS_INFO_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pci_bus_info\0") }; pub const EXT_PCI_BUS_INFO_SPEC_VERSION: u32 = 2u32; pub const AMD_DISPLAY_NATIVE_HDR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_display_native_hdr\0") }; pub const AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkSetLocalDimmingAMD = unsafe extern "system" fn( device: crate::vk::Device, swap_chain: SwapchainKHR, local_dimming_enable: Bool32, ); pub const FUCHSIA_IMAGEPIPE_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_imagepipe_surface\0") }; pub const FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateImagePipeSurfaceFUCHSIA = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const KHR_SHADER_TERMINATE_INVOCATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_terminate_invocation\0") }; pub const KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION: u32 = 1u32; pub const EXT_METAL_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_metal_surface\0") }; pub const EXT_METAL_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type 
PFN_vkCreateMetalSurfaceEXT = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const MetalSurfaceCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const EXT_FRAGMENT_DENSITY_MAP_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_fragment_density_map\0") }; pub const EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION: u32 = 2u32; pub const EXT_SCALAR_BLOCK_LAYOUT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_scalar_block_layout\0") }; pub const EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION: u32 = 1u32; pub const GOOGLE_HLSL_FUNCTIONALITY1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_hlsl_functionality1\0") }; pub const GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION: u32 = 1u32; pub const GOOGLE_DECORATE_STRING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_decorate_string\0") }; pub const GOOGLE_DECORATE_STRING_SPEC_VERSION: u32 = 1u32; pub const EXT_SUBGROUP_SIZE_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_subgroup_size_control\0") }; pub const EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION: u32 = 2u32; pub const KHR_FRAGMENT_SHADING_RATE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_fragment_shading_rate\0") }; pub const KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_fragment_shading_rate_count: *mut u32, p_fragment_shading_rates: *mut PhysicalDeviceFragmentShadingRateKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetFragmentShadingRateKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_fragment_size: *const Extent2D, combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2usize], ); pub const AMD_SHADER_CORE_PROPERTIES2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_core_properties2\0") }; pub const AMD_SHADER_CORE_PROPERTIES2_SPEC_VERSION: u32 = 1u32; pub const AMD_DEVICE_COHERENT_MEMORY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_device_coherent_memory\0") }; pub const AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION: u32 = 1u32; pub const KHR_DYNAMIC_RENDERING_LOCAL_READ_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_dynamic_rendering_local_read\0") }; pub const KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRenderingAttachmentLocationsKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_location_info: *const RenderingAttachmentLocationInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRenderingInputAttachmentIndicesKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_location_info: *const RenderingInputAttachmentIndexInfoKHR<'_>, ); pub const EXT_SHADER_IMAGE_ATOMIC_INT64_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_image_atomic_int64\0") }; pub const EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_QUAD_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_quad_control\0") }; pub const KHR_SHADER_QUAD_CONTROL_SPEC_VERSION: u32 = 1u32; pub const KHR_SPIRV_1_4_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_spirv_1_4\0") }; pub const KHR_SPIRV_1_4_SPEC_VERSION: u32 = 1u32; pub const EXT_MEMORY_BUDGET_NAME: &CStr = unsafe { 
CStr::from_bytes_with_nul_unchecked(b"VK_EXT_memory_budget\0") }; pub const EXT_MEMORY_BUDGET_SPEC_VERSION: u32 = 1u32; pub const EXT_MEMORY_PRIORITY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_memory_priority\0") }; pub const EXT_MEMORY_PRIORITY_SPEC_VERSION: u32 = 1u32; pub const KHR_SURFACE_PROTECTED_CAPABILITIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_surface_protected_capabilities\0") }; pub const KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION: u32 = 1u32; pub const NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_dedicated_allocation_image_aliasing\0") }; pub const NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION: u32 = 1u32; pub const KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_separate_depth_stencil_layouts\0") }; pub const KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION: u32 = 1u32; pub const EXT_BUFFER_DEVICE_ADDRESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_buffer_device_address\0") }; pub const EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetBufferDeviceAddress = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const BufferDeviceAddressInfo<'_>, ) -> DeviceAddress; pub const EXT_TOOLING_INFO_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_tooling_info\0") }; pub const EXT_TOOLING_INFO_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceToolProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_tool_count: *mut u32, p_tool_properties: *mut PhysicalDeviceToolProperties<'_>, ) -> Result; pub const EXT_SEPARATE_STENCIL_USAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_separate_stencil_usage\0") }; pub const EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION: u32 = 1u32; pub const EXT_VALIDATION_FEATURES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_validation_features\0") }; pub const EXT_VALIDATION_FEATURES_SPEC_VERSION: u32 = 6u32; pub const KHR_PRESENT_WAIT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_present_wait\0") }; pub const KHR_PRESENT_WAIT_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkWaitForPresentKHR = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, present_id: u64, timeout: u64, ) -> Result; pub const NV_COOPERATIVE_MATRIX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_cooperative_matrix\0") }; pub const NV_COOPERATIVE_MATRIX_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut CooperativeMatrixPropertiesNV<'_>, ) -> Result; pub const NV_COVERAGE_REDUCTION_MODE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_coverage_reduction_mode\0") }; pub const NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = unsafe extern "system" fn( physical_device: PhysicalDevice, p_combination_count: *mut u32, p_combinations: *mut FramebufferMixedSamplesCombinationNV<'_>, ) -> Result; pub const EXT_FRAGMENT_SHADER_INTERLOCK_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_fragment_shader_interlock\0") 
}; pub const EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION: u32 = 1u32; pub const EXT_YCBCR_IMAGE_ARRAYS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_ycbcr_image_arrays\0") }; pub const EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION: u32 = 1u32; pub const KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_uniform_buffer_standard_layout\0") }; pub const KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION: u32 = 1u32; pub const EXT_PROVOKING_VERTEX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_provoking_vertex\0") }; pub const EXT_PROVOKING_VERTEX_SPEC_VERSION: u32 = 1u32; pub const EXT_FULL_SCREEN_EXCLUSIVE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_full_screen_exclusive\0") }; pub const EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION: u32 = 4u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT = unsafe extern "system" fn( physical_device: PhysicalDevice, p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, p_present_mode_count: *mut u32, p_present_modes: *mut PresentModeKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAcquireFullScreenExclusiveModeEXT = unsafe extern "system" fn(device: crate::vk::Device, swapchain: SwapchainKHR) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkReleaseFullScreenExclusiveModeEXT = unsafe extern "system" fn(device: crate::vk::Device, swapchain: SwapchainKHR) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceGroupSurfacePresentModes2EXT = unsafe extern "system" fn( device: crate::vk::Device, p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR<'_>, p_modes: *mut DeviceGroupPresentModeFlagsKHR, ) -> Result; pub const EXT_HEADLESS_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_headless_surface\0") }; pub const EXT_HEADLESS_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateHeadlessSurfaceEXT = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const HeadlessSurfaceCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; pub const KHR_BUFFER_DEVICE_ADDRESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_buffer_device_address\0") }; pub const KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetBufferOpaqueCaptureAddress = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const BufferDeviceAddressInfo<'_>, ) -> u64; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceMemoryOpaqueCaptureAddress = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const DeviceMemoryOpaqueCaptureAddressInfo<'_>, ) -> u64; pub const EXT_LINE_RASTERIZATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_line_rasterization\0") }; pub const EXT_LINE_RASTERIZATION_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetLineStippleKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, line_stipple_factor: u32, line_stipple_pattern: u16, ); pub const EXT_SHADER_ATOMIC_FLOAT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_atomic_float\0") }; pub const EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION: u32 = 1u32; pub const EXT_HOST_QUERY_RESET_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_host_query_reset\0") }; pub const EXT_HOST_QUERY_RESET_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub 
type PFN_vkResetQueryPool = unsafe extern "system" fn( device: crate::vk::Device, query_pool: QueryPool, first_query: u32, query_count: u32, ); pub const EXT_INDEX_TYPE_UINT8_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_index_type_uint8\0") }; pub const EXT_INDEX_TYPE_UINT8_SPEC_VERSION: u32 = 1u32; pub const EXT_EXTENDED_DYNAMIC_STATE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extended_dynamic_state\0") }; pub const EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCullMode = unsafe extern "system" fn(command_buffer: CommandBuffer, cull_mode: CullModeFlags); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetFrontFace = unsafe extern "system" fn(command_buffer: CommandBuffer, front_face: FrontFace); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPrimitiveTopology = unsafe extern "system" fn(command_buffer: CommandBuffer, primitive_topology: PrimitiveTopology); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetViewportWithCount = unsafe extern "system" fn( command_buffer: CommandBuffer, viewport_count: u32, p_viewports: *const Viewport, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetScissorWithCount = unsafe extern "system" fn( command_buffer: CommandBuffer, scissor_count: u32, p_scissors: *const Rect2D, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindVertexBuffers2 = unsafe extern "system" fn( command_buffer: CommandBuffer, first_binding: u32, binding_count: u32, p_buffers: *const Buffer, p_offsets: *const DeviceSize, p_sizes: *const DeviceSize, p_strides: *const DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthTestEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_test_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthWriteEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_write_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthCompareOp = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_compare_op: CompareOp); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthBoundsTestEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_bounds_test_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetStencilTestEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, stencil_test_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetStencilOp = unsafe extern "system" fn( command_buffer: CommandBuffer, face_mask: StencilFaceFlags, fail_op: StencilOp, pass_op: StencilOp, depth_fail_op: StencilOp, compare_op: CompareOp, ); pub const KHR_DEFERRED_HOST_OPERATIONS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_deferred_host_operations\0") }; pub const KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION: u32 = 4u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateDeferredOperationKHR = unsafe extern "system" fn( device: crate::vk::Device, p_allocator: *const AllocationCallbacks<'_>, p_deferred_operation: *mut DeferredOperationKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyDeferredOperationKHR = unsafe extern "system" fn( device: crate::vk::Device, operation: DeferredOperationKHR, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeferredOperationMaxConcurrencyKHR = unsafe extern "system" fn(device: crate::vk::Device, operation: DeferredOperationKHR) -> u32; #[allow(non_camel_case_types)] pub type 
PFN_vkGetDeferredOperationResultKHR = unsafe extern "system" fn(device: crate::vk::Device, operation: DeferredOperationKHR) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDeferredOperationJoinKHR = unsafe extern "system" fn(device: crate::vk::Device, operation: DeferredOperationKHR) -> Result; pub const KHR_PIPELINE_EXECUTABLE_PROPERTIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_pipeline_executable_properties\0") }; pub const KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPipelineExecutablePropertiesKHR = unsafe extern "system" fn( device: crate::vk::Device, p_pipeline_info: *const PipelineInfoKHR<'_>, p_executable_count: *mut u32, p_properties: *mut PipelineExecutablePropertiesKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPipelineExecutableStatisticsKHR = unsafe extern "system" fn( device: crate::vk::Device, p_executable_info: *const PipelineExecutableInfoKHR<'_>, p_statistic_count: *mut u32, p_statistics: *mut PipelineExecutableStatisticKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPipelineExecutableInternalRepresentationsKHR = unsafe extern "system" fn( device: crate::vk::Device, p_executable_info: *const PipelineExecutableInfoKHR<'_>, p_internal_representation_count: *mut u32, p_internal_representations: *mut PipelineExecutableInternalRepresentationKHR<'_>, ) -> Result; pub const EXT_HOST_IMAGE_COPY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_host_image_copy\0") }; pub const EXT_HOST_IMAGE_COPY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCopyMemoryToImageEXT = unsafe extern "system" fn( device: crate::vk::Device, p_copy_memory_to_image_info: *const CopyMemoryToImageInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyImageToMemoryEXT = unsafe extern "system" fn( device: crate::vk::Device, p_copy_image_to_memory_info: *const CopyImageToMemoryInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyImageToImageEXT = unsafe extern "system" fn( device: crate::vk::Device, p_copy_image_to_image_info: *const CopyImageToImageInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkTransitionImageLayoutEXT = unsafe extern "system" fn( device: crate::vk::Device, transition_count: u32, p_transitions: *const HostImageLayoutTransitionInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetImageSubresourceLayout2KHR = unsafe extern "system" fn( device: crate::vk::Device, image: Image, p_subresource: *const ImageSubresource2KHR<'_>, p_layout: *mut SubresourceLayout2KHR<'_>, ); pub const KHR_MAP_MEMORY2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_map_memory2\0") }; pub const KHR_MAP_MEMORY2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkMapMemory2KHR = unsafe extern "system" fn( device: crate::vk::Device, p_memory_map_info: *const MemoryMapInfoKHR<'_>, pp_data: *mut *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkUnmapMemory2KHR = unsafe extern "system" fn( device: crate::vk::Device, p_memory_unmap_info: *const MemoryUnmapInfoKHR<'_>, ) -> Result; pub const EXT_MAP_MEMORY_PLACED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_map_memory_placed\0") }; pub const EXT_MAP_MEMORY_PLACED_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_ATOMIC_FLOAT2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_atomic_float2\0") }; 
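// Illustrative sketch only, not part of the generated bindings (helper name is
// made up for illustration): a thin wrapper around a loaded
// `PFN_vkMapMemory2KHR` from VK_KHR_map_memory2, declared above. The
// `MemoryMapInfoKHR` is assumed to be fully initialised by the caller, and the
// returned host pointer is only valid until the corresponding unmap call.
#[allow(dead_code)]
unsafe fn map_memory2_raw(
    map_memory2: PFN_vkMapMemory2KHR,
    device: crate::vk::Device,
    map_info: &MemoryMapInfoKHR<'_>,
) -> core::result::Result<*mut c_void, Result> {
    let p_info: *const MemoryMapInfoKHR<'_> = map_info;
    let mut data: *mut c_void = core::ptr::null_mut();
    // SUCCESS yields the host pointer written through `pp_data`; every other
    // code is returned to the caller for error handling.
    let code = map_memory2(device, p_info, &mut data);
    if code == Result::SUCCESS {
        Ok(data)
    } else {
        Err(code)
    }
}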
pub const EXT_SHADER_ATOMIC_FLOAT2_SPEC_VERSION: u32 = 1u32; pub const EXT_SURFACE_MAINTENANCE1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_surface_maintenance1\0") }; pub const EXT_SURFACE_MAINTENANCE1_SPEC_VERSION: u32 = 1u32; pub const EXT_SWAPCHAIN_MAINTENANCE1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_swapchain_maintenance1\0") }; pub const EXT_SWAPCHAIN_MAINTENANCE1_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkReleaseSwapchainImagesEXT = unsafe extern "system" fn( device: crate::vk::Device, p_release_info: *const ReleaseSwapchainImagesInfoEXT<'_>, ) -> Result; pub const EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_demote_to_helper_invocation\0") }; pub const EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION: u32 = 1u32; pub const NV_DEVICE_GENERATED_COMMANDS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_device_generated_commands\0") }; pub const NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkGetGeneratedCommandsMemoryRequirementsNV = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const GeneratedCommandsMemoryRequirementsInfoNV<'_>, p_memory_requirements: *mut MemoryRequirements2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPreprocessGeneratedCommandsNV = unsafe extern "system" fn( command_buffer: CommandBuffer, p_generated_commands_info: *const GeneratedCommandsInfoNV<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdExecuteGeneratedCommandsNV = unsafe extern "system" fn( command_buffer: CommandBuffer, is_preprocessed: Bool32, p_generated_commands_info: *const GeneratedCommandsInfoNV<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindPipelineShaderGroupNV = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, pipeline: Pipeline, group_index: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateIndirectCommandsLayoutNV = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const IndirectCommandsLayoutCreateInfoNV<'_>, p_allocator: *const AllocationCallbacks<'_>, p_indirect_commands_layout: *mut IndirectCommandsLayoutNV, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyIndirectCommandsLayoutNV = unsafe extern "system" fn( device: crate::vk::Device, indirect_commands_layout: IndirectCommandsLayoutNV, p_allocator: *const AllocationCallbacks<'_>, ); pub const NV_INHERITED_VIEWPORT_SCISSOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_inherited_viewport_scissor\0") }; pub const NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_INTEGER_DOT_PRODUCT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_integer_dot_product\0") }; pub const KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION: u32 = 1u32; pub const EXT_TEXEL_BUFFER_ALIGNMENT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_texel_buffer_alignment\0") }; pub const EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION: u32 = 1u32; pub const QCOM_RENDER_PASS_TRANSFORM_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_render_pass_transform\0") }; pub const QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION: u32 = 4u32; pub const EXT_DEPTH_BIAS_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_bias_control\0") }; pub const EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION: u32 = 1u32; 
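// Illustrative sketch only, not part of the generated bindings (helper name is
// made up for illustration): the *_NAME constants in this module are
// NUL-terminated `&CStr`s, so their `as_ptr()` values can be collected into the
// `ppEnabledExtensionNames` array passed at device creation. Availability of
// VK_EXT_depth_bias_control should still be confirmed via
// vkEnumerateDeviceExtensionProperties before enabling it.
#[allow(dead_code)]
fn depth_bias_control_extension_names() -> [*const core::ffi::c_char; 1] {
    [EXT_DEPTH_BIAS_CONTROL_NAME.as_ptr()]
}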
#[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthBias2EXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_depth_bias_info: *const DepthBiasInfoEXT<'_>, ); pub const EXT_DEVICE_MEMORY_REPORT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_device_memory_report\0") }; pub const EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION: u32 = 2u32; pub const EXT_ACQUIRE_DRM_DISPLAY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_acquire_drm_display\0") }; pub const EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkAcquireDrmDisplayEXT = unsafe extern "system" fn( physical_device: PhysicalDevice, drm_fd: i32, display: DisplayKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDrmDisplayEXT = unsafe extern "system" fn( physical_device: PhysicalDevice, drm_fd: i32, connector_id: u32, display: *mut DisplayKHR, ) -> Result; pub const EXT_ROBUSTNESS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_robustness2\0") }; pub const EXT_ROBUSTNESS2_SPEC_VERSION: u32 = 1u32; pub const EXT_CUSTOM_BORDER_COLOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_custom_border_color\0") }; pub const EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION: u32 = 12u32; pub const GOOGLE_USER_TYPE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_user_type\0") }; pub const GOOGLE_USER_TYPE_SPEC_VERSION: u32 = 1u32; pub const KHR_PIPELINE_LIBRARY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_pipeline_library\0") }; pub const KHR_PIPELINE_LIBRARY_SPEC_VERSION: u32 = 1u32; pub const NV_PRESENT_BARRIER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_present_barrier\0") }; pub const NV_PRESENT_BARRIER_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_NON_SEMANTIC_INFO_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_non_semantic_info\0") }; pub const KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION: u32 = 1u32; pub const KHR_PRESENT_ID_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_present_id\0") }; pub const KHR_PRESENT_ID_SPEC_VERSION: u32 = 1u32; pub const EXT_PRIVATE_DATA_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_private_data\0") }; pub const EXT_PRIVATE_DATA_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreatePrivateDataSlot = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const PrivateDataSlotCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_private_data_slot: *mut PrivateDataSlot, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyPrivateDataSlot = unsafe extern "system" fn( device: crate::vk::Device, private_data_slot: PrivateDataSlot, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkSetPrivateData = unsafe extern "system" fn( device: crate::vk::Device, object_type: ObjectType, object_handle: u64, private_data_slot: PrivateDataSlot, data: u64, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPrivateData = unsafe extern "system" fn( device: crate::vk::Device, object_type: ObjectType, object_handle: u64, private_data_slot: PrivateDataSlot, p_data: *mut u64, ); pub const EXT_PIPELINE_CREATION_CACHE_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_creation_cache_control\0") }; pub const EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION: u32 = 3u32; pub const KHR_VIDEO_ENCODE_QUEUE_NAME: &CStr = unsafe { 
CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_encode_queue\0") }; pub const KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION: u32 = 12u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_quality_level_info: *const PhysicalDeviceVideoEncodeQualityLevelInfoKHR<'_>, p_quality_level_properties: *mut VideoEncodeQualityLevelPropertiesKHR<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetEncodedVideoSessionParametersKHR = unsafe extern "system" fn( device: crate::vk::Device, p_video_session_parameters_info: *const VideoEncodeSessionParametersGetInfoKHR<'_>, p_feedback_info: *mut VideoEncodeSessionParametersFeedbackInfoKHR<'_>, p_data_size: *mut usize, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdEncodeVideoKHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_encode_info: *const VideoEncodeInfoKHR<'_>, ); pub const NV_DEVICE_DIAGNOSTICS_CONFIG_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_device_diagnostics_config\0") }; pub const NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION: u32 = 2u32; pub const QCOM_RENDER_PASS_STORE_OPS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_render_pass_store_ops\0") }; pub const QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION: u32 = 2u32; pub const NV_CUDA_KERNEL_LAUNCH_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_cuda_kernel_launch\0") }; pub const NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateCudaModuleNV = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const CudaModuleCreateInfoNV<'_>, p_allocator: *const AllocationCallbacks<'_>, p_module: *mut CudaModuleNV, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetCudaModuleCacheNV = unsafe extern "system" fn( device: crate::vk::Device, module: CudaModuleNV, p_cache_size: *mut usize, p_cache_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateCudaFunctionNV = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const CudaFunctionCreateInfoNV<'_>, p_allocator: *const AllocationCallbacks<'_>, p_function: *mut CudaFunctionNV, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyCudaModuleNV = unsafe extern "system" fn( device: crate::vk::Device, module: CudaModuleNV, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkDestroyCudaFunctionNV = unsafe extern "system" fn( device: crate::vk::Device, function: CudaFunctionNV, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCudaLaunchKernelNV = unsafe extern "system" fn( command_buffer: CommandBuffer, p_launch_info: *const CudaLaunchInfoNV<'_>, ); pub const NV_LOW_LATENCY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_low_latency\0") }; pub const NV_LOW_LATENCY_SPEC_VERSION: u32 = 1u32; pub const EXT_METAL_OBJECTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_metal_objects\0") }; pub const EXT_METAL_OBJECTS_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkExportMetalObjectsEXT = unsafe extern "system" fn( device: crate::vk::Device, p_metal_objects_info: *mut ExportMetalObjectsInfoEXT<'_>, ); pub const KHR_SYNCHRONIZATION2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_synchronization2\0") }; pub const 
KHR_SYNCHRONIZATION2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetEvent2 = unsafe extern "system" fn( command_buffer: CommandBuffer, event: Event, p_dependency_info: *const DependencyInfo<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdResetEvent2 = unsafe extern "system" fn( command_buffer: CommandBuffer, event: Event, stage_mask: PipelineStageFlags2, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdWaitEvents2 = unsafe extern "system" fn( command_buffer: CommandBuffer, event_count: u32, p_events: *const Event, p_dependency_infos: *const DependencyInfo<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPipelineBarrier2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_dependency_info: *const DependencyInfo<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdWriteTimestamp2 = unsafe extern "system" fn( command_buffer: CommandBuffer, stage: PipelineStageFlags2, query_pool: QueryPool, query: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkQueueSubmit2 = unsafe extern "system" fn( queue: Queue, submit_count: u32, p_submits: *const SubmitInfo2<'_>, fence: Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdWriteBufferMarker2AMD = unsafe extern "system" fn( command_buffer: CommandBuffer, stage: PipelineStageFlags2, dst_buffer: Buffer, dst_offset: DeviceSize, marker: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkGetQueueCheckpointData2NV = unsafe extern "system" fn( queue: Queue, p_checkpoint_data_count: *mut u32, p_checkpoint_data: *mut CheckpointData2NV<'_>, ); pub const EXT_DESCRIPTOR_BUFFER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_descriptor_buffer\0") }; pub const EXT_DESCRIPTOR_BUFFER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDescriptorSetLayoutSizeEXT = unsafe extern "system" fn( device: crate::vk::Device, layout: DescriptorSetLayout, p_layout_size_in_bytes: *mut DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDescriptorSetLayoutBindingOffsetEXT = unsafe extern "system" fn( device: crate::vk::Device, layout: DescriptorSetLayout, binding: u32, p_offset: *mut DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDescriptorEXT = unsafe extern "system" fn( device: crate::vk::Device, p_descriptor_info: *const DescriptorGetInfoEXT<'_>, data_size: usize, p_descriptor: *mut c_void, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindDescriptorBuffersEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer_count: u32, p_binding_infos: *const DescriptorBufferBindingInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDescriptorBufferOffsetsEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, layout: PipelineLayout, first_set: u32, set_count: u32, p_buffer_indices: *const u32, p_offsets: *const DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, layout: PipelineLayout, set: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const BufferCaptureDescriptorDataInfoEXT<'_>, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetImageOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const 
ImageCaptureDescriptorDataInfoEXT<'_>, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const ImageViewCaptureDescriptorDataInfoEXT<'_>, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const SamplerCaptureDescriptorDataInfoEXT<'_>, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const AccelerationStructureCaptureDescriptorDataInfoEXT<'_>, p_data: *mut c_void, ) -> Result; pub const EXT_GRAPHICS_PIPELINE_LIBRARY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_graphics_pipeline_library\0") }; pub const EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION: u32 = 1u32; pub const AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_early_and_late_fragment_tests\0") }; pub const AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION: u32 = 1u32; pub const KHR_FRAGMENT_SHADER_BARYCENTRIC_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_fragment_shader_barycentric\0") }; pub const KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_subgroup_uniform_control_flow\0") }; pub const KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION: u32 = 1u32; pub const KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_zero_initialize_workgroup_memory\0") }; pub const KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION: u32 = 1u32; pub const NV_FRAGMENT_SHADING_RATE_ENUMS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_fragment_shading_rate_enums\0") }; pub const NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetFragmentShadingRateEnumNV = unsafe extern "system" fn( command_buffer: CommandBuffer, shading_rate: FragmentShadingRateNV, combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2usize], ); pub const NV_RAY_TRACING_MOTION_BLUR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_ray_tracing_motion_blur\0") }; pub const NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION: u32 = 1u32; pub const EXT_MESH_SHADER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_mesh_shader\0") }; pub const EXT_MESH_SHADER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMeshTasksEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, group_count_x: u32, group_count_y: u32, group_count_z: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMeshTasksIndirectEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, draw_count: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMeshTasksIndirectCountEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, count_buffer: Buffer, count_buffer_offset: DeviceSize, max_draw_count: u32, stride: u32, ); pub const EXT_YCBCR_2PLANE_444_FORMATS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_ycbcr_2plane_444_formats\0") }; pub 
const EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION: u32 = 1u32; pub const EXT_FRAGMENT_DENSITY_MAP2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_fragment_density_map2\0") }; pub const EXT_FRAGMENT_DENSITY_MAP2_SPEC_VERSION: u32 = 1u32; pub const QCOM_ROTATED_COPY_COMMANDS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_rotated_copy_commands\0") }; pub const QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION: u32 = 2u32; pub const EXT_IMAGE_ROBUSTNESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_robustness\0") }; pub const EXT_IMAGE_ROBUSTNESS_SPEC_VERSION: u32 = 1u32; pub const KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_workgroup_memory_explicit_layout\0") }; pub const KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION: u32 = 1u32; pub const KHR_COPY_COMMANDS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_copy_commands2\0") }; pub const KHR_COPY_COMMANDS2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyBuffer2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_copy_buffer_info: *const CopyBufferInfo2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyImage2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_copy_image_info: *const CopyImageInfo2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyBufferToImage2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyImageToBuffer2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBlitImage2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_blit_image_info: *const BlitImageInfo2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdResolveImage2 = unsafe extern "system" fn( command_buffer: CommandBuffer, p_resolve_image_info: *const ResolveImageInfo2<'_>, ); pub const EXT_IMAGE_COMPRESSION_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_compression_control\0") }; pub const EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION: u32 = 1u32; pub const EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_attachment_feedback_loop_layout\0") }; pub const EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION: u32 = 2u32; pub const EXT_4444_FORMATS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_4444_formats\0") }; pub const EXT_4444_FORMATS_SPEC_VERSION: u32 = 1u32; pub const EXT_DEVICE_FAULT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_device_fault\0") }; pub const EXT_DEVICE_FAULT_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceFaultInfoEXT = unsafe extern "system" fn( device: crate::vk::Device, p_fault_counts: *mut DeviceFaultCountsEXT<'_>, p_fault_info: *mut DeviceFaultInfoEXT<'_>, ) -> Result; pub const ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ARM_rasterization_order_attachment_access\0") }; pub const ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION: u32 = 1u32; pub const EXT_RGBA10X6_FORMATS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_rgba10x6_formats\0") }; pub const EXT_RGBA10X6_FORMATS_SPEC_VERSION: u32 = 1u32; pub const 
NV_ACQUIRE_WINRT_DISPLAY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_acquire_winrt_display\0") }; pub const NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkAcquireWinrtDisplayNV = unsafe extern "system" fn(physical_device: PhysicalDevice, display: DisplayKHR) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetWinrtDisplayNV = unsafe extern "system" fn( physical_device: PhysicalDevice, device_relative_id: u32, p_display: *mut DisplayKHR, ) -> Result; pub const EXT_DIRECTFB_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_directfb_surface\0") }; pub const EXT_DIRECTFB_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateDirectFBSurfaceEXT = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const DirectFBSurfaceCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, dfb: *mut IDirectFB, ) -> Bool32; pub const VALVE_MUTABLE_DESCRIPTOR_TYPE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_VALVE_mutable_descriptor_type\0") }; pub const VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION: u32 = 1u32; pub const EXT_VERTEX_INPUT_DYNAMIC_STATE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_vertex_input_dynamic_state\0") }; pub const EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetVertexInputEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, vertex_binding_description_count: u32, p_vertex_binding_descriptions: *const VertexInputBindingDescription2EXT<'_>, vertex_attribute_description_count: u32, p_vertex_attribute_descriptions: *const VertexInputAttributeDescription2EXT<'_>, ); pub const EXT_PHYSICAL_DEVICE_DRM_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_physical_device_drm\0") }; pub const EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION: u32 = 1u32; pub const EXT_DEVICE_ADDRESS_BINDING_REPORT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_device_address_binding_report\0") }; pub const EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION: u32 = 1u32; pub const EXT_DEPTH_CLIP_CONTROL_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_clip_control\0") }; pub const EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION: u32 = 1u32; pub const EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_primitive_topology_list_restart\0") }; pub const EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION: u32 = 1u32; pub const KHR_FORMAT_FEATURE_FLAGS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_format_feature_flags2\0") }; pub const KHR_FORMAT_FEATURE_FLAGS2_SPEC_VERSION: u32 = 2u32; pub const FUCHSIA_EXTERNAL_MEMORY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_external_memory\0") }; pub const FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryZirconHandleFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, p_get_zircon_handle_info: *const MemoryGetZirconHandleInfoFUCHSIA<'_>, p_zircon_handle: *mut zx_handle_t, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA = unsafe extern "system" fn( 
device: crate::vk::Device, handle_type: ExternalMemoryHandleTypeFlags, zircon_handle: zx_handle_t, p_memory_zircon_handle_properties: *mut MemoryZirconHandlePropertiesFUCHSIA<'_>, ) -> Result; pub const FUCHSIA_EXTERNAL_SEMAPHORE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_external_semaphore\0") }; pub const FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkImportSemaphoreZirconHandleFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, p_import_semaphore_zircon_handle_info: *const ImportSemaphoreZirconHandleInfoFUCHSIA<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetSemaphoreZirconHandleFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, p_get_zircon_handle_info: *const SemaphoreGetZirconHandleInfoFUCHSIA<'_>, p_zircon_handle: *mut zx_handle_t, ) -> Result; pub const FUCHSIA_BUFFER_COLLECTION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_buffer_collection\0") }; pub const FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateBufferCollectionFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const BufferCollectionCreateInfoFUCHSIA<'_>, p_allocator: *const AllocationCallbacks<'_>, p_collection: *mut BufferCollectionFUCHSIA, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkSetBufferCollectionImageConstraintsFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, collection: BufferCollectionFUCHSIA, p_image_constraints_info: *const ImageConstraintsInfoFUCHSIA<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, collection: BufferCollectionFUCHSIA, p_buffer_constraints_info: *const BufferConstraintsInfoFUCHSIA<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyBufferCollectionFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, collection: BufferCollectionFUCHSIA, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetBufferCollectionPropertiesFUCHSIA = unsafe extern "system" fn( device: crate::vk::Device, collection: BufferCollectionFUCHSIA, p_properties: *mut BufferCollectionPropertiesFUCHSIA<'_>, ) -> Result; pub const HUAWEI_SUBPASS_SHADING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_subpass_shading\0") }; pub const HUAWEI_SUBPASS_SHADING_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = unsafe extern "system" fn( device: crate::vk::Device, renderpass: RenderPass, p_max_workgroup_size: *mut Extent2D, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdSubpassShadingHUAWEI = unsafe extern "system" fn(command_buffer: CommandBuffer); pub const HUAWEI_INVOCATION_MASK_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_invocation_mask\0") }; pub const HUAWEI_INVOCATION_MASK_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindInvocationMaskHUAWEI = unsafe extern "system" fn( command_buffer: CommandBuffer, image_view: ImageView, image_layout: ImageLayout, ); pub const NV_EXTERNAL_MEMORY_RDMA_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory_rdma\0") }; pub const NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetMemoryRemoteAddressNV = unsafe extern 
"system" fn( device: crate::vk::Device, p_memory_get_remote_address_info: *const MemoryGetRemoteAddressInfoNV<'_>, p_address: *mut RemoteAddressNV, ) -> Result; pub const EXT_PIPELINE_PROPERTIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_properties\0") }; pub const EXT_PIPELINE_PROPERTIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPipelinePropertiesEXT = unsafe extern "system" fn( device: crate::vk::Device, p_pipeline_info: *const PipelineInfoEXT<'_>, p_pipeline_properties: *mut BaseOutStructure<'_>, ) -> Result; pub const EXT_FRAME_BOUNDARY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_frame_boundary\0") }; pub const EXT_FRAME_BOUNDARY_SPEC_VERSION: u32 = 1u32; pub const EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_multisampled_render_to_single_sampled\0") }; pub const EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION: u32 = 1u32; pub const EXT_EXTENDED_DYNAMIC_STATE2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extended_dynamic_state2\0") }; pub const EXT_EXTENDED_DYNAMIC_STATE2_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPatchControlPointsEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, patch_control_points: u32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRasterizerDiscardEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, rasterizer_discard_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthBiasEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_bias_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetLogicOpEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, logic_op: LogicOp); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPrimitiveRestartEnable = unsafe extern "system" fn(command_buffer: CommandBuffer, primitive_restart_enable: Bool32); pub const QNX_SCREEN_SURFACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QNX_screen_surface\0") }; pub const QNX_SCREEN_SURFACE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateScreenSurfaceQNX = unsafe extern "system" fn( instance: crate::vk::Instance, p_create_info: *const ScreenSurfaceCreateInfoQNX<'_>, p_allocator: *const AllocationCallbacks<'_>, p_surface: *mut SurfaceKHR, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX = unsafe extern "system" fn( physical_device: PhysicalDevice, queue_family_index: u32, window: *mut _screen_window, ) -> Bool32; pub const EXT_COLOR_WRITE_ENABLE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_color_write_enable\0") }; pub const EXT_COLOR_WRITE_ENABLE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetColorWriteEnableEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, attachment_count: u32, p_color_write_enables: *const Bool32, ); pub const EXT_PRIMITIVES_GENERATED_QUERY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_primitives_generated_query\0") }; pub const EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION: u32 = 1u32; pub const KHR_RAY_TRACING_MAINTENANCE1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_tracing_maintenance1\0") }; pub const KHR_RAY_TRACING_MAINTENANCE1_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdTraceRaysIndirect2KHR = unsafe extern 
"system" fn( command_buffer: CommandBuffer, indirect_device_address: DeviceAddress, ); pub const EXT_GLOBAL_PRIORITY_QUERY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_global_priority_query\0") }; pub const EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION: u32 = 1u32; pub const EXT_IMAGE_VIEW_MIN_LOD_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_view_min_lod\0") }; pub const EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION: u32 = 1u32; pub const EXT_MULTI_DRAW_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_multi_draw\0") }; pub const EXT_MULTI_DRAW_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMultiEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, draw_count: u32, p_vertex_info: *const MultiDrawInfoEXT, instance_count: u32, first_instance: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawMultiIndexedEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, draw_count: u32, p_index_info: *const MultiDrawIndexedInfoEXT, instance_count: u32, first_instance: u32, stride: u32, p_vertex_offset: *const i32, ); pub const EXT_IMAGE_2D_VIEW_OF_3D_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_2d_view_of_3d\0") }; pub const EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION: u32 = 1u32; pub const KHR_PORTABILITY_ENUMERATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_portability_enumeration\0") }; pub const KHR_PORTABILITY_ENUMERATION_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_TILE_IMAGE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_tile_image\0") }; pub const EXT_SHADER_TILE_IMAGE_SPEC_VERSION: u32 = 1u32; pub const EXT_OPACITY_MICROMAP_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_opacity_micromap\0") }; pub const EXT_OPACITY_MICROMAP_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateMicromapEXT = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const MicromapCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_micromap: *mut MicromapEXT, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyMicromapEXT = unsafe extern "system" fn( device: crate::vk::Device, micromap: MicromapEXT, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBuildMicromapsEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, info_count: u32, p_infos: *const MicromapBuildInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkBuildMicromapsEXT = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, info_count: u32, p_infos: *const MicromapBuildInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyMicromapEXT = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, p_info: *const CopyMicromapInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyMicromapToMemoryEXT = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, p_info: *const CopyMicromapToMemoryInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCopyMemoryToMicromapEXT = unsafe extern "system" fn( device: crate::vk::Device, deferred_operation: DeferredOperationKHR, p_info: *const CopyMemoryToMicromapInfoEXT<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkWriteMicromapsPropertiesEXT = unsafe extern "system" fn( device: 
crate::vk::Device, micromap_count: u32, p_micromaps: *const MicromapEXT, query_type: QueryType, data_size: usize, p_data: *mut c_void, stride: usize, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyMicromapEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const CopyMicromapInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyMicromapToMemoryEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const CopyMicromapToMemoryInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyMemoryToMicromapEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_info: *const CopyMemoryToMicromapInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdWriteMicromapsPropertiesEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, micromap_count: u32, p_micromaps: *const MicromapEXT, query_type: QueryType, query_pool: QueryPool, first_query: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceMicromapCompatibilityEXT = unsafe extern "system" fn( device: crate::vk::Device, p_version_info: *const MicromapVersionInfoEXT<'_>, p_compatibility: *mut AccelerationStructureCompatibilityKHR, ); #[allow(non_camel_case_types)] pub type PFN_vkGetMicromapBuildSizesEXT = unsafe extern "system" fn( device: crate::vk::Device, build_type: AccelerationStructureBuildTypeKHR, p_build_info: *const MicromapBuildInfoEXT<'_>, p_size_info: *mut MicromapBuildSizesInfoEXT<'_>, ); pub const NV_DISPLACEMENT_MICROMAP_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_displacement_micromap\0") }; pub const NV_DISPLACEMENT_MICROMAP_SPEC_VERSION: u32 = 2u32; pub const EXT_LOAD_STORE_OP_NONE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_load_store_op_none\0") }; pub const EXT_LOAD_STORE_OP_NONE_SPEC_VERSION: u32 = 1u32; pub const HUAWEI_CLUSTER_CULLING_SHADER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_cluster_culling_shader\0") }; pub const HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION: u32 = 3u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawClusterHUAWEI = unsafe extern "system" fn( command_buffer: CommandBuffer, group_count_x: u32, group_count_y: u32, group_count_z: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawClusterIndirectHUAWEI = unsafe extern "system" fn(command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize); pub const EXT_BORDER_COLOR_SWIZZLE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_border_color_swizzle\0") }; pub const EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION: u32 = 1u32; pub const EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pageable_device_local_memory\0") }; pub const EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkSetDeviceMemoryPriorityEXT = unsafe extern "system" fn(device: crate::vk::Device, memory: DeviceMemory, priority: f32); pub const KHR_MAINTENANCE4_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance4\0") }; pub const KHR_MAINTENANCE4_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceBufferMemoryRequirements = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const DeviceBufferMemoryRequirements<'_>, p_memory_requirements: *mut MemoryRequirements2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceImageMemoryRequirements = unsafe extern "system" fn( device: crate::vk::Device, p_info: 
*const DeviceImageMemoryRequirements<'_>, p_memory_requirements: *mut MemoryRequirements2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceImageSparseMemoryRequirements = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const DeviceImageMemoryRequirements<'_>, p_sparse_memory_requirement_count: *mut u32, p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2<'_>, ); pub const ARM_SHADER_CORE_PROPERTIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ARM_shader_core_properties\0") }; pub const ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_SUBGROUP_ROTATE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_subgroup_rotate\0") }; pub const KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION: u32 = 2u32; pub const ARM_SCHEDULING_CONTROLS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ARM_scheduling_controls\0") }; pub const ARM_SCHEDULING_CONTROLS_SPEC_VERSION: u32 = 1u32; pub const EXT_IMAGE_SLICED_VIEW_OF_3D_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_sliced_view_of_3d\0") }; pub const EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION: u32 = 1u32; pub const VALVE_DESCRIPTOR_SET_HOST_MAPPING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_VALVE_descriptor_set_host_mapping\0") }; pub const VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE = unsafe extern "system" fn( device: crate::vk::Device, p_binding_reference: *const DescriptorSetBindingReferenceVALVE<'_>, p_host_mapping: *mut DescriptorSetLayoutHostMappingInfoVALVE<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDescriptorSetHostMappingVALVE = unsafe extern "system" fn( device: crate::vk::Device, descriptor_set: DescriptorSet, pp_data: *mut *mut c_void, ); pub const EXT_DEPTH_CLAMP_ZERO_ONE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_clamp_zero_one\0") }; pub const EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION: u32 = 1u32; pub const EXT_NON_SEAMLESS_CUBE_MAP_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_non_seamless_cube_map\0") }; pub const EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION: u32 = 1u32; pub const ARM_RENDER_PASS_STRIPED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ARM_render_pass_striped\0") }; pub const ARM_RENDER_PASS_STRIPED_SPEC_VERSION: u32 = 1u32; pub const QCOM_FRAGMENT_DENSITY_MAP_OFFSET_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_fragment_density_map_offset\0") }; pub const QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION: u32 = 1u32; pub const NV_COPY_MEMORY_INDIRECT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_copy_memory_indirect\0") }; pub const NV_COPY_MEMORY_INDIRECT_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyMemoryIndirectNV = unsafe extern "system" fn( command_buffer: CommandBuffer, copy_buffer_address: DeviceAddress, copy_count: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyMemoryToImageIndirectNV = unsafe extern "system" fn( command_buffer: CommandBuffer, copy_buffer_address: DeviceAddress, copy_count: u32, stride: u32, dst_image: Image, dst_image_layout: ImageLayout, p_image_subresources: *const ImageSubresourceLayers, ); pub const NV_MEMORY_DECOMPRESSION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_memory_decompression\0") }; pub const 
NV_MEMORY_DECOMPRESSION_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdDecompressMemoryNV = unsafe extern "system" fn( command_buffer: CommandBuffer, decompress_region_count: u32, p_decompress_memory_regions: *const DecompressMemoryRegionNV, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDecompressMemoryIndirectCountNV = unsafe extern "system" fn( command_buffer: CommandBuffer, indirect_commands_address: DeviceAddress, indirect_commands_count_address: DeviceAddress, stride: u32, ); pub const NV_DEVICE_GENERATED_COMMANDS_COMPUTE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_device_generated_commands_compute\0") }; pub const NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPipelineIndirectMemoryRequirementsNV = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const ComputePipelineCreateInfo<'_>, p_memory_requirements: *mut MemoryRequirements2<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdUpdatePipelineIndirectBufferNV = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, pipeline: Pipeline, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPipelineIndirectDeviceAddressNV = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const PipelineIndirectDeviceAddressInfoNV<'_>, ) -> DeviceAddress; pub const NV_LINEAR_COLOR_ATTACHMENT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_linear_color_attachment\0") }; pub const NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION: u32 = 1u32; pub const GOOGLE_SURFACELESS_QUERY_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_surfaceless_query\0") }; pub const GOOGLE_SURFACELESS_QUERY_SPEC_VERSION: u32 = 2u32; pub const KHR_SHADER_MAXIMAL_RECONVERGENCE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_maximal_reconvergence\0") }; pub const KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION: u32 = 1u32; pub const EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_compression_control_swapchain\0") }; pub const EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION: u32 = 1u32; pub const QCOM_IMAGE_PROCESSING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_image_processing\0") }; pub const QCOM_IMAGE_PROCESSING_SPEC_VERSION: u32 = 1u32; pub const EXT_NESTED_COMMAND_BUFFER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_nested_command_buffer\0") }; pub const EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION: u32 = 1u32; pub const EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_external_memory_acquire_unmodified\0") }; pub const EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION: u32 = 1u32; pub const EXT_EXTENDED_DYNAMIC_STATE3_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extended_dynamic_state3\0") }; pub const EXT_EXTENDED_DYNAMIC_STATE3_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthClampEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_clamp_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetPolygonModeEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, polygon_mode: PolygonMode); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRasterizationSamplesEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, rasterization_samples: 
SampleCountFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetSampleMaskEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, samples: SampleCountFlags, p_sample_mask: *const SampleMask, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetAlphaToCoverageEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, alpha_to_coverage_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetAlphaToOneEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, alpha_to_one_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetLogicOpEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, logic_op_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetColorBlendEnableEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_attachment: u32, attachment_count: u32, p_color_blend_enables: *const Bool32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetColorBlendEquationEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_attachment: u32, attachment_count: u32, p_color_blend_equations: *const ColorBlendEquationEXT, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetColorWriteMaskEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_attachment: u32, attachment_count: u32, p_color_write_masks: *const ColorComponentFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetTessellationDomainOriginEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, domain_origin: TessellationDomainOrigin, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRasterizationStreamEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, rasterization_stream: u32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetConservativeRasterizationModeEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, conservative_rasterization_mode: ConservativeRasterizationModeEXT, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, extra_primitive_overestimation_size: f32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthClipEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, depth_clip_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetSampleLocationsEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, sample_locations_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetColorBlendAdvancedEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, first_attachment: u32, attachment_count: u32, p_color_blend_advanced: *const ColorBlendAdvancedEXT, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetProvokingVertexModeEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, provoking_vertex_mode: ProvokingVertexModeEXT, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetLineRasterizationModeEXT = unsafe extern "system" fn( command_buffer: CommandBuffer, line_rasterization_mode: LineRasterizationModeEXT, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetLineStippleEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, stippled_line_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthClipNegativeOneToOneEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, negative_one_to_one: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetViewportWScalingEnableNV = unsafe extern "system" fn(command_buffer: CommandBuffer, 
viewport_w_scaling_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetViewportSwizzleNV = unsafe extern "system" fn( command_buffer: CommandBuffer, first_viewport: u32, viewport_count: u32, p_viewport_swizzles: *const ViewportSwizzleNV, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoverageToColorEnableNV = unsafe extern "system" fn(command_buffer: CommandBuffer, coverage_to_color_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoverageToColorLocationNV = unsafe extern "system" fn(command_buffer: CommandBuffer, coverage_to_color_location: u32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoverageModulationModeNV = unsafe extern "system" fn( command_buffer: CommandBuffer, coverage_modulation_mode: CoverageModulationModeNV, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoverageModulationTableEnableNV = unsafe extern "system" fn( command_buffer: CommandBuffer, coverage_modulation_table_enable: Bool32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoverageModulationTableNV = unsafe extern "system" fn( command_buffer: CommandBuffer, coverage_modulation_table_count: u32, p_coverage_modulation_table: *const f32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetShadingRateImageEnableNV = unsafe extern "system" fn(command_buffer: CommandBuffer, shading_rate_image_enable: Bool32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetRepresentativeFragmentTestEnableNV = unsafe extern "system" fn( command_buffer: CommandBuffer, representative_fragment_test_enable: Bool32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetCoverageReductionModeNV = unsafe extern "system" fn( command_buffer: CommandBuffer, coverage_reduction_mode: CoverageReductionModeNV, ); pub const EXT_SUBPASS_MERGE_FEEDBACK_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_subpass_merge_feedback\0") }; pub const EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION: u32 = 2u32; pub const LUNARG_DIRECT_DRIVER_LOADING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_LUNARG_direct_driver_loading\0") }; pub const LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_MODULE_IDENTIFIER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_module_identifier\0") }; pub const EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetShaderModuleIdentifierEXT = unsafe extern "system" fn( device: crate::vk::Device, shader_module: ShaderModule, p_identifier: *mut ShaderModuleIdentifierEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetShaderModuleCreateInfoIdentifierEXT = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const ShaderModuleCreateInfo<'_>, p_identifier: *mut ShaderModuleIdentifierEXT<'_>, ); pub const EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_rasterization_order_attachment_access\0") }; pub const EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION: u32 = 1u32; pub const NV_OPTICAL_FLOW_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_optical_flow\0") }; pub const NV_OPTICAL_FLOW_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = unsafe extern "system" fn( physical_device: PhysicalDevice, p_optical_flow_image_format_info: *const OpticalFlowImageFormatInfoNV<'_>, p_format_count: *mut u32, p_image_format_properties: *mut OpticalFlowImageFormatPropertiesNV<'_>, ) 
-> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateOpticalFlowSessionNV = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const OpticalFlowSessionCreateInfoNV<'_>, p_allocator: *const AllocationCallbacks<'_>, p_session: *mut OpticalFlowSessionNV, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyOpticalFlowSessionNV = unsafe extern "system" fn( device: crate::vk::Device, session: OpticalFlowSessionNV, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkBindOpticalFlowSessionImageNV = unsafe extern "system" fn( device: crate::vk::Device, session: OpticalFlowSessionNV, binding_point: OpticalFlowSessionBindingPointNV, view: ImageView, layout: ImageLayout, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdOpticalFlowExecuteNV = unsafe extern "system" fn( command_buffer: CommandBuffer, session: OpticalFlowSessionNV, p_execute_info: *const OpticalFlowExecuteInfoNV<'_>, ); pub const EXT_LEGACY_DITHERING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_legacy_dithering\0") }; pub const EXT_LEGACY_DITHERING_SPEC_VERSION: u32 = 1u32; pub const EXT_PIPELINE_PROTECTED_ACCESS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_protected_access\0") }; pub const EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION: u32 = 1u32; pub const ANDROID_EXTERNAL_FORMAT_RESOLVE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ANDROID_external_format_resolve\0") }; pub const ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION: u32 = 1u32; pub const KHR_MAINTENANCE5_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance5\0") }; pub const KHR_MAINTENANCE5_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindIndexBuffer2KHR = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, size: DeviceSize, index_type: IndexType, ); #[allow(non_camel_case_types)] pub type PFN_vkGetRenderingAreaGranularityKHR = unsafe extern "system" fn( device: crate::vk::Device, p_rendering_area_info: *const RenderingAreaInfoKHR<'_>, p_granularity: *mut Extent2D, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceImageSubresourceLayoutKHR = unsafe extern "system" fn( device: crate::vk::Device, p_info: *const DeviceImageSubresourceInfoKHR<'_>, p_layout: *mut SubresourceLayout2KHR<'_>, ); pub const KHR_RAY_TRACING_POSITION_FETCH_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_tracing_position_fetch\0") }; pub const KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION: u32 = 1u32; pub const EXT_SHADER_OBJECT_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_object\0") }; pub const EXT_SHADER_OBJECT_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCreateShadersEXT = unsafe extern "system" fn( device: crate::vk::Device, create_info_count: u32, p_create_infos: *const ShaderCreateInfoEXT<'_>, p_allocator: *const AllocationCallbacks<'_>, p_shaders: *mut ShaderEXT, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyShaderEXT = unsafe extern "system" fn( device: crate::vk::Device, shader: ShaderEXT, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetShaderBinaryDataEXT = unsafe extern "system" fn( device: crate::vk::Device, shader: ShaderEXT, p_data_size: *mut usize, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindShadersEXT = unsafe extern "system" fn( 
command_buffer: CommandBuffer, stage_count: u32, p_stages: *const ShaderStageFlags, p_shaders: *const ShaderEXT, ); pub const QCOM_TILE_PROPERTIES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_tile_properties\0") }; pub const QCOM_TILE_PROPERTIES_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetFramebufferTilePropertiesQCOM = unsafe extern "system" fn( device: crate::vk::Device, framebuffer: Framebuffer, p_properties_count: *mut u32, p_properties: *mut TilePropertiesQCOM<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDynamicRenderingTilePropertiesQCOM = unsafe extern "system" fn( device: crate::vk::Device, p_rendering_info: *const RenderingInfo<'_>, p_properties: *mut TilePropertiesQCOM<'_>, ) -> Result; pub const SEC_AMIGO_PROFILING_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_SEC_amigo_profiling\0") }; pub const SEC_AMIGO_PROFILING_SPEC_VERSION: u32 = 1u32; pub const QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_multiview_per_view_viewports\0") }; pub const QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION: u32 = 1u32; pub const NV_RAY_TRACING_INVOCATION_REORDER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_ray_tracing_invocation_reorder\0") }; pub const NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION: u32 = 1u32; pub const NV_EXTENDED_SPARSE_ADDRESS_SPACE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_extended_sparse_address_space\0") }; pub const NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION: u32 = 1u32; pub const EXT_MUTABLE_DESCRIPTOR_TYPE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_mutable_descriptor_type\0") }; pub const EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION: u32 = 1u32; pub const EXT_LAYER_SETTINGS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_layer_settings\0") }; pub const EXT_LAYER_SETTINGS_SPEC_VERSION: u32 = 2u32; pub const ARM_SHADER_CORE_BUILTINS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_ARM_shader_core_builtins\0") }; pub const ARM_SHADER_CORE_BUILTINS_SPEC_VERSION: u32 = 2u32; pub const EXT_PIPELINE_LIBRARY_GROUP_HANDLES_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_library_group_handles\0") }; pub const EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION: u32 = 1u32; pub const EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_dynamic_rendering_unused_attachments\0") }; pub const EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION: u32 = 1u32; pub const NV_LOW_LATENCY2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_low_latency2\0") }; pub const NV_LOW_LATENCY2_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkSetLatencySleepModeNV = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_sleep_mode_info: *const LatencySleepModeInfoNV<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkLatencySleepNV = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_sleep_info: *const LatencySleepInfoNV<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkSetLatencyMarkerNV = unsafe extern "system" fn( device: crate::vk::Device, swapchain: SwapchainKHR, p_latency_marker_info: *const SetLatencyMarkerInfoNV<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetLatencyTimingsNV = unsafe extern "system" fn( device: crate::vk::Device, 
swapchain: SwapchainKHR, p_latency_marker_info: *mut GetLatencyMarkerInfoNV<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkQueueNotifyOutOfBandNV = unsafe extern "system" fn(queue: Queue, p_queue_type_info: *const OutOfBandQueueTypeInfoNV<'_>); pub const KHR_COOPERATIVE_MATRIX_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_cooperative_matrix\0") }; pub const KHR_COOPERATIVE_MATRIX_SPEC_VERSION: u32 = 2u32; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut CooperativeMatrixPropertiesKHR<'_>, ) -> Result; pub const QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_multiview_per_view_render_areas\0") }; pub const QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION: u32 = 1u32; pub const KHR_VIDEO_DECODE_AV1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_av1\0") }; pub const KHR_VIDEO_DECODE_AV1_SPEC_VERSION: u32 = 1u32; pub const KHR_VIDEO_MAINTENANCE1_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_maintenance1\0") }; pub const KHR_VIDEO_MAINTENANCE1_SPEC_VERSION: u32 = 1u32; pub const NV_PER_STAGE_DESCRIPTOR_SET_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_per_stage_descriptor_set\0") }; pub const NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION: u32 = 1u32; pub const QCOM_IMAGE_PROCESSING2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_image_processing2\0") }; pub const QCOM_IMAGE_PROCESSING2_SPEC_VERSION: u32 = 1u32; pub const QCOM_FILTER_CUBIC_WEIGHTS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_filter_cubic_weights\0") }; pub const QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION: u32 = 1u32; pub const QCOM_YCBCR_DEGAMMA_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_ycbcr_degamma\0") }; pub const QCOM_YCBCR_DEGAMMA_SPEC_VERSION: u32 = 1u32; pub const QCOM_FILTER_CUBIC_CLAMP_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_filter_cubic_clamp\0") }; pub const QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION: u32 = 1u32; pub const EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_EXT_attachment_feedback_loop_dynamic_state\0") }; pub const EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT = unsafe extern "system" fn(command_buffer: CommandBuffer, aspect_mask: ImageAspectFlags); pub const KHR_VERTEX_ATTRIBUTE_DIVISOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_vertex_attribute_divisor\0") }; pub const KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION: u32 = 1u32; pub const KHR_LOAD_STORE_OP_NONE_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_load_store_op_none\0") }; pub const KHR_LOAD_STORE_OP_NONE_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_FLOAT_CONTROLS2_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_float_controls2\0") }; pub const KHR_SHADER_FLOAT_CONTROLS2_SPEC_VERSION: u32 = 1u32; pub const QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_QNX_external_memory_screen_buffer\0") }; pub const QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkGetScreenBufferPropertiesQNX = unsafe extern 
"system" fn( device: crate::vk::Device, buffer: *const _screen_buffer, p_properties: *mut ScreenBufferPropertiesQNX<'_>, ) -> Result; pub const MSFT_LAYERED_DRIVER_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_MSFT_layered_driver\0") }; pub const MSFT_LAYERED_DRIVER_SPEC_VERSION: u32 = 1u32; pub const KHR_INDEX_TYPE_UINT8_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_index_type_uint8\0") }; pub const KHR_INDEX_TYPE_UINT8_SPEC_VERSION: u32 = 1u32; pub const KHR_LINE_RASTERIZATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_line_rasterization\0") }; pub const KHR_LINE_RASTERIZATION_SPEC_VERSION: u32 = 1u32; pub const KHR_CALIBRATED_TIMESTAMPS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_calibrated_timestamps\0") }; pub const KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION: u32 = 1u32; pub const KHR_SHADER_EXPECT_ASSUME_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_expect_assume\0") }; pub const KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION: u32 = 1u32; pub const KHR_MAINTENANCE6_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance6\0") }; pub const KHR_MAINTENANCE6_SPEC_VERSION: u32 = 1u32; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindDescriptorSets2KHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_bind_descriptor_sets_info: *const BindDescriptorSetsInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPushConstants2KHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_push_constants_info: *const PushConstantsInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPushDescriptorSet2KHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_push_descriptor_set_info: *const PushDescriptorSetInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPushDescriptorSetWithTemplate2KHR = unsafe extern "system" fn( command_buffer: CommandBuffer, p_push_descriptor_set_with_template_info: *const PushDescriptorSetWithTemplateInfoKHR<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDescriptorBufferOffsets2EXT = unsafe extern "system" fn( command_buffer: CommandBuffer, p_set_descriptor_buffer_offsets_info: *const SetDescriptorBufferOffsetsInfoEXT<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = unsafe extern "system" fn (command_buffer : CommandBuffer , p_bind_descriptor_buffer_embedded_samplers_info : * const BindDescriptorBufferEmbeddedSamplersInfoEXT < '_ > ,) ; pub const NV_DESCRIPTOR_POOL_OVERALLOCATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_descriptor_pool_overallocation\0") }; pub const NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION: u32 = 1u32; pub const NV_RAW_ACCESS_CHAINS_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_raw_access_chains\0") }; pub const NV_RAW_ACCESS_CHAINS_SPEC_VERSION: u32 = 1u32; pub const NV_SHADER_ATOMIC_FLOAT16_VECTOR_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_atomic_float16_vector\0") }; pub const NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION: u32 = 1u32; pub const NV_RAY_TRACING_VALIDATION_NAME: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"VK_NV_ray_tracing_validation\0") }; pub const NV_RAY_TRACING_VALIDATION_SPEC_VERSION: u32 = 1u32; ash-0.38.0+1.3.281/src/vk/feature_extensions.rs000064400000000000000000000633121046102023000167710ustar 00000000000000use crate::vk::bitflags::*; use crate::vk::enums::*; #[doc = "Generated from 
'VK_VERSION_1_1'"] impl BufferCreateFlags { #[doc = "Buffer requires protected memory"] pub const PROTECTED: Self = Self(0b1000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl CommandPoolCreateFlags { #[doc = "Command buffers allocated from pool are protected command buffers"] pub const PROTECTED: Self = Self(0b100); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl DependencyFlags { #[doc = "Dependency is across devices"] pub const DEVICE_GROUP: Self = Self(0b100); pub const VIEW_LOCAL: Self = Self(0b10); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl DeviceQueueCreateFlags { #[doc = "Queue is a protected-capable device queue"] pub const PROTECTED: Self = Self(0b1); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl Format { pub const G8B8G8R8_422_UNORM: Self = Self(1_000_156_000); pub const B8G8R8G8_422_UNORM: Self = Self(1_000_156_001); pub const G8_B8_R8_3PLANE_420_UNORM: Self = Self(1_000_156_002); pub const G8_B8R8_2PLANE_420_UNORM: Self = Self(1_000_156_003); pub const G8_B8_R8_3PLANE_422_UNORM: Self = Self(1_000_156_004); pub const G8_B8R8_2PLANE_422_UNORM: Self = Self(1_000_156_005); pub const G8_B8_R8_3PLANE_444_UNORM: Self = Self(1_000_156_006); pub const R10X6_UNORM_PACK16: Self = Self(1_000_156_007); pub const R10X6G10X6_UNORM_2PACK16: Self = Self(1_000_156_008); pub const R10X6G10X6B10X6A10X6_UNORM_4PACK16: Self = Self(1_000_156_009); pub const G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: Self = Self(1_000_156_010); pub const B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: Self = Self(1_000_156_011); pub const G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_012); pub const G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_013); pub const G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_014); pub const G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_015); pub const G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: Self = Self(1_000_156_016); pub const R12X4_UNORM_PACK16: Self = Self(1_000_156_017); pub const R12X4G12X4_UNORM_2PACK16: Self = Self(1_000_156_018); pub const R12X4G12X4B12X4A12X4_UNORM_4PACK16: Self = Self(1_000_156_019); pub const G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: Self = Self(1_000_156_020); pub const B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: Self = Self(1_000_156_021); pub const G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_022); pub const G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_023); pub const G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_024); pub const G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_025); pub const G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: Self = Self(1_000_156_026); pub const G16B16G16R16_422_UNORM: Self = Self(1_000_156_027); pub const B16G16R16G16_422_UNORM: Self = Self(1_000_156_028); pub const G16_B16_R16_3PLANE_420_UNORM: Self = Self(1_000_156_029); pub const G16_B16R16_2PLANE_420_UNORM: Self = Self(1_000_156_030); pub const G16_B16_R16_3PLANE_422_UNORM: Self = Self(1_000_156_031); pub const G16_B16R16_2PLANE_422_UNORM: Self = Self(1_000_156_032); pub const G16_B16_R16_3PLANE_444_UNORM: Self = Self(1_000_156_033); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl FormatFeatureFlags { #[doc = "Format can be used as the source image of image transfer commands"] pub const TRANSFER_SRC: Self = Self(0b100_0000_0000_0000); #[doc = "Format can be used as the destination image of image transfer commands"] pub const TRANSFER_DST: Self = Self(0b1000_0000_0000_0000); #[doc = "Format can have midpoint rather 
than cosited chroma samples"] pub const MIDPOINT_CHROMA_SAMPLES: Self = Self(0b10_0000_0000_0000_0000); #[doc = "Format can be used with linear filtering whilst color conversion is enabled"] pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER: Self = Self(0b100_0000_0000_0000_0000); #[doc = "Format can have different chroma, min and mag filters"] pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER: Self = Self(0b1000_0000_0000_0000_0000); pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT: Self = Self(0b1_0000_0000_0000_0000_0000); pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE: Self = Self(0b10_0000_0000_0000_0000_0000); #[doc = "Format supports disjoint planes"] pub const DISJOINT: Self = Self(0b100_0000_0000_0000_0000_0000); #[doc = "Format can have cosited rather than midpoint chroma samples"] pub const COSITED_CHROMA_SAMPLES: Self = Self(0b1000_0000_0000_0000_0000_0000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl ImageAspectFlags { pub const PLANE_0: Self = Self(0b1_0000); pub const PLANE_1: Self = Self(0b10_0000); pub const PLANE_2: Self = Self(0b100_0000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl ImageCreateFlags { pub const ALIAS: Self = Self(0b100_0000_0000); #[doc = "Allows using VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions when binding memory to the image"] pub const SPLIT_INSTANCE_BIND_REGIONS: Self = Self(0b100_0000); #[doc = "The 3D image can be viewed as a 2D or 2D array image"] pub const TYPE_2D_ARRAY_COMPATIBLE: Self = Self(0b10_0000); pub const BLOCK_TEXEL_VIEW_COMPATIBLE: Self = Self(0b1000_0000); pub const EXTENDED_USAGE: Self = Self(0b1_0000_0000); #[doc = "Image requires protected memory"] pub const PROTECTED: Self = Self(0b1000_0000_0000); pub const DISJOINT: Self = Self(0b10_0000_0000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl ImageLayout { pub const DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: Self = Self(1_000_117_000); pub const DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: Self = Self(1_000_117_001); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl MemoryHeapFlags { #[doc = "If set, heap allocations allocate multiple instances by default"] pub const MULTI_INSTANCE: Self = Self(0b10); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl MemoryPropertyFlags { #[doc = "Memory is protected"] pub const PROTECTED: Self = Self(0b10_0000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl ObjectType { pub const SAMPLER_YCBCR_CONVERSION: Self = Self(1_000_156_000); pub const DESCRIPTOR_UPDATE_TEMPLATE: Self = Self(1_000_085_000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl PipelineCreateFlags { pub const VIEW_INDEX_FROM_DEVICE_INDEX: Self = Self(0b1000); pub const DISPATCH_BASE: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl QueueFlags { #[doc = "Queues may support protected operations"] pub const PROTECTED: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl Result { pub const ERROR_OUT_OF_POOL_MEMORY: Self = Self(-1_000_069_000); pub const ERROR_INVALID_EXTERNAL_HANDLE: Self = Self(-1_000_072_003); } #[doc = "Generated from 'VK_VERSION_1_1'"] impl StructureType { pub const PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: Self = Self(1_000_094_000); pub const BIND_BUFFER_MEMORY_INFO: Self = Self(1_000_157_000); pub const BIND_IMAGE_MEMORY_INFO: Self = Self(1_000_157_001); pub const PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: Self = Self(1_000_083_000); pub const MEMORY_DEDICATED_REQUIREMENTS: Self = Self(1_000_127_000); 
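The flag constants in this section (FormatFeatureFlags, ImageAspectFlags, ImageCreateFlags, and the rest) are plain bit patterns layered onto the wrapper types generated by `vk_bitflags_wrapped!` in src/vk/macros.rs further below, so they compose with the `|` operator and the `contains` subset test like any other flag. A minimal sketch of testing them against a queried vk::FormatProperties, assuming `ash::vk` is in scope; the function name and the particular flag combination are illustrative and not part of the generated bindings:

fn supports_disjoint_ycbcr(props: &ash::vk::FormatProperties) -> bool {
    use ash::vk::FormatFeatureFlags;
    // Hypothetical requirement: disjoint multi-planar sampling with midpoint chroma siting.
    let required = FormatFeatureFlags::DISJOINT | FormatFeatureFlags::MIDPOINT_CHROMA_SAMPLES;
    // `contains` (defined by vk_bitflags_wrapped!) reports whether `required` is a subset of the
    // optimal-tiling features advertised for the format.
    props.optimal_tiling_features.contains(required)
}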
pub const MEMORY_DEDICATED_ALLOCATE_INFO: Self = Self(1_000_127_001); pub const MEMORY_ALLOCATE_FLAGS_INFO: Self = Self(1_000_060_000); pub const DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: Self = Self(1_000_060_003); pub const DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO: Self = Self(1_000_060_004); pub const DEVICE_GROUP_SUBMIT_INFO: Self = Self(1_000_060_005); pub const DEVICE_GROUP_BIND_SPARSE_INFO: Self = Self(1_000_060_006); pub const BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: Self = Self(1_000_060_013); pub const BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: Self = Self(1_000_060_014); pub const PHYSICAL_DEVICE_GROUP_PROPERTIES: Self = Self(1_000_070_000); pub const DEVICE_GROUP_DEVICE_CREATE_INFO: Self = Self(1_000_070_001); pub const BUFFER_MEMORY_REQUIREMENTS_INFO_2: Self = Self(1_000_146_000); pub const IMAGE_MEMORY_REQUIREMENTS_INFO_2: Self = Self(1_000_146_001); pub const IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2: Self = Self(1_000_146_002); pub const MEMORY_REQUIREMENTS_2: Self = Self(1_000_146_003); pub const SPARSE_IMAGE_MEMORY_REQUIREMENTS_2: Self = Self(1_000_146_004); pub const PHYSICAL_DEVICE_FEATURES_2: Self = Self(1_000_059_000); pub const PHYSICAL_DEVICE_PROPERTIES_2: Self = Self(1_000_059_001); pub const FORMAT_PROPERTIES_2: Self = Self(1_000_059_002); pub const IMAGE_FORMAT_PROPERTIES_2: Self = Self(1_000_059_003); pub const PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2: Self = Self(1_000_059_004); pub const QUEUE_FAMILY_PROPERTIES_2: Self = Self(1_000_059_005); pub const PHYSICAL_DEVICE_MEMORY_PROPERTIES_2: Self = Self(1_000_059_006); pub const SPARSE_IMAGE_FORMAT_PROPERTIES_2: Self = Self(1_000_059_007); pub const PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2: Self = Self(1_000_059_008); pub const PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: Self = Self(1_000_117_000); pub const RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: Self = Self(1_000_117_001); pub const IMAGE_VIEW_USAGE_CREATE_INFO: Self = Self(1_000_117_002); pub const PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: Self = Self(1_000_117_003); pub const RENDER_PASS_MULTIVIEW_CREATE_INFO: Self = Self(1_000_053_000); pub const PHYSICAL_DEVICE_MULTIVIEW_FEATURES: Self = Self(1_000_053_001); pub const PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: Self = Self(1_000_053_002); pub const PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: Self = Self(1_000_120_000); pub const PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES: Self = Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES; pub const PROTECTED_SUBMIT_INFO: Self = Self(1_000_145_000); pub const PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: Self = Self(1_000_145_001); pub const PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: Self = Self(1_000_145_002); pub const DEVICE_QUEUE_INFO_2: Self = Self(1_000_145_003); pub const SAMPLER_YCBCR_CONVERSION_CREATE_INFO: Self = Self(1_000_156_000); pub const SAMPLER_YCBCR_CONVERSION_INFO: Self = Self(1_000_156_001); pub const BIND_IMAGE_PLANE_MEMORY_INFO: Self = Self(1_000_156_002); pub const IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: Self = Self(1_000_156_003); pub const PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: Self = Self(1_000_156_004); pub const SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: Self = Self(1_000_156_005); pub const DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO: Self = Self(1_000_085_000); pub const PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: Self = Self(1_000_071_000); pub const EXTERNAL_IMAGE_FORMAT_PROPERTIES: Self = Self(1_000_071_001); pub const PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO: Self = Self(1_000_071_002); pub const EXTERNAL_BUFFER_PROPERTIES: Self = 
Self(1_000_071_003); pub const PHYSICAL_DEVICE_ID_PROPERTIES: Self = Self(1_000_071_004); pub const EXTERNAL_MEMORY_BUFFER_CREATE_INFO: Self = Self(1_000_072_000); pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO: Self = Self(1_000_072_001); pub const EXPORT_MEMORY_ALLOCATE_INFO: Self = Self(1_000_072_002); pub const PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO: Self = Self(1_000_112_000); pub const EXTERNAL_FENCE_PROPERTIES: Self = Self(1_000_112_001); pub const EXPORT_FENCE_CREATE_INFO: Self = Self(1_000_113_000); pub const EXPORT_SEMAPHORE_CREATE_INFO: Self = Self(1_000_077_000); pub const PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO: Self = Self(1_000_076_000); pub const EXTERNAL_SEMAPHORE_PROPERTIES: Self = Self(1_000_076_001); pub const PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: Self = Self(1_000_168_000); pub const DESCRIPTOR_SET_LAYOUT_SUPPORT: Self = Self(1_000_168_001); pub const PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: Self = Self(1_000_063_000); pub const PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES: Self = Self::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES; } #[doc = "Generated from 'VK_VERSION_1_2'"] impl BufferCreateFlags { pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b1_0000); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl BufferUsageFlags { pub const SHADER_DEVICE_ADDRESS: Self = Self(0b10_0000_0000_0000_0000); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl DescriptorPoolCreateFlags { pub const UPDATE_AFTER_BIND: Self = Self(0b10); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl DescriptorSetLayoutCreateFlags { pub const UPDATE_AFTER_BIND_POOL: Self = Self(0b10); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl FormatFeatureFlags { #[doc = "Format can be used with min/max reduction filtering"] pub const SAMPLED_IMAGE_FILTER_MINMAX: Self = Self(0b1_0000_0000_0000_0000); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl FramebufferCreateFlags { pub const IMAGELESS: Self = Self(0b1); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl ImageLayout { pub const DEPTH_ATTACHMENT_OPTIMAL: Self = Self(1_000_241_000); pub const DEPTH_READ_ONLY_OPTIMAL: Self = Self(1_000_241_001); pub const STENCIL_ATTACHMENT_OPTIMAL: Self = Self(1_000_241_002); pub const STENCIL_READ_ONLY_OPTIMAL: Self = Self(1_000_241_003); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl MemoryAllocateFlags { pub const DEVICE_ADDRESS: Self = Self(0b10); pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b100); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl Result { pub const ERROR_FRAGMENTATION: Self = Self(-1_000_161_000); pub const ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: Self = Self(-1_000_257_000); } #[doc = "Generated from 'VK_VERSION_1_2'"] impl StructureType { pub const PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: Self = Self(49); pub const PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES: Self = Self(50); pub const PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: Self = Self(51); pub const PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES: Self = Self(52); pub const IMAGE_FORMAT_LIST_CREATE_INFO: Self = Self(1_000_147_000); pub const ATTACHMENT_DESCRIPTION_2: Self = Self(1_000_109_000); pub const ATTACHMENT_REFERENCE_2: Self = Self(1_000_109_001); pub const SUBPASS_DESCRIPTION_2: Self = Self(1_000_109_002); pub const SUBPASS_DEPENDENCY_2: Self = Self(1_000_109_003); pub const RENDER_PASS_CREATE_INFO_2: Self = Self(1_000_109_004); pub const SUBPASS_BEGIN_INFO: Self = Self(1_000_109_005); pub const SUBPASS_END_INFO: Self = Self(1_000_109_006); pub const PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: Self = Self(1_000_177_000); pub const 
PHYSICAL_DEVICE_DRIVER_PROPERTIES: Self = Self(1_000_196_000); pub const PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: Self = Self(1_000_180_000); pub const PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: Self = Self(1_000_082_000); pub const PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: Self = Self(1_000_197_000); pub const DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO: Self = Self(1_000_161_000); pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: Self = Self(1_000_161_001); pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: Self = Self(1_000_161_002); pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO: Self = Self(1_000_161_003); pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: Self = Self(1_000_161_004); pub const PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: Self = Self(1_000_199_000); pub const SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE: Self = Self(1_000_199_001); pub const PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: Self = Self(1_000_221_000); pub const IMAGE_STENCIL_USAGE_CREATE_INFO: Self = Self(1_000_246_000); pub const PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: Self = Self(1_000_130_000); pub const SAMPLER_REDUCTION_MODE_CREATE_INFO: Self = Self(1_000_130_001); pub const PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES: Self = Self(1_000_211_000); pub const PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES: Self = Self(1_000_108_000); pub const FRAMEBUFFER_ATTACHMENTS_CREATE_INFO: Self = Self(1_000_108_001); pub const FRAMEBUFFER_ATTACHMENT_IMAGE_INFO: Self = Self(1_000_108_002); pub const RENDER_PASS_ATTACHMENT_BEGIN_INFO: Self = Self(1_000_108_003); pub const PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES: Self = Self(1_000_253_000); pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES: Self = Self(1_000_175_000); pub const PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: Self = Self(1_000_241_000); pub const ATTACHMENT_REFERENCE_STENCIL_LAYOUT: Self = Self(1_000_241_001); pub const ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT: Self = Self(1_000_241_002); pub const PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: Self = Self(1_000_261_000); pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: Self = Self(1_000_207_000); pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: Self = Self(1_000_207_001); pub const SEMAPHORE_TYPE_CREATE_INFO: Self = Self(1_000_207_002); pub const TIMELINE_SEMAPHORE_SUBMIT_INFO: Self = Self(1_000_207_003); pub const SEMAPHORE_WAIT_INFO: Self = Self(1_000_207_004); pub const SEMAPHORE_SIGNAL_INFO: Self = Self(1_000_207_005); pub const PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: Self = Self(1_000_257_000); pub const BUFFER_DEVICE_ADDRESS_INFO: Self = Self(1_000_244_001); pub const BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO: Self = Self(1_000_257_002); pub const MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO: Self = Self(1_000_257_003); pub const DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO: Self = Self(1_000_257_004); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl AccessFlags { pub const NONE: Self = Self(0); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl AttachmentStoreOp { pub const NONE: Self = Self(1_000_301_000); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl DescriptorType { pub const INLINE_UNIFORM_BLOCK: Self = Self(1_000_138_000); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl DynamicState { pub const CULL_MODE: Self = Self(1_000_267_000); pub const FRONT_FACE: Self = Self(1_000_267_001); pub const PRIMITIVE_TOPOLOGY: Self = Self(1_000_267_002); pub const 
VIEWPORT_WITH_COUNT: Self = Self(1_000_267_003); pub const SCISSOR_WITH_COUNT: Self = Self(1_000_267_004); pub const VERTEX_INPUT_BINDING_STRIDE: Self = Self(1_000_267_005); pub const DEPTH_TEST_ENABLE: Self = Self(1_000_267_006); pub const DEPTH_WRITE_ENABLE: Self = Self(1_000_267_007); pub const DEPTH_COMPARE_OP: Self = Self(1_000_267_008); pub const DEPTH_BOUNDS_TEST_ENABLE: Self = Self(1_000_267_009); pub const STENCIL_TEST_ENABLE: Self = Self(1_000_267_010); pub const STENCIL_OP: Self = Self(1_000_267_011); pub const RASTERIZER_DISCARD_ENABLE: Self = Self(1_000_377_001); pub const DEPTH_BIAS_ENABLE: Self = Self(1_000_377_002); pub const PRIMITIVE_RESTART_ENABLE: Self = Self(1_000_377_004); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl EventCreateFlags { pub const DEVICE_ONLY: Self = Self(0b1); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl Format { pub const G8_B8R8_2PLANE_444_UNORM: Self = Self(1_000_330_000); pub const G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: Self = Self(1_000_330_001); pub const G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: Self = Self(1_000_330_002); pub const G16_B16R16_2PLANE_444_UNORM: Self = Self(1_000_330_003); pub const A4R4G4B4_UNORM_PACK16: Self = Self(1_000_340_000); pub const A4B4G4R4_UNORM_PACK16: Self = Self(1_000_340_001); pub const ASTC_4X4_SFLOAT_BLOCK: Self = Self(1_000_066_000); pub const ASTC_5X4_SFLOAT_BLOCK: Self = Self(1_000_066_001); pub const ASTC_5X5_SFLOAT_BLOCK: Self = Self(1_000_066_002); pub const ASTC_6X5_SFLOAT_BLOCK: Self = Self(1_000_066_003); pub const ASTC_6X6_SFLOAT_BLOCK: Self = Self(1_000_066_004); pub const ASTC_8X5_SFLOAT_BLOCK: Self = Self(1_000_066_005); pub const ASTC_8X6_SFLOAT_BLOCK: Self = Self(1_000_066_006); pub const ASTC_8X8_SFLOAT_BLOCK: Self = Self(1_000_066_007); pub const ASTC_10X5_SFLOAT_BLOCK: Self = Self(1_000_066_008); pub const ASTC_10X6_SFLOAT_BLOCK: Self = Self(1_000_066_009); pub const ASTC_10X8_SFLOAT_BLOCK: Self = Self(1_000_066_010); pub const ASTC_10X10_SFLOAT_BLOCK: Self = Self(1_000_066_011); pub const ASTC_12X10_SFLOAT_BLOCK: Self = Self(1_000_066_012); pub const ASTC_12X12_SFLOAT_BLOCK: Self = Self(1_000_066_013); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl ImageAspectFlags { pub const NONE: Self = Self(0); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl ImageLayout { pub const READ_ONLY_OPTIMAL: Self = Self(1_000_314_000); pub const ATTACHMENT_OPTIMAL: Self = Self(1_000_314_001); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl ObjectType { pub const PRIVATE_DATA_SLOT: Self = Self(1_000_295_000); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl PipelineCacheCreateFlags { pub const EXTERNALLY_SYNCHRONIZED: Self = Self(0b1); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl PipelineCreateFlags { pub const FAIL_ON_PIPELINE_COMPILE_REQUIRED: Self = Self(0b1_0000_0000); pub const EARLY_RETURN_ON_FAILURE: Self = Self(0b10_0000_0000); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl PipelineShaderStageCreateFlags { pub const ALLOW_VARYING_SUBGROUP_SIZE: Self = Self(0b1); pub const REQUIRE_FULL_SUBGROUPS: Self = Self(0b10); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl PipelineStageFlags { pub const NONE: Self = Self(0); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl Result { pub const PIPELINE_COMPILE_REQUIRED: Self = Self(1_000_297_000); } #[doc = "Generated from 'VK_VERSION_1_3'"] impl StructureType { pub const PHYSICAL_DEVICE_VULKAN_1_3_FEATURES: Self = Self(53); pub const PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES: Self = Self(54); pub const 
PIPELINE_CREATION_FEEDBACK_CREATE_INFO: Self = Self(1_000_192_000); pub const PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: Self = Self(1_000_215_000); pub const PHYSICAL_DEVICE_TOOL_PROPERTIES: Self = Self(1_000_245_000); pub const PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES: Self = Self(1_000_276_000); pub const PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES: Self = Self(1_000_295_000); pub const DEVICE_PRIVATE_DATA_CREATE_INFO: Self = Self(1_000_295_001); pub const PRIVATE_DATA_SLOT_CREATE_INFO: Self = Self(1_000_295_002); pub const PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: Self = Self(1_000_297_000); pub const MEMORY_BARRIER_2: Self = Self(1_000_314_000); pub const BUFFER_MEMORY_BARRIER_2: Self = Self(1_000_314_001); pub const IMAGE_MEMORY_BARRIER_2: Self = Self(1_000_314_002); pub const DEPENDENCY_INFO: Self = Self(1_000_314_003); pub const SUBMIT_INFO_2: Self = Self(1_000_314_004); pub const SEMAPHORE_SUBMIT_INFO: Self = Self(1_000_314_005); pub const COMMAND_BUFFER_SUBMIT_INFO: Self = Self(1_000_314_006); pub const PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: Self = Self(1_000_314_007); pub const PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: Self = Self(1_000_325_000); pub const PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: Self = Self(1_000_335_000); pub const COPY_BUFFER_INFO_2: Self = Self(1_000_337_000); pub const COPY_IMAGE_INFO_2: Self = Self(1_000_337_001); pub const COPY_BUFFER_TO_IMAGE_INFO_2: Self = Self(1_000_337_002); pub const COPY_IMAGE_TO_BUFFER_INFO_2: Self = Self(1_000_337_003); pub const BLIT_IMAGE_INFO_2: Self = Self(1_000_337_004); pub const RESOLVE_IMAGE_INFO_2: Self = Self(1_000_337_005); pub const BUFFER_COPY_2: Self = Self(1_000_337_006); pub const IMAGE_COPY_2: Self = Self(1_000_337_007); pub const IMAGE_BLIT_2: Self = Self(1_000_337_008); pub const BUFFER_IMAGE_COPY_2: Self = Self(1_000_337_009); pub const IMAGE_RESOLVE_2: Self = Self(1_000_337_010); pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: Self = Self(1_000_225_000); pub const PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO: Self = Self(1_000_225_001); pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: Self = Self(1_000_225_002); pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: Self = Self(1_000_138_000); pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: Self = Self(1_000_138_001); pub const WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK: Self = Self(1_000_138_002); pub const DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO: Self = Self(1_000_138_003); pub const PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES: Self = Self(1_000_066_000); pub const RENDERING_INFO: Self = Self(1_000_044_000); pub const RENDERING_ATTACHMENT_INFO: Self = Self(1_000_044_001); pub const PIPELINE_RENDERING_CREATE_INFO: Self = Self(1_000_044_002); pub const PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES: Self = Self(1_000_044_003); pub const COMMAND_BUFFER_INHERITANCE_RENDERING_INFO: Self = Self(1_000_044_004); pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: Self = Self(1_000_280_000); pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: Self = Self(1_000_280_001); pub const PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: Self = Self(1_000_281_001); pub const FORMAT_PROPERTIES_3: Self = Self(1_000_360_000); pub const PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: Self = Self(1_000_413_000); pub const PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: Self = Self(1_000_413_001); pub const DEVICE_BUFFER_MEMORY_REQUIREMENTS: Self = 
Self(1_000_413_002); pub const DEVICE_IMAGE_MEMORY_REQUIREMENTS: Self = Self(1_000_413_003); } ash-0.38.0+1.3.281/src/vk/features.rs000064400000000000000000000762141046102023000147020ustar 00000000000000use crate::vk::bitflags::*; use crate::vk::definitions::*; use crate::vk::enums::*; use core::ffi::*; #[allow(non_camel_case_types)] pub type PFN_vkGetInstanceProcAddr = unsafe extern "system" fn( instance: crate::vk::Instance, p_name: *const c_char, ) -> PFN_vkVoidFunction; #[allow(non_camel_case_types)] pub type PFN_vkCreateInstance = unsafe extern "system" fn( p_create_info: *const InstanceCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_instance: *mut crate::vk::Instance, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkEnumerateInstanceExtensionProperties = unsafe extern "system" fn( p_layer_name: *const c_char, p_property_count: *mut u32, p_properties: *mut ExtensionProperties, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkEnumerateInstanceLayerProperties = unsafe extern "system" fn( p_property_count: *mut u32, p_properties: *mut LayerProperties, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyInstance = unsafe extern "system" fn( instance: crate::vk::Instance, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkEnumeratePhysicalDevices = unsafe extern "system" fn( instance: crate::vk::Instance, p_physical_device_count: *mut u32, p_physical_devices: *mut PhysicalDevice, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceFeatures = unsafe extern "system" fn( physical_device: PhysicalDevice, p_features: *mut PhysicalDeviceFeatures, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceFormatProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, format: Format, p_format_properties: *mut FormatProperties, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceImageFormatProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, format: Format, ty: ImageType, tiling: ImageTiling, usage: ImageUsageFlags, flags: ImageCreateFlags, p_image_format_properties: *mut ImageFormatProperties, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_properties: *mut PhysicalDeviceProperties, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceQueueFamilyProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_queue_family_property_count: *mut u32, p_queue_family_properties: *mut QueueFamilyProperties, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceMemoryProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_memory_properties: *mut PhysicalDeviceMemoryProperties, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceProcAddr = unsafe extern "system" fn( device: crate::vk::Device, p_name: *const c_char, ) -> PFN_vkVoidFunction; #[allow(non_camel_case_types)] pub type PFN_vkCreateDevice = unsafe extern "system" fn( physical_device: PhysicalDevice, p_create_info: *const DeviceCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_device: *mut crate::vk::Device, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkEnumerateDeviceExtensionProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_layer_name: *const c_char, p_property_count: *mut u32, p_properties: *mut ExtensionProperties, ) -> Result; #[allow(non_camel_case_types)] pub 
type PFN_vkEnumerateDeviceLayerProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, p_property_count: *mut u32, p_properties: *mut LayerProperties, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetPhysicalDeviceSparseImageFormatProperties = unsafe extern "system" fn( physical_device: PhysicalDevice, format: Format, ty: ImageType, samples: SampleCountFlags, usage: ImageUsageFlags, tiling: ImageTiling, p_property_count: *mut u32, p_properties: *mut SparseImageFormatProperties, ); #[allow(non_camel_case_types)] pub type PFN_vkDestroyDevice = unsafe extern "system" fn( device: crate::vk::Device, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceQueue = unsafe extern "system" fn( device: crate::vk::Device, queue_family_index: u32, queue_index: u32, p_queue: *mut Queue, ); #[allow(non_camel_case_types)] pub type PFN_vkQueueSubmit = unsafe extern "system" fn( queue: Queue, submit_count: u32, p_submits: *const SubmitInfo<'_>, fence: Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkQueueWaitIdle = unsafe extern "system" fn(queue: Queue) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDeviceWaitIdle = unsafe extern "system" fn(device: crate::vk::Device) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAllocateMemory = unsafe extern "system" fn( device: crate::vk::Device, p_allocate_info: *const MemoryAllocateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_memory: *mut DeviceMemory, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkFreeMemory = unsafe extern "system" fn( device: crate::vk::Device, memory: DeviceMemory, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkMapMemory = unsafe extern "system" fn( device: crate::vk::Device, memory: DeviceMemory, offset: DeviceSize, size: DeviceSize, flags: MemoryMapFlags, pp_data: *mut *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkUnmapMemory = unsafe extern "system" fn(device: crate::vk::Device, memory: DeviceMemory); #[allow(non_camel_case_types)] pub type PFN_vkFlushMappedMemoryRanges = unsafe extern "system" fn( device: crate::vk::Device, memory_range_count: u32, p_memory_ranges: *const MappedMemoryRange<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkInvalidateMappedMemoryRanges = unsafe extern "system" fn( device: crate::vk::Device, memory_range_count: u32, p_memory_ranges: *const MappedMemoryRange<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceMemoryCommitment = unsafe extern "system" fn( device: crate::vk::Device, memory: DeviceMemory, p_committed_memory_in_bytes: *mut DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkBindBufferMemory = unsafe extern "system" fn( device: crate::vk::Device, buffer: Buffer, memory: DeviceMemory, memory_offset: DeviceSize, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkBindImageMemory = unsafe extern "system" fn( device: crate::vk::Device, image: Image, memory: DeviceMemory, memory_offset: DeviceSize, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetBufferMemoryRequirements = unsafe extern "system" fn( device: crate::vk::Device, buffer: Buffer, p_memory_requirements: *mut MemoryRequirements, ); #[allow(non_camel_case_types)] pub type PFN_vkGetImageMemoryRequirements = unsafe extern "system" fn( device: crate::vk::Device, image: Image, p_memory_requirements: *mut MemoryRequirements, ); #[allow(non_camel_case_types)] pub type 
PFN_vkGetImageSparseMemoryRequirements = unsafe extern "system" fn( device: crate::vk::Device, image: Image, p_sparse_memory_requirement_count: *mut u32, p_sparse_memory_requirements: *mut SparseImageMemoryRequirements, ); #[allow(non_camel_case_types)] pub type PFN_vkQueueBindSparse = unsafe extern "system" fn( queue: Queue, bind_info_count: u32, p_bind_info: *const BindSparseInfo<'_>, fence: Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateFence = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const FenceCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_fence: *mut Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyFence = unsafe extern "system" fn( device: crate::vk::Device, fence: Fence, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkResetFences = unsafe extern "system" fn( device: crate::vk::Device, fence_count: u32, p_fences: *const Fence, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetFenceStatus = unsafe extern "system" fn(device: crate::vk::Device, fence: Fence) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkWaitForFences = unsafe extern "system" fn( device: crate::vk::Device, fence_count: u32, p_fences: *const Fence, wait_all: Bool32, timeout: u64, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateSemaphore = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const SemaphoreCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_semaphore: *mut Semaphore, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroySemaphore = unsafe extern "system" fn( device: crate::vk::Device, semaphore: Semaphore, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateEvent = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const EventCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_event: *mut Event, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyEvent = unsafe extern "system" fn( device: crate::vk::Device, event: Event, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetEventStatus = unsafe extern "system" fn(device: crate::vk::Device, event: Event) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkSetEvent = unsafe extern "system" fn(device: crate::vk::Device, event: Event) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkResetEvent = unsafe extern "system" fn(device: crate::vk::Device, event: Event) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateQueryPool = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const QueryPoolCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_query_pool: *mut QueryPool, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyQueryPool = unsafe extern "system" fn( device: crate::vk::Device, query_pool: QueryPool, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetQueryPoolResults = unsafe extern "system" fn( device: crate::vk::Device, query_pool: QueryPool, first_query: u32, query_count: u32, data_size: usize, p_data: *mut c_void, stride: DeviceSize, flags: QueryResultFlags, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateBuffer = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const BufferCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_buffer: *mut 
Buffer, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyBuffer = unsafe extern "system" fn( device: crate::vk::Device, buffer: Buffer, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateBufferView = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const BufferViewCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_view: *mut BufferView, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyBufferView = unsafe extern "system" fn( device: crate::vk::Device, buffer_view: BufferView, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateImage = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const ImageCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_image: *mut Image, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyImage = unsafe extern "system" fn( device: crate::vk::Device, image: Image, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetImageSubresourceLayout = unsafe extern "system" fn( device: crate::vk::Device, image: Image, p_subresource: *const ImageSubresource, p_layout: *mut SubresourceLayout, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateImageView = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const ImageViewCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_view: *mut ImageView, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyImageView = unsafe extern "system" fn( device: crate::vk::Device, image_view: ImageView, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateShaderModule = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const ShaderModuleCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_shader_module: *mut ShaderModule, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyShaderModule = unsafe extern "system" fn( device: crate::vk::Device, shader_module: ShaderModule, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreatePipelineCache = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const PipelineCacheCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipeline_cache: *mut PipelineCache, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyPipelineCache = unsafe extern "system" fn( device: crate::vk::Device, pipeline_cache: PipelineCache, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetPipelineCacheData = unsafe extern "system" fn( device: crate::vk::Device, pipeline_cache: PipelineCache, p_data_size: *mut usize, p_data: *mut c_void, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkMergePipelineCaches = unsafe extern "system" fn( device: crate::vk::Device, dst_cache: PipelineCache, src_cache_count: u32, p_src_caches: *const PipelineCache, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateGraphicsPipelines = unsafe extern "system" fn( device: crate::vk::Device, pipeline_cache: PipelineCache, create_info_count: u32, p_create_infos: *const GraphicsPipelineCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipelines: *mut Pipeline, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCreateComputePipelines = unsafe extern "system" fn( device: crate::vk::Device, pipeline_cache: 
PipelineCache, create_info_count: u32, p_create_infos: *const ComputePipelineCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipelines: *mut Pipeline, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyPipeline = unsafe extern "system" fn( device: crate::vk::Device, pipeline: Pipeline, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreatePipelineLayout = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const PipelineLayoutCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_pipeline_layout: *mut PipelineLayout, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyPipelineLayout = unsafe extern "system" fn( device: crate::vk::Device, pipeline_layout: PipelineLayout, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateSampler = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const SamplerCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_sampler: *mut Sampler, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroySampler = unsafe extern "system" fn( device: crate::vk::Device, sampler: Sampler, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateDescriptorSetLayout = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const DescriptorSetLayoutCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_set_layout: *mut DescriptorSetLayout, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyDescriptorSetLayout = unsafe extern "system" fn( device: crate::vk::Device, descriptor_set_layout: DescriptorSetLayout, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateDescriptorPool = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const DescriptorPoolCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_descriptor_pool: *mut DescriptorPool, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyDescriptorPool = unsafe extern "system" fn( device: crate::vk::Device, descriptor_pool: DescriptorPool, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkResetDescriptorPool = unsafe extern "system" fn( device: crate::vk::Device, descriptor_pool: DescriptorPool, flags: DescriptorPoolResetFlags, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAllocateDescriptorSets = unsafe extern "system" fn( device: crate::vk::Device, p_allocate_info: *const DescriptorSetAllocateInfo<'_>, p_descriptor_sets: *mut DescriptorSet, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkFreeDescriptorSets = unsafe extern "system" fn( device: crate::vk::Device, descriptor_pool: DescriptorPool, descriptor_set_count: u32, p_descriptor_sets: *const DescriptorSet, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkUpdateDescriptorSets = unsafe extern "system" fn( device: crate::vk::Device, descriptor_write_count: u32, p_descriptor_writes: *const WriteDescriptorSet<'_>, descriptor_copy_count: u32, p_descriptor_copies: *const CopyDescriptorSet<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateFramebuffer = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const FramebufferCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_framebuffer: *mut Framebuffer, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyFramebuffer = unsafe extern 
"system" fn( device: crate::vk::Device, framebuffer: Framebuffer, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateRenderPass = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const RenderPassCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_render_pass: *mut RenderPass, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyRenderPass = unsafe extern "system" fn( device: crate::vk::Device, render_pass: RenderPass, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkGetRenderAreaGranularity = unsafe extern "system" fn( device: crate::vk::Device, render_pass: RenderPass, p_granularity: *mut Extent2D, ); #[allow(non_camel_case_types)] pub type PFN_vkCreateCommandPool = unsafe extern "system" fn( device: crate::vk::Device, p_create_info: *const CommandPoolCreateInfo<'_>, p_allocator: *const AllocationCallbacks<'_>, p_command_pool: *mut CommandPool, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkDestroyCommandPool = unsafe extern "system" fn( device: crate::vk::Device, command_pool: CommandPool, p_allocator: *const AllocationCallbacks<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkResetCommandPool = unsafe extern "system" fn( device: crate::vk::Device, command_pool: CommandPool, flags: CommandPoolResetFlags, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkAllocateCommandBuffers = unsafe extern "system" fn( device: crate::vk::Device, p_allocate_info: *const CommandBufferAllocateInfo<'_>, p_command_buffers: *mut CommandBuffer, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkFreeCommandBuffers = unsafe extern "system" fn( device: crate::vk::Device, command_pool: CommandPool, command_buffer_count: u32, p_command_buffers: *const CommandBuffer, ); #[allow(non_camel_case_types)] pub type PFN_vkBeginCommandBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, p_begin_info: *const CommandBufferBeginInfo<'_>, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkEndCommandBuffer = unsafe extern "system" fn(command_buffer: CommandBuffer) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkResetCommandBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, flags: CommandBufferResetFlags, ) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkCmdBindPipeline = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, pipeline: Pipeline, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetViewport = unsafe extern "system" fn( command_buffer: CommandBuffer, first_viewport: u32, viewport_count: u32, p_viewports: *const Viewport, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetScissor = unsafe extern "system" fn( command_buffer: CommandBuffer, first_scissor: u32, scissor_count: u32, p_scissors: *const Rect2D, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetLineWidth = unsafe extern "system" fn(command_buffer: CommandBuffer, line_width: f32); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthBias = unsafe extern "system" fn( command_buffer: CommandBuffer, depth_bias_constant_factor: f32, depth_bias_clamp: f32, depth_bias_slope_factor: f32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetBlendConstants = unsafe extern "system" fn(command_buffer: CommandBuffer, blend_constants: *const [f32; 4usize]); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetDepthBounds = unsafe extern "system" fn( command_buffer: CommandBuffer, 
min_depth_bounds: f32, max_depth_bounds: f32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetStencilCompareMask = unsafe extern "system" fn( command_buffer: CommandBuffer, face_mask: StencilFaceFlags, compare_mask: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetStencilWriteMask = unsafe extern "system" fn( command_buffer: CommandBuffer, face_mask: StencilFaceFlags, write_mask: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetStencilReference = unsafe extern "system" fn( command_buffer: CommandBuffer, face_mask: StencilFaceFlags, reference: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindDescriptorSets = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_bind_point: PipelineBindPoint, layout: PipelineLayout, first_set: u32, descriptor_set_count: u32, p_descriptor_sets: *const DescriptorSet, dynamic_offset_count: u32, p_dynamic_offsets: *const u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindIndexBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, index_type: IndexType, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBindVertexBuffers = unsafe extern "system" fn( command_buffer: CommandBuffer, first_binding: u32, binding_count: u32, p_buffers: *const Buffer, p_offsets: *const DeviceSize, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDraw = unsafe extern "system" fn( command_buffer: CommandBuffer, vertex_count: u32, instance_count: u32, first_vertex: u32, first_instance: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawIndexed = unsafe extern "system" fn( command_buffer: CommandBuffer, index_count: u32, instance_count: u32, first_index: u32, vertex_offset: i32, first_instance: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawIndirect = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, draw_count: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDrawIndexedIndirect = unsafe extern "system" fn( command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize, draw_count: u32, stride: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDispatch = unsafe extern "system" fn( command_buffer: CommandBuffer, group_count_x: u32, group_count_y: u32, group_count_z: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdDispatchIndirect = unsafe extern "system" fn(command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, src_buffer: Buffer, dst_buffer: Buffer, region_count: u32, p_regions: *const BufferCopy, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyImage = unsafe extern "system" fn( command_buffer: CommandBuffer, src_image: Image, src_image_layout: ImageLayout, dst_image: Image, dst_image_layout: ImageLayout, region_count: u32, p_regions: *const ImageCopy, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBlitImage = unsafe extern "system" fn( command_buffer: CommandBuffer, src_image: Image, src_image_layout: ImageLayout, dst_image: Image, dst_image_layout: ImageLayout, region_count: u32, p_regions: *const ImageBlit, filter: Filter, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyBufferToImage = unsafe extern "system" fn( command_buffer: CommandBuffer, src_buffer: Buffer, dst_image: Image, dst_image_layout: ImageLayout, region_count: u32, p_regions: *const BufferImageCopy, ); #[allow(non_camel_case_types)] pub type 
PFN_vkCmdCopyImageToBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, src_image: Image, src_image_layout: ImageLayout, dst_buffer: Buffer, region_count: u32, p_regions: *const BufferImageCopy, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdUpdateBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, dst_buffer: Buffer, dst_offset: DeviceSize, data_size: DeviceSize, p_data: *const c_void, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdFillBuffer = unsafe extern "system" fn( command_buffer: CommandBuffer, dst_buffer: Buffer, dst_offset: DeviceSize, size: DeviceSize, data: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdClearColorImage = unsafe extern "system" fn( command_buffer: CommandBuffer, image: Image, image_layout: ImageLayout, p_color: *const ClearColorValue, range_count: u32, p_ranges: *const ImageSubresourceRange, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdClearDepthStencilImage = unsafe extern "system" fn( command_buffer: CommandBuffer, image: Image, image_layout: ImageLayout, p_depth_stencil: *const ClearDepthStencilValue, range_count: u32, p_ranges: *const ImageSubresourceRange, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdClearAttachments = unsafe extern "system" fn( command_buffer: CommandBuffer, attachment_count: u32, p_attachments: *const ClearAttachment, rect_count: u32, p_rects: *const ClearRect, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdResolveImage = unsafe extern "system" fn( command_buffer: CommandBuffer, src_image: Image, src_image_layout: ImageLayout, dst_image: Image, dst_image_layout: ImageLayout, region_count: u32, p_regions: *const ImageResolve, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdSetEvent = unsafe extern "system" fn( command_buffer: CommandBuffer, event: Event, stage_mask: PipelineStageFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdResetEvent = unsafe extern "system" fn( command_buffer: CommandBuffer, event: Event, stage_mask: PipelineStageFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdWaitEvents = unsafe extern "system" fn( command_buffer: CommandBuffer, event_count: u32, p_events: *const Event, src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags, memory_barrier_count: u32, p_memory_barriers: *const MemoryBarrier<'_>, buffer_memory_barrier_count: u32, p_buffer_memory_barriers: *const BufferMemoryBarrier<'_>, image_memory_barrier_count: u32, p_image_memory_barriers: *const ImageMemoryBarrier<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPipelineBarrier = unsafe extern "system" fn( command_buffer: CommandBuffer, src_stage_mask: PipelineStageFlags, dst_stage_mask: PipelineStageFlags, dependency_flags: DependencyFlags, memory_barrier_count: u32, p_memory_barriers: *const MemoryBarrier<'_>, buffer_memory_barrier_count: u32, p_buffer_memory_barriers: *const BufferMemoryBarrier<'_>, image_memory_barrier_count: u32, p_image_memory_barriers: *const ImageMemoryBarrier<'_>, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginQuery = unsafe extern "system" fn( command_buffer: CommandBuffer, query_pool: QueryPool, query: u32, flags: QueryControlFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndQuery = unsafe extern "system" fn(command_buffer: CommandBuffer, query_pool: QueryPool, query: u32); #[allow(non_camel_case_types)] pub type PFN_vkCmdResetQueryPool = unsafe extern "system" fn( command_buffer: CommandBuffer, query_pool: QueryPool, first_query: u32, query_count: u32, ); #[allow(non_camel_case_types)] pub type 
PFN_vkCmdWriteTimestamp = unsafe extern "system" fn( command_buffer: CommandBuffer, pipeline_stage: PipelineStageFlags, query_pool: QueryPool, query: u32, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdCopyQueryPoolResults = unsafe extern "system" fn( command_buffer: CommandBuffer, query_pool: QueryPool, first_query: u32, query_count: u32, dst_buffer: Buffer, dst_offset: DeviceSize, stride: DeviceSize, flags: QueryResultFlags, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdPushConstants = unsafe extern "system" fn( command_buffer: CommandBuffer, layout: PipelineLayout, stage_flags: ShaderStageFlags, offset: u32, size: u32, p_values: *const c_void, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdBeginRenderPass = unsafe extern "system" fn( command_buffer: CommandBuffer, p_render_pass_begin: *const RenderPassBeginInfo<'_>, contents: SubpassContents, ); #[allow(non_camel_case_types)] pub type PFN_vkCmdNextSubpass = unsafe extern "system" fn(command_buffer: CommandBuffer, contents: SubpassContents); #[allow(non_camel_case_types)] pub type PFN_vkCmdEndRenderPass = unsafe extern "system" fn(command_buffer: CommandBuffer); #[allow(non_camel_case_types)] pub type PFN_vkCmdExecuteCommands = unsafe extern "system" fn( command_buffer: CommandBuffer, command_buffer_count: u32, p_command_buffers: *const CommandBuffer, ); #[allow(non_camel_case_types)] pub type PFN_vkEnumerateInstanceVersion = unsafe extern "system" fn(p_api_version: *mut u32) -> Result; #[allow(non_camel_case_types)] pub type PFN_vkGetDeviceQueue2 = unsafe extern "system" fn( device: crate::vk::Device, p_queue_info: *const DeviceQueueInfo2<'_>, p_queue: *mut Queue, ); ash-0.38.0+1.3.281/src/vk/macros.rs000064400000000000000000000113671046102023000143460ustar 00000000000000#[macro_export] macro_rules! vk_bitflags_wrapped { ($ name : ident , $ flag_type : ty) => { impl Default for $name { fn default() -> Self { Self(0) } } impl $name { #[inline] pub const fn empty() -> Self { Self(0) } #[inline] pub const fn from_raw(x: $flag_type) -> Self { Self(x) } #[inline] pub const fn as_raw(self) -> $flag_type { self.0 } #[inline] pub const fn is_empty(self) -> bool { self.0 == Self::empty().0 } #[inline] pub const fn intersects(self, other: Self) -> bool { !Self(self.0 & other.0).is_empty() } #[doc = r" Returns whether `other` is a subset of `self`"] #[inline] pub const fn contains(self, other: Self) -> bool { self.0 & other.0 == other.0 } } impl ::core::ops::BitOr for $name { type Output = Self; #[inline] fn bitor(self, rhs: Self) -> Self { Self(self.0 | rhs.0) } } impl ::core::ops::BitOrAssign for $name { #[inline] fn bitor_assign(&mut self, rhs: Self) { *self = *self | rhs } } impl ::core::ops::BitAnd for $name { type Output = Self; #[inline] fn bitand(self, rhs: Self) -> Self { Self(self.0 & rhs.0) } } impl ::core::ops::BitAndAssign for $name { #[inline] fn bitand_assign(&mut self, rhs: Self) { *self = *self & rhs } } impl ::core::ops::BitXor for $name { type Output = Self; #[inline] fn bitxor(self, rhs: Self) -> Self { Self(self.0 ^ rhs.0) } } impl ::core::ops::BitXorAssign for $name { #[inline] fn bitxor_assign(&mut self, rhs: Self) { *self = *self ^ rhs } } impl ::core::ops::Not for $name { type Output = Self; #[inline] fn not(self) -> Self { Self(!self.0) } } }; } #[macro_export] macro_rules! 
handle_nondispatchable { ($ name : ident , $ ty : ident) => { handle_nondispatchable!($name, $ty, doc = ""); }; ($ name : ident , $ ty : ident , $ doc_link : meta) => { #[repr(transparent)] #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Default)] #[$doc_link] pub struct $name(u64); impl Handle for $name { const TYPE: ObjectType = ObjectType::$ty; fn as_raw(self) -> u64 { self.0 } fn from_raw(x: u64) -> Self { Self(x) } } impl $name { pub const fn null() -> Self { Self(0) } } impl fmt::Pointer for $name { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "0x{:x}", self.0) } } impl fmt::Debug for $name { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "0x{:x}", self.0) } } }; } #[macro_export] macro_rules! define_handle { ($ name : ident , $ ty : ident) => { define_handle!($name, $ty, doc = ""); }; ($ name : ident , $ ty : ident , $ doc_link : meta) => { #[repr(transparent)] #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash)] #[$doc_link] pub struct $name(*mut u8); impl Default for $name { fn default() -> Self { Self::null() } } impl Handle for $name { const TYPE: ObjectType = ObjectType::$ty; fn as_raw(self) -> u64 { self.0 as u64 } fn from_raw(x: u64) -> Self { Self(x as _) } } unsafe impl Send for $name {} unsafe impl Sync for $name {} impl $name { pub const fn null() -> Self { Self(::core::ptr::null_mut()) } } impl fmt::Pointer for $name { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&self.0, f) } } impl fmt::Debug for $name { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&self.0, f) } } }; } ash-0.38.0+1.3.281/src/vk/native.rs000064400000000000000000015312431046102023000143510ustar 00000000000000/* automatically generated by rust-bindgen 0.69.4 */ #[repr(C)] #[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] pub struct __BindgenBitfieldUnit<Storage> { storage: Storage, } impl<Storage> __BindgenBitfieldUnit<Storage> { #[inline] pub const fn new(storage: Storage) -> Self { Self { storage } } } impl<Storage> __BindgenBitfieldUnit<Storage> where Storage: AsRef<[u8]> + AsMut<[u8]>, { #[inline] pub fn get_bit(&self, index: usize) -> bool { debug_assert!(index / 8 < self.storage.as_ref().len()); let byte_index = index / 8; let byte = self.storage.as_ref()[byte_index]; let bit_index = if cfg!(target_endian = "big") { 7 - (index % 8) } else { index % 8 }; let mask = 1 << bit_index; byte & mask == mask } #[inline] pub fn set_bit(&mut self, index: usize, val: bool) { debug_assert!(index / 8 < self.storage.as_ref().len()); let byte_index = index / 8; let byte = &mut self.storage.as_mut()[byte_index]; let bit_index = if cfg!(target_endian = "big") { 7 - (index % 8) } else { index % 8 }; let mask = 1 << bit_index; if val { *byte |= mask; } else { *byte &= !mask; } } #[inline] pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 { debug_assert!(bit_width <= 64); debug_assert!(bit_offset / 8 < self.storage.as_ref().len()); debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len()); let mut val = 0; for i in 0..(bit_width as usize) { if self.get_bit(i + bit_offset) { let index = if cfg!(target_endian = "big") { bit_width as usize - 1 - i } else { i }; val |= 1 << index; } } val } #[inline] pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) { debug_assert!(bit_width <= 64); debug_assert!(bit_offset / 8 < self.storage.as_ref().len()); debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len()); for i in 0..(bit_width as usize) {
let mask = 1 << i; let val_bit_is_set = val & mask == mask; let index = if cfg!(target_endian = "big") { bit_width as usize - 1 - i } else { i }; self.set_bit(index + bit_offset, val_bit_is_set); } } } pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME: StdVideoH264ChromaFormatIdc = 0; pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_420: StdVideoH264ChromaFormatIdc = 1; pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_422: StdVideoH264ChromaFormatIdc = 2; pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_444: StdVideoH264ChromaFormatIdc = 3; pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID: StdVideoH264ChromaFormatIdc = 2147483647; pub type StdVideoH264ChromaFormatIdc = ::core::ffi::c_uint; pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_BASELINE: StdVideoH264ProfileIdc = 66; pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_MAIN: StdVideoH264ProfileIdc = 77; pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_HIGH: StdVideoH264ProfileIdc = 100; pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE: StdVideoH264ProfileIdc = 244; pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_INVALID: StdVideoH264ProfileIdc = 2147483647; pub type StdVideoH264ProfileIdc = ::core::ffi::c_uint; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_0: StdVideoH264LevelIdc = 0; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_1: StdVideoH264LevelIdc = 1; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_2: StdVideoH264LevelIdc = 2; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_3: StdVideoH264LevelIdc = 3; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_2_0: StdVideoH264LevelIdc = 4; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_2_1: StdVideoH264LevelIdc = 5; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_2_2: StdVideoH264LevelIdc = 6; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_3_0: StdVideoH264LevelIdc = 7; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_3_1: StdVideoH264LevelIdc = 8; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_3_2: StdVideoH264LevelIdc = 9; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_4_0: StdVideoH264LevelIdc = 10; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_4_1: StdVideoH264LevelIdc = 11; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_4_2: StdVideoH264LevelIdc = 12; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_0: StdVideoH264LevelIdc = 13; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_1: StdVideoH264LevelIdc = 14; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_2: StdVideoH264LevelIdc = 15; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_6_0: StdVideoH264LevelIdc = 16; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_6_1: StdVideoH264LevelIdc = 17; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_6_2: StdVideoH264LevelIdc = 18; pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_INVALID: StdVideoH264LevelIdc = 2147483647; pub type StdVideoH264LevelIdc = ::core::ffi::c_uint; pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_0: StdVideoH264PocType = 0; pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_1: StdVideoH264PocType = 1; pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_2: StdVideoH264PocType = 2; pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_INVALID: StdVideoH264PocType = 2147483647; pub 
type StdVideoH264PocType = ::core::ffi::c_uint; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED: StdVideoH264AspectRatioIdc = 0; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE: StdVideoH264AspectRatioIdc = 1; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11: StdVideoH264AspectRatioIdc = 2; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11: StdVideoH264AspectRatioIdc = 3; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11: StdVideoH264AspectRatioIdc = 4; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33: StdVideoH264AspectRatioIdc = 5; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11: StdVideoH264AspectRatioIdc = 6; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11: StdVideoH264AspectRatioIdc = 7; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11: StdVideoH264AspectRatioIdc = 8; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33: StdVideoH264AspectRatioIdc = 9; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11: StdVideoH264AspectRatioIdc = 10; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11: StdVideoH264AspectRatioIdc = 11; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33: StdVideoH264AspectRatioIdc = 12; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99: StdVideoH264AspectRatioIdc = 13; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3: StdVideoH264AspectRatioIdc = 14; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2: StdVideoH264AspectRatioIdc = 15; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1: StdVideoH264AspectRatioIdc = 16; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR: StdVideoH264AspectRatioIdc = 255; pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID: StdVideoH264AspectRatioIdc = 2147483647; pub type StdVideoH264AspectRatioIdc = ::core::ffi::c_uint; pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT: StdVideoH264WeightedBipredIdc = 0; pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT: StdVideoH264WeightedBipredIdc = 1; pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT: StdVideoH264WeightedBipredIdc = 2; pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID: StdVideoH264WeightedBipredIdc = 2147483647; pub type StdVideoH264WeightedBipredIdc = ::core::ffi::c_uint; pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT : StdVideoH264ModificationOfPicNumsIdc = 0 ; pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD : StdVideoH264ModificationOfPicNumsIdc = 1 ; pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM : StdVideoH264ModificationOfPicNumsIdc = 2 ; pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END: StdVideoH264ModificationOfPicNumsIdc = 3; pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID : StdVideoH264ModificationOfPicNumsIdc = 2147483647 ; pub type StdVideoH264ModificationOfPicNumsIdc = ::core::ffi::c_uint; pub 
const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END: StdVideoH264MemMgmtControlOp = 0; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM: StdVideoH264MemMgmtControlOp = 1; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM: StdVideoH264MemMgmtControlOp = 2; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM: StdVideoH264MemMgmtControlOp = 3; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX : StdVideoH264MemMgmtControlOp = 4 ; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL: StdVideoH264MemMgmtControlOp = 5; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM : StdVideoH264MemMgmtControlOp = 6 ; pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID: StdVideoH264MemMgmtControlOp = 2147483647; pub type StdVideoH264MemMgmtControlOp = ::core::ffi::c_uint; pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_0: StdVideoH264CabacInitIdc = 0; pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_1: StdVideoH264CabacInitIdc = 1; pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_2: StdVideoH264CabacInitIdc = 2; pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_INVALID: StdVideoH264CabacInitIdc = 2147483647; pub type StdVideoH264CabacInitIdc = ::core::ffi::c_uint; pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED : StdVideoH264DisableDeblockingFilterIdc = 0 ; pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED : StdVideoH264DisableDeblockingFilterIdc = 1 ; pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL : StdVideoH264DisableDeblockingFilterIdc = 2 ; pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID : StdVideoH264DisableDeblockingFilterIdc = 2147483647 ; pub type StdVideoH264DisableDeblockingFilterIdc = ::core::ffi::c_uint; pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_P: StdVideoH264SliceType = 0; pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_B: StdVideoH264SliceType = 1; pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_I: StdVideoH264SliceType = 2; pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_INVALID: StdVideoH264SliceType = 2147483647; pub type StdVideoH264SliceType = ::core::ffi::c_uint; pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_P: StdVideoH264PictureType = 0; pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_B: StdVideoH264PictureType = 1; pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_I: StdVideoH264PictureType = 2; pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_IDR: StdVideoH264PictureType = 5; pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_INVALID: StdVideoH264PictureType = 2147483647; pub type StdVideoH264PictureType = ::core::ffi::c_uint; #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264SpsVuiFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 2usize]>, pub __bindgen_padding_0: u16, } #[test] fn bindgen_test_layout_StdVideoH264SpsVuiFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH264SpsVuiFlags)) ); 
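    // Illustrative round trip through the generated accessors (minimal usage
    // sketch): every `*_flag` occupies a single bit of `_bitfield_1`, so a
    // value written through its setter reads back unchanged and leaves the
    // neighbouring bits untouched.
    let mut vui_flags = StdVideoH264SpsVuiFlags {
        _bitfield_align_1: [],
        _bitfield_1: __BindgenBitfieldUnit::new([0u8; 2]),
        __bindgen_padding_0: 0,
    };
    vui_flags.set_video_full_range_flag(1);
    assert_eq!(vui_flags.video_full_range_flag(), 1);
    assert_eq!(vui_flags.aspect_ratio_info_present_flag(), 0);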
assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH264SpsVuiFlags)) ); } impl StdVideoH264SpsVuiFlags { #[inline] pub fn aspect_ratio_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_aspect_ratio_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn overscan_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_overscan_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn overscan_appropriate_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_overscan_appropriate_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn video_signal_type_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_video_signal_type_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn video_full_range_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_video_full_range_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn color_description_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_color_description_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn chroma_loc_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_chroma_loc_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn timing_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_timing_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn fixed_frame_rate_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_fixed_frame_rate_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn bitstream_restriction_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_bitstream_restriction_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn nal_hrd_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_nal_hrd_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); 
self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn vcl_hrd_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_vcl_hrd_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( aspect_ratio_info_present_flag: u32, overscan_info_present_flag: u32, overscan_appropriate_flag: u32, video_signal_type_present_flag: u32, video_full_range_flag: u32, color_description_present_flag: u32, chroma_loc_info_present_flag: u32, timing_info_present_flag: u32, fixed_frame_rate_flag: u32, bitstream_restriction_flag: u32, nal_hrd_parameters_present_flag: u32, vcl_hrd_parameters_present_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 2usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 2usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let aspect_ratio_info_present_flag: u32 = unsafe { ::core::mem::transmute(aspect_ratio_info_present_flag) }; aspect_ratio_info_present_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let overscan_info_present_flag: u32 = unsafe { ::core::mem::transmute(overscan_info_present_flag) }; overscan_info_present_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let overscan_appropriate_flag: u32 = unsafe { ::core::mem::transmute(overscan_appropriate_flag) }; overscan_appropriate_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let video_signal_type_present_flag: u32 = unsafe { ::core::mem::transmute(video_signal_type_present_flag) }; video_signal_type_present_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let video_full_range_flag: u32 = unsafe { ::core::mem::transmute(video_full_range_flag) }; video_full_range_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let color_description_present_flag: u32 = unsafe { ::core::mem::transmute(color_description_present_flag) }; color_description_present_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let chroma_loc_info_present_flag: u32 = unsafe { ::core::mem::transmute(chroma_loc_info_present_flag) }; chroma_loc_info_present_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let timing_info_present_flag: u32 = unsafe { ::core::mem::transmute(timing_info_present_flag) }; timing_info_present_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let fixed_frame_rate_flag: u32 = unsafe { ::core::mem::transmute(fixed_frame_rate_flag) }; fixed_frame_rate_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let bitstream_restriction_flag: u32 = unsafe { ::core::mem::transmute(bitstream_restriction_flag) }; bitstream_restriction_flag as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let nal_hrd_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(nal_hrd_parameters_present_flag) }; nal_hrd_parameters_present_flag as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let vcl_hrd_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(vcl_hrd_parameters_present_flag) }; vcl_hrd_parameters_present_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264HrdParameters { pub cpb_cnt_minus1: u8, pub bit_rate_scale: u8, pub cpb_size_scale: u8, pub reserved1: u8, pub bit_rate_value_minus1: [u32; 32usize], pub cpb_size_value_minus1: [u32; 32usize], pub cbr_flag: [u8; 32usize], pub initial_cpb_removal_delay_length_minus1: u32, pub 
cpb_removal_delay_length_minus1: u32, pub dpb_output_delay_length_minus1: u32, pub time_offset_length: u32, } #[test] fn bindgen_test_layout_StdVideoH264HrdParameters() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 308usize, concat!("Size of: ", stringify!(StdVideoH264HrdParameters)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH264HrdParameters)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_cnt_minus1) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(cpb_cnt_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_rate_scale) as usize - ptr as usize }, 1usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(bit_rate_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_size_scale) as usize - ptr as usize }, 2usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(cpb_size_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 3usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_rate_value_minus1) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(bit_rate_value_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_size_value_minus1) as usize - ptr as usize }, 132usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(cpb_size_value_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cbr_flag) as usize - ptr as usize }, 260usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(cbr_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).initial_cpb_removal_delay_length_minus1) as usize - ptr as usize }, 292usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(initial_cpb_removal_delay_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_removal_delay_length_minus1) as usize - ptr as usize }, 296usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(cpb_removal_delay_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).dpb_output_delay_length_minus1) as usize - ptr as usize }, 300usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(dpb_output_delay_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).time_offset_length) as usize - ptr as usize }, 304usize, concat!( "Offset of field: ", stringify!(StdVideoH264HrdParameters), "::", stringify!(time_offset_length) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264SequenceParameterSetVui { pub flags: StdVideoH264SpsVuiFlags, pub aspect_ratio_idc: StdVideoH264AspectRatioIdc, pub sar_width: u16, pub sar_height: u16, pub video_format: u8, pub colour_primaries: u8, pub transfer_characteristics: u8, pub matrix_coefficients: u8, pub num_units_in_tick: u32, pub time_scale: u32, pub max_num_reorder_frames: u8, pub max_dec_frame_buffering: u8, pub chroma_sample_loc_type_top_field: u8, pub chroma_sample_loc_type_bottom_field: u8, pub reserved1: u32, pub pHrdParameters: *const StdVideoH264HrdParameters, } #[test] fn 
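// Each `bindgen_test_layout_*` function asserts that the size, alignment and
// field offsets of the generated Rust struct match the C layout computed by
// bindgen when the binding was generated.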
bindgen_test_layout_StdVideoH264SequenceParameterSetVui() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 40usize, concat!("Size of: ", stringify!(StdVideoH264SequenceParameterSetVui)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!( "Alignment of ", stringify!(StdVideoH264SequenceParameterSetVui) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).aspect_ratio_idc) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(aspect_ratio_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sar_width) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(sar_width) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sar_height) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(sar_height) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).video_format) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(video_format) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).colour_primaries) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(colour_primaries) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).transfer_characteristics) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(transfer_characteristics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).matrix_coefficients) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(matrix_coefficients) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_units_in_tick) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(num_units_in_tick) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).time_scale) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(time_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_num_reorder_frames) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(max_num_reorder_frames) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_dec_frame_buffering) as usize - ptr as usize }, 25usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(max_dec_frame_buffering) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_sample_loc_type_top_field) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(chroma_sample_loc_type_top_field) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_sample_loc_type_bottom_field) as usize - ptr as usize }, 27usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", 
stringify!(chroma_sample_loc_type_bottom_field) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 28usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pHrdParameters) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSetVui), "::", stringify!(pHrdParameters) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264SpsFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 2usize]>, pub __bindgen_padding_0: u16, } #[test] fn bindgen_test_layout_StdVideoH264SpsFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH264SpsFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH264SpsFlags)) ); } impl StdVideoH264SpsFlags { #[inline] pub fn constraint_set0_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_constraint_set0_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn constraint_set1_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_constraint_set1_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn constraint_set2_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_constraint_set2_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn constraint_set3_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_constraint_set3_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn constraint_set4_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_constraint_set4_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn constraint_set5_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_constraint_set5_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn direct_8x8_inference_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_direct_8x8_inference_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn mb_adaptive_frame_field_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_mb_adaptive_frame_field_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn frame_mbs_only_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } 
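    // Construction sketch (values illustrative): the whole flag word is
    // usually built in one go with `new_bitfield_1` below, which takes the
    // sixteen flags in declaration order:
    //
    //     let sps_flags = StdVideoH264SpsFlags {
    //         _bitfield_align_1: [],
    //         _bitfield_1: StdVideoH264SpsFlags::new_bitfield_1(
    //             0, 0, 0, 0, 0, 0,    // constraint_set0..5_flag
    //             1,                   // direct_8x8_inference_flag
    //             0,                   // mb_adaptive_frame_field_flag
    //             1,                   // frame_mbs_only_flag
    //             0, 0, 0, 0, 0, 0, 0, // remaining flags cleared
    //         ),
    //         __bindgen_padding_0: 0,
    //     };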
#[inline] pub fn set_frame_mbs_only_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn delta_pic_order_always_zero_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_delta_pic_order_always_zero_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn separate_colour_plane_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_separate_colour_plane_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn gaps_in_frame_num_value_allowed_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_gaps_in_frame_num_value_allowed_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn qpprime_y_zero_transform_bypass_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) } } #[inline] pub fn set_qpprime_y_zero_transform_bypass_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 1u8, val as u64) } } #[inline] pub fn frame_cropping_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) } } #[inline] pub fn set_frame_cropping_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(13usize, 1u8, val as u64) } } #[inline] pub fn seq_scaling_matrix_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) } } #[inline] pub fn set_seq_scaling_matrix_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(14usize, 1u8, val as u64) } } #[inline] pub fn vui_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) } } #[inline] pub fn set_vui_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(15usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( constraint_set0_flag: u32, constraint_set1_flag: u32, constraint_set2_flag: u32, constraint_set3_flag: u32, constraint_set4_flag: u32, constraint_set5_flag: u32, direct_8x8_inference_flag: u32, mb_adaptive_frame_field_flag: u32, frame_mbs_only_flag: u32, delta_pic_order_always_zero_flag: u32, separate_colour_plane_flag: u32, gaps_in_frame_num_value_allowed_flag: u32, qpprime_y_zero_transform_bypass_flag: u32, frame_cropping_flag: u32, seq_scaling_matrix_present_flag: u32, vui_parameters_present_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 2usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 2usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let constraint_set0_flag: u32 = unsafe { ::core::mem::transmute(constraint_set0_flag) }; constraint_set0_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let constraint_set1_flag: u32 = unsafe { ::core::mem::transmute(constraint_set1_flag) }; constraint_set1_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let constraint_set2_flag: u32 = unsafe { ::core::mem::transmute(constraint_set2_flag) }; 
constraint_set2_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let constraint_set3_flag: u32 = unsafe { ::core::mem::transmute(constraint_set3_flag) }; constraint_set3_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let constraint_set4_flag: u32 = unsafe { ::core::mem::transmute(constraint_set4_flag) }; constraint_set4_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let constraint_set5_flag: u32 = unsafe { ::core::mem::transmute(constraint_set5_flag) }; constraint_set5_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let direct_8x8_inference_flag: u32 = unsafe { ::core::mem::transmute(direct_8x8_inference_flag) }; direct_8x8_inference_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let mb_adaptive_frame_field_flag: u32 = unsafe { ::core::mem::transmute(mb_adaptive_frame_field_flag) }; mb_adaptive_frame_field_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let frame_mbs_only_flag: u32 = unsafe { ::core::mem::transmute(frame_mbs_only_flag) }; frame_mbs_only_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let delta_pic_order_always_zero_flag: u32 = unsafe { ::core::mem::transmute(delta_pic_order_always_zero_flag) }; delta_pic_order_always_zero_flag as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let separate_colour_plane_flag: u32 = unsafe { ::core::mem::transmute(separate_colour_plane_flag) }; separate_colour_plane_flag as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let gaps_in_frame_num_value_allowed_flag: u32 = unsafe { ::core::mem::transmute(gaps_in_frame_num_value_allowed_flag) }; gaps_in_frame_num_value_allowed_flag as u64 }); __bindgen_bitfield_unit.set(12usize, 1u8, { let qpprime_y_zero_transform_bypass_flag: u32 = unsafe { ::core::mem::transmute(qpprime_y_zero_transform_bypass_flag) }; qpprime_y_zero_transform_bypass_flag as u64 }); __bindgen_bitfield_unit.set(13usize, 1u8, { let frame_cropping_flag: u32 = unsafe { ::core::mem::transmute(frame_cropping_flag) }; frame_cropping_flag as u64 }); __bindgen_bitfield_unit.set(14usize, 1u8, { let seq_scaling_matrix_present_flag: u32 = unsafe { ::core::mem::transmute(seq_scaling_matrix_present_flag) }; seq_scaling_matrix_present_flag as u64 }); __bindgen_bitfield_unit.set(15usize, 1u8, { let vui_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(vui_parameters_present_flag) }; vui_parameters_present_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264ScalingLists { pub scaling_list_present_mask: u16, pub use_default_scaling_matrix_mask: u16, pub ScalingList4x4: [[u8; 16usize]; 6usize], pub ScalingList8x8: [[u8; 64usize]; 6usize], } #[test] fn bindgen_test_layout_StdVideoH264ScalingLists() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 484usize, concat!("Size of: ", stringify!(StdVideoH264ScalingLists)) ); assert_eq!( ::core::mem::align_of::(), 2usize, concat!("Alignment of ", stringify!(StdVideoH264ScalingLists)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).scaling_list_present_mask) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH264ScalingLists), "::", stringify!(scaling_list_present_mask) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).use_default_scaling_matrix_mask) as usize - ptr as usize }, 2usize, concat!( "Offset of field: ", stringify!(StdVideoH264ScalingLists), "::", stringify!(use_default_scaling_matrix_mask) ) ); assert_eq!( 
unsafe { ::core::ptr::addr_of!((*ptr).ScalingList4x4) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH264ScalingLists), "::", stringify!(ScalingList4x4) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ScalingList8x8) as usize - ptr as usize }, 100usize, concat!( "Offset of field: ", stringify!(StdVideoH264ScalingLists), "::", stringify!(ScalingList8x8) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264SequenceParameterSet { pub flags: StdVideoH264SpsFlags, pub profile_idc: StdVideoH264ProfileIdc, pub level_idc: StdVideoH264LevelIdc, pub chroma_format_idc: StdVideoH264ChromaFormatIdc, pub seq_parameter_set_id: u8, pub bit_depth_luma_minus8: u8, pub bit_depth_chroma_minus8: u8, pub log2_max_frame_num_minus4: u8, pub pic_order_cnt_type: StdVideoH264PocType, pub offset_for_non_ref_pic: i32, pub offset_for_top_to_bottom_field: i32, pub log2_max_pic_order_cnt_lsb_minus4: u8, pub num_ref_frames_in_pic_order_cnt_cycle: u8, pub max_num_ref_frames: u8, pub reserved1: u8, pub pic_width_in_mbs_minus1: u32, pub pic_height_in_map_units_minus1: u32, pub frame_crop_left_offset: u32, pub frame_crop_right_offset: u32, pub frame_crop_top_offset: u32, pub frame_crop_bottom_offset: u32, pub reserved2: u32, pub pOffsetForRefFrame: *const i32, pub pScalingLists: *const StdVideoH264ScalingLists, pub pSequenceParameterSetVui: *const StdVideoH264SequenceParameterSetVui, } #[test] fn bindgen_test_layout_StdVideoH264SequenceParameterSet() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 88usize, concat!("Size of: ", stringify!(StdVideoH264SequenceParameterSet)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!( "Alignment of ", stringify!(StdVideoH264SequenceParameterSet) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).profile_idc) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(profile_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).level_idc) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(level_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_format_idc) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(chroma_format_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_depth_luma_minus8) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(bit_depth_luma_minus8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_depth_chroma_minus8) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(bit_depth_chroma_minus8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_max_frame_num_minus4) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", 
stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(log2_max_frame_num_minus4) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_order_cnt_type) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(pic_order_cnt_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).offset_for_non_ref_pic) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(offset_for_non_ref_pic) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).offset_for_top_to_bottom_field) as usize - ptr as usize }, 28usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(offset_for_top_to_bottom_field) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_max_pic_order_cnt_lsb_minus4) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(log2_max_pic_order_cnt_lsb_minus4) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_frames_in_pic_order_cnt_cycle) as usize - ptr as usize }, 33usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(num_ref_frames_in_pic_order_cnt_cycle) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_num_ref_frames) as usize - ptr as usize }, 34usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(max_num_ref_frames) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 35usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_width_in_mbs_minus1) as usize - ptr as usize }, 36usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(pic_width_in_mbs_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_height_in_map_units_minus1) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(pic_height_in_map_units_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_crop_left_offset) as usize - ptr as usize }, 44usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(frame_crop_left_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_crop_right_offset) as usize - ptr as usize }, 48usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(frame_crop_right_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_crop_top_offset) as usize - ptr as usize }, 52usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(frame_crop_top_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_crop_bottom_offset) as usize - ptr as usize }, 56usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(frame_crop_bottom_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 60usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pOffsetForRefFrame) as usize - ptr as usize }, 64usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", 
stringify!(pOffsetForRefFrame) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize }, 72usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(pScalingLists) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pSequenceParameterSetVui) as usize - ptr as usize }, 80usize, concat!( "Offset of field: ", stringify!(StdVideoH264SequenceParameterSet), "::", stringify!(pSequenceParameterSetVui) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264PpsFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoH264PpsFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH264PpsFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH264PpsFlags)) ); } impl StdVideoH264PpsFlags { #[inline] pub fn transform_8x8_mode_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_transform_8x8_mode_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn redundant_pic_cnt_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_redundant_pic_cnt_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn constrained_intra_pred_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_constrained_intra_pred_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn deblocking_filter_control_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_deblocking_filter_control_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn weighted_pred_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_weighted_pred_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn bottom_field_pic_order_in_frame_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_bottom_field_pic_order_in_frame_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn entropy_coding_mode_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_entropy_coding_mode_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn pic_scaling_matrix_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_pic_scaling_matrix_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } 
} #[inline] pub fn new_bitfield_1( transform_8x8_mode_flag: u32, redundant_pic_cnt_present_flag: u32, constrained_intra_pred_flag: u32, deblocking_filter_control_present_flag: u32, weighted_pred_flag: u32, bottom_field_pic_order_in_frame_present_flag: u32, entropy_coding_mode_flag: u32, pic_scaling_matrix_present_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let transform_8x8_mode_flag: u32 = unsafe { ::core::mem::transmute(transform_8x8_mode_flag) }; transform_8x8_mode_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let redundant_pic_cnt_present_flag: u32 = unsafe { ::core::mem::transmute(redundant_pic_cnt_present_flag) }; redundant_pic_cnt_present_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let constrained_intra_pred_flag: u32 = unsafe { ::core::mem::transmute(constrained_intra_pred_flag) }; constrained_intra_pred_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let deblocking_filter_control_present_flag: u32 = unsafe { ::core::mem::transmute(deblocking_filter_control_present_flag) }; deblocking_filter_control_present_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let weighted_pred_flag: u32 = unsafe { ::core::mem::transmute(weighted_pred_flag) }; weighted_pred_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let bottom_field_pic_order_in_frame_present_flag: u32 = unsafe { ::core::mem::transmute(bottom_field_pic_order_in_frame_present_flag) }; bottom_field_pic_order_in_frame_present_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let entropy_coding_mode_flag: u32 = unsafe { ::core::mem::transmute(entropy_coding_mode_flag) }; entropy_coding_mode_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let pic_scaling_matrix_present_flag: u32 = unsafe { ::core::mem::transmute(pic_scaling_matrix_present_flag) }; pic_scaling_matrix_present_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH264PictureParameterSet { pub flags: StdVideoH264PpsFlags, pub seq_parameter_set_id: u8, pub pic_parameter_set_id: u8, pub num_ref_idx_l0_default_active_minus1: u8, pub num_ref_idx_l1_default_active_minus1: u8, pub weighted_bipred_idc: StdVideoH264WeightedBipredIdc, pub pic_init_qp_minus26: i8, pub pic_init_qs_minus26: i8, pub chroma_qp_index_offset: i8, pub second_chroma_qp_index_offset: i8, pub pScalingLists: *const StdVideoH264ScalingLists, } #[test] fn bindgen_test_layout_StdVideoH264PictureParameterSet() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 24usize, concat!("Size of: ", stringify!(StdVideoH264PictureParameterSet)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoH264PictureParameterSet)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_parameter_set_id) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", 
stringify!(pic_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l0_default_active_minus1) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(num_ref_idx_l0_default_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l1_default_active_minus1) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(num_ref_idx_l1_default_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).weighted_bipred_idc) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(weighted_bipred_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_init_qp_minus26) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(pic_init_qp_minus26) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_init_qs_minus26) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(pic_init_qs_minus26) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_qp_index_offset) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(chroma_qp_index_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).second_chroma_qp_index_offset) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(second_chroma_qp_index_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH264PictureParameterSet), "::", stringify!(pScalingLists) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH264PictureInfoFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH264PictureInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoDecodeH264PictureInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoDecodeH264PictureInfoFlags) ) ); } impl StdVideoDecodeH264PictureInfoFlags { #[inline] pub fn field_pic_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_field_pic_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn is_intra(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_is_intra(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn IdrPicFlag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_IdrPicFlag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn bottom_field_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_bottom_field_flag(&mut self, val: u32) { unsafe { 
let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn is_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_is_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn complementary_field_pair(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_complementary_field_pair(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( field_pic_flag: u32, is_intra: u32, IdrPicFlag: u32, bottom_field_flag: u32, is_reference: u32, complementary_field_pair: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let field_pic_flag: u32 = unsafe { ::core::mem::transmute(field_pic_flag) }; field_pic_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let is_intra: u32 = unsafe { ::core::mem::transmute(is_intra) }; is_intra as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let IdrPicFlag: u32 = unsafe { ::core::mem::transmute(IdrPicFlag) }; IdrPicFlag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let bottom_field_flag: u32 = unsafe { ::core::mem::transmute(bottom_field_flag) }; bottom_field_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let is_reference: u32 = unsafe { ::core::mem::transmute(is_reference) }; is_reference as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let complementary_field_pair: u32 = unsafe { ::core::mem::transmute(complementary_field_pair) }; complementary_field_pair as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH264PictureInfo { pub flags: StdVideoDecodeH264PictureInfoFlags, pub seq_parameter_set_id: u8, pub pic_parameter_set_id: u8, pub reserved1: u8, pub reserved2: u8, pub frame_num: u16, pub idr_pic_id: u16, pub PicOrderCnt: [i32; 2usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH264PictureInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 20usize, concat!("Size of: ", stringify!(StdVideoDecodeH264PictureInfo)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoDecodeH264PictureInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_parameter_set_id) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(pic_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 7usize, concat!( "Offset of 
field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_num) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(frame_num) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).idr_pic_id) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(idr_pic_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264PictureInfo), "::", stringify!(PicOrderCnt) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH264ReferenceInfoFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH264ReferenceInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", stringify!(StdVideoDecodeH264ReferenceInfoFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoDecodeH264ReferenceInfoFlags) ) ); } impl StdVideoDecodeH264ReferenceInfoFlags { #[inline] pub fn top_field_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_top_field_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn bottom_field_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_bottom_field_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn used_for_long_term_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_used_for_long_term_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn is_non_existing(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_is_non_existing(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( top_field_flag: u32, bottom_field_flag: u32, used_for_long_term_reference: u32, is_non_existing: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let top_field_flag: u32 = unsafe { ::core::mem::transmute(top_field_flag) }; top_field_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let bottom_field_flag: u32 = unsafe { ::core::mem::transmute(bottom_field_flag) }; bottom_field_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let used_for_long_term_reference: u32 = unsafe { ::core::mem::transmute(used_for_long_term_reference) }; used_for_long_term_reference as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let is_non_existing: u32 = unsafe { ::core::mem::transmute(is_non_existing) }; is_non_existing as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH264ReferenceInfo { pub flags: 
#[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH264ReferenceInfo { pub flags: StdVideoDecodeH264ReferenceInfoFlags, pub FrameNum: u16, pub reserved: u16, pub PicOrderCnt: [i32; 2usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH264ReferenceInfo() { const UNINIT: ::core::mem::MaybeUninit<StdVideoDecodeH264ReferenceInfo> = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::<StdVideoDecodeH264ReferenceInfo>(), 16usize, concat!("Size of: ", stringify!(StdVideoDecodeH264ReferenceInfo)) ); assert_eq!( ::core::mem::align_of::<StdVideoDecodeH264ReferenceInfo>(), 4usize, concat!("Alignment of ", stringify!(StdVideoDecodeH264ReferenceInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264ReferenceInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).FrameNum) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264ReferenceInfo), "::", stringify!(FrameNum) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264ReferenceInfo), "::", stringify!(reserved) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH264ReferenceInfo), "::", stringify!(PicOrderCnt) ) ); } pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME: StdVideoH265ChromaFormatIdc = 0; pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_420: StdVideoH265ChromaFormatIdc = 1; pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_422: StdVideoH265ChromaFormatIdc = 2; pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_444: StdVideoH265ChromaFormatIdc = 3; pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID: StdVideoH265ChromaFormatIdc = 2147483647; pub type StdVideoH265ChromaFormatIdc = ::core::ffi::c_uint; pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_MAIN: StdVideoH265ProfileIdc = 1; pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_MAIN_10: StdVideoH265ProfileIdc = 2; pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE: StdVideoH265ProfileIdc = 3; pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS: StdVideoH265ProfileIdc = 4; pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS: StdVideoH265ProfileIdc = 9; pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_INVALID: StdVideoH265ProfileIdc = 2147483647; pub type StdVideoH265ProfileIdc = ::core::ffi::c_uint; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_1_0: StdVideoH265LevelIdc = 0; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_2_0: StdVideoH265LevelIdc = 1; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_2_1: StdVideoH265LevelIdc = 2; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_3_0: StdVideoH265LevelIdc = 3; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_3_1: StdVideoH265LevelIdc = 4; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_4_0: StdVideoH265LevelIdc = 5; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_4_1: StdVideoH265LevelIdc = 6; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_0: StdVideoH265LevelIdc = 7; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_1: StdVideoH265LevelIdc = 8; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_2: StdVideoH265LevelIdc = 9; pub const
StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_6_0: StdVideoH265LevelIdc = 10; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_6_1: StdVideoH265LevelIdc = 11; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_6_2: StdVideoH265LevelIdc = 12; pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_INVALID: StdVideoH265LevelIdc = 2147483647; pub type StdVideoH265LevelIdc = ::core::ffi::c_uint; pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_B: StdVideoH265SliceType = 0; pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_P: StdVideoH265SliceType = 1; pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_I: StdVideoH265SliceType = 2; pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_INVALID: StdVideoH265SliceType = 2147483647; pub type StdVideoH265SliceType = ::core::ffi::c_uint; pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_P: StdVideoH265PictureType = 0; pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_B: StdVideoH265PictureType = 1; pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_I: StdVideoH265PictureType = 2; pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_IDR: StdVideoH265PictureType = 3; pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_INVALID: StdVideoH265PictureType = 2147483647; pub type StdVideoH265PictureType = ::core::ffi::c_uint; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED: StdVideoH265AspectRatioIdc = 0; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE: StdVideoH265AspectRatioIdc = 1; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11: StdVideoH265AspectRatioIdc = 2; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11: StdVideoH265AspectRatioIdc = 3; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11: StdVideoH265AspectRatioIdc = 4; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33: StdVideoH265AspectRatioIdc = 5; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11: StdVideoH265AspectRatioIdc = 6; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11: StdVideoH265AspectRatioIdc = 7; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11: StdVideoH265AspectRatioIdc = 8; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33: StdVideoH265AspectRatioIdc = 9; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11: StdVideoH265AspectRatioIdc = 10; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11: StdVideoH265AspectRatioIdc = 11; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33: StdVideoH265AspectRatioIdc = 12; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99: StdVideoH265AspectRatioIdc = 13; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3: StdVideoH265AspectRatioIdc = 14; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2: StdVideoH265AspectRatioIdc = 15; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1: StdVideoH265AspectRatioIdc = 16; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR: StdVideoH265AspectRatioIdc = 255; pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID: StdVideoH265AspectRatioIdc = 2147483647; pub type StdVideoH265AspectRatioIdc = ::core::ffi::c_uint; #[repr(C)] #[derive(Debug, Copy, Clone)] 
pub struct StdVideoH265DecPicBufMgr { pub max_latency_increase_plus1: [u32; 7usize], pub max_dec_pic_buffering_minus1: [u8; 7usize], pub max_num_reorder_pics: [u8; 7usize], } #[test] fn bindgen_test_layout_StdVideoH265DecPicBufMgr() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 44usize, concat!("Size of: ", stringify!(StdVideoH265DecPicBufMgr)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265DecPicBufMgr)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_latency_increase_plus1) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265DecPicBufMgr), "::", stringify!(max_latency_increase_plus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_dec_pic_buffering_minus1) as usize - ptr as usize }, 28usize, concat!( "Offset of field: ", stringify!(StdVideoH265DecPicBufMgr), "::", stringify!(max_dec_pic_buffering_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_num_reorder_pics) as usize - ptr as usize }, 35usize, concat!( "Offset of field: ", stringify!(StdVideoH265DecPicBufMgr), "::", stringify!(max_num_reorder_pics) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265SubLayerHrdParameters { pub bit_rate_value_minus1: [u32; 32usize], pub cpb_size_value_minus1: [u32; 32usize], pub cpb_size_du_value_minus1: [u32; 32usize], pub bit_rate_du_value_minus1: [u32; 32usize], pub cbr_flag: u32, } #[test] fn bindgen_test_layout_StdVideoH265SubLayerHrdParameters() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 516usize, concat!("Size of: ", stringify!(StdVideoH265SubLayerHrdParameters)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoH265SubLayerHrdParameters) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_rate_value_minus1) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265SubLayerHrdParameters), "::", stringify!(bit_rate_value_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_size_value_minus1) as usize - ptr as usize }, 128usize, concat!( "Offset of field: ", stringify!(StdVideoH265SubLayerHrdParameters), "::", stringify!(cpb_size_value_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_size_du_value_minus1) as usize - ptr as usize }, 256usize, concat!( "Offset of field: ", stringify!(StdVideoH265SubLayerHrdParameters), "::", stringify!(cpb_size_du_value_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_rate_du_value_minus1) as usize - ptr as usize }, 384usize, concat!( "Offset of field: ", stringify!(StdVideoH265SubLayerHrdParameters), "::", stringify!(bit_rate_du_value_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cbr_flag) as usize - ptr as usize }, 512usize, concat!( "Offset of field: ", stringify!(StdVideoH265SubLayerHrdParameters), "::", stringify!(cbr_flag) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265HrdFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoH265HrdFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH265HrdFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265HrdFlags)) ); } 
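// Editorial note (not generated by bindgen): unlike the bitfield wrappers, `StdVideoH265DecPicBufMgr`
// is a plain POD struct and can be filled in with an ordinary struct literal; the length-7 arrays
// appear to hold one entry per H.265 temporal sub-layer. A hedged sketch; the zeroed values are
// illustrative only.
#[test]
fn example_h265_dec_pic_buf_mgr_literal() {
    let dpb = StdVideoH265DecPicBufMgr {
        max_latency_increase_plus1: [0; 7],
        max_dec_pic_buffering_minus1: [0; 7],
        max_num_reorder_pics: [0; 7],
    };
    // `Copy`/`Clone` are derived, so the value can be duplicated freely when it is later
    // referenced from `StdVideoH265VideoParameterSet::pDecPicBufMgr`.
    let dpb_copy = dpb;
    assert_eq!(dpb_copy.max_dec_pic_buffering_minus1.len(), 7);
}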
impl StdVideoH265HrdFlags { #[inline] pub fn nal_hrd_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_nal_hrd_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn vcl_hrd_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_vcl_hrd_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn sub_pic_hrd_params_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_sub_pic_hrd_params_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn sub_pic_cpb_params_in_pic_timing_sei_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_sub_pic_cpb_params_in_pic_timing_sei_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn fixed_pic_rate_general_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 8u8) as u32) } } #[inline] pub fn set_fixed_pic_rate_general_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 8u8, val as u64) } } #[inline] pub fn fixed_pic_rate_within_cvs_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 8u8) as u32) } } #[inline] pub fn set_fixed_pic_rate_within_cvs_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 8u8, val as u64) } } #[inline] pub fn low_delay_hrd_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(20usize, 8u8) as u32) } } #[inline] pub fn set_low_delay_hrd_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(20usize, 8u8, val as u64) } } #[inline] pub fn new_bitfield_1( nal_hrd_parameters_present_flag: u32, vcl_hrd_parameters_present_flag: u32, sub_pic_hrd_params_present_flag: u32, sub_pic_cpb_params_in_pic_timing_sei_flag: u32, fixed_pic_rate_general_flag: u32, fixed_pic_rate_within_cvs_flag: u32, low_delay_hrd_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let nal_hrd_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(nal_hrd_parameters_present_flag) }; nal_hrd_parameters_present_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let vcl_hrd_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(vcl_hrd_parameters_present_flag) }; vcl_hrd_parameters_present_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let sub_pic_hrd_params_present_flag: u32 = unsafe { ::core::mem::transmute(sub_pic_hrd_params_present_flag) }; sub_pic_hrd_params_present_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let sub_pic_cpb_params_in_pic_timing_sei_flag: u32 = unsafe { ::core::mem::transmute(sub_pic_cpb_params_in_pic_timing_sei_flag) }; sub_pic_cpb_params_in_pic_timing_sei_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 8u8, 
{ let fixed_pic_rate_general_flag: u32 = unsafe { ::core::mem::transmute(fixed_pic_rate_general_flag) }; fixed_pic_rate_general_flag as u64 }); __bindgen_bitfield_unit.set(12usize, 8u8, { let fixed_pic_rate_within_cvs_flag: u32 = unsafe { ::core::mem::transmute(fixed_pic_rate_within_cvs_flag) }; fixed_pic_rate_within_cvs_flag as u64 }); __bindgen_bitfield_unit.set(20usize, 8u8, { let low_delay_hrd_flag: u32 = unsafe { ::core::mem::transmute(low_delay_hrd_flag) }; low_delay_hrd_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265HrdParameters { pub flags: StdVideoH265HrdFlags, pub tick_divisor_minus2: u8, pub du_cpb_removal_delay_increment_length_minus1: u8, pub dpb_output_delay_du_length_minus1: u8, pub bit_rate_scale: u8, pub cpb_size_scale: u8, pub cpb_size_du_scale: u8, pub initial_cpb_removal_delay_length_minus1: u8, pub au_cpb_removal_delay_length_minus1: u8, pub dpb_output_delay_length_minus1: u8, pub cpb_cnt_minus1: [u8; 7usize], pub elemental_duration_in_tc_minus1: [u16; 7usize], pub reserved: [u16; 3usize], pub pSubLayerHrdParametersNal: *const StdVideoH265SubLayerHrdParameters, pub pSubLayerHrdParametersVcl: *const StdVideoH265SubLayerHrdParameters, } #[test] fn bindgen_test_layout_StdVideoH265HrdParameters() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 56usize, concat!("Size of: ", stringify!(StdVideoH265HrdParameters)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoH265HrdParameters)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).tick_divisor_minus2) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(tick_divisor_minus2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).du_cpb_removal_delay_increment_length_minus1) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(du_cpb_removal_delay_increment_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).dpb_output_delay_du_length_minus1) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(dpb_output_delay_du_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_rate_scale) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(bit_rate_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_size_scale) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(cpb_size_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_size_du_scale) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(cpb_size_du_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).initial_cpb_removal_delay_length_minus1) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(initial_cpb_removal_delay_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).au_cpb_removal_delay_length_minus1) as usize - ptr as usize }, 
11usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(au_cpb_removal_delay_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).dpb_output_delay_length_minus1) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(dpb_output_delay_length_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cpb_cnt_minus1) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(cpb_cnt_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).elemental_duration_in_tc_minus1) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(elemental_duration_in_tc_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize }, 34usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(reserved) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pSubLayerHrdParametersNal) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(pSubLayerHrdParametersNal) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pSubLayerHrdParametersVcl) as usize - ptr as usize }, 48usize, concat!( "Offset of field: ", stringify!(StdVideoH265HrdParameters), "::", stringify!(pSubLayerHrdParametersVcl) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265VpsFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoH265VpsFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH265VpsFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265VpsFlags)) ); } impl StdVideoH265VpsFlags { #[inline] pub fn vps_temporal_id_nesting_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_vps_temporal_id_nesting_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn vps_sub_layer_ordering_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_vps_sub_layer_ordering_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn vps_timing_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_vps_timing_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn vps_poc_proportional_to_timing_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_vps_poc_proportional_to_timing_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( vps_temporal_id_nesting_flag: u32, vps_sub_layer_ordering_info_present_flag: u32, vps_timing_info_present_flag: u32, vps_poc_proportional_to_timing_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut 
__bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let vps_temporal_id_nesting_flag: u32 = unsafe { ::core::mem::transmute(vps_temporal_id_nesting_flag) }; vps_temporal_id_nesting_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let vps_sub_layer_ordering_info_present_flag: u32 = unsafe { ::core::mem::transmute(vps_sub_layer_ordering_info_present_flag) }; vps_sub_layer_ordering_info_present_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let vps_timing_info_present_flag: u32 = unsafe { ::core::mem::transmute(vps_timing_info_present_flag) }; vps_timing_info_present_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let vps_poc_proportional_to_timing_flag: u32 = unsafe { ::core::mem::transmute(vps_poc_proportional_to_timing_flag) }; vps_poc_proportional_to_timing_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265ProfileTierLevelFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoH265ProfileTierLevelFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH265ProfileTierLevelFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoH265ProfileTierLevelFlags) ) ); } impl StdVideoH265ProfileTierLevelFlags { #[inline] pub fn general_tier_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_general_tier_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn general_progressive_source_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_general_progressive_source_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn general_interlaced_source_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_general_interlaced_source_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn general_non_packed_constraint_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_general_non_packed_constraint_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn general_frame_only_constraint_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_general_frame_only_constraint_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( general_tier_flag: u32, general_progressive_source_flag: u32, general_interlaced_source_flag: u32, general_non_packed_constraint_flag: u32, general_frame_only_constraint_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let general_tier_flag: u32 = unsafe { 
::core::mem::transmute(general_tier_flag) }; general_tier_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let general_progressive_source_flag: u32 = unsafe { ::core::mem::transmute(general_progressive_source_flag) }; general_progressive_source_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let general_interlaced_source_flag: u32 = unsafe { ::core::mem::transmute(general_interlaced_source_flag) }; general_interlaced_source_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let general_non_packed_constraint_flag: u32 = unsafe { ::core::mem::transmute(general_non_packed_constraint_flag) }; general_non_packed_constraint_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let general_frame_only_constraint_flag: u32 = unsafe { ::core::mem::transmute(general_frame_only_constraint_flag) }; general_frame_only_constraint_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265ProfileTierLevel { pub flags: StdVideoH265ProfileTierLevelFlags, pub general_profile_idc: StdVideoH265ProfileIdc, pub general_level_idc: StdVideoH265LevelIdc, } #[test] fn bindgen_test_layout_StdVideoH265ProfileTierLevel() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 12usize, concat!("Size of: ", stringify!(StdVideoH265ProfileTierLevel)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265ProfileTierLevel)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265ProfileTierLevel), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).general_profile_idc) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265ProfileTierLevel), "::", stringify!(general_profile_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).general_level_idc) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265ProfileTierLevel), "::", stringify!(general_level_idc) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265VideoParameterSet { pub flags: StdVideoH265VpsFlags, pub vps_video_parameter_set_id: u8, pub vps_max_sub_layers_minus1: u8, pub reserved1: u8, pub reserved2: u8, pub vps_num_units_in_tick: u32, pub vps_time_scale: u32, pub vps_num_ticks_poc_diff_one_minus1: u32, pub reserved3: u32, pub pDecPicBufMgr: *const StdVideoH265DecPicBufMgr, pub pHrdParameters: *const StdVideoH265HrdParameters, pub pProfileTierLevel: *const StdVideoH265ProfileTierLevel, } #[test] fn bindgen_test_layout_StdVideoH265VideoParameterSet() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 48usize, concat!("Size of: ", stringify!(StdVideoH265VideoParameterSet)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoH265VideoParameterSet)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vps_video_parameter_set_id) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(vps_video_parameter_set_id) ) ); assert_eq!( unsafe { 
::core::ptr::addr_of!((*ptr).vps_max_sub_layers_minus1) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(vps_max_sub_layers_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vps_num_units_in_tick) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(vps_num_units_in_tick) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vps_time_scale) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(vps_time_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vps_num_ticks_poc_diff_one_minus1) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(vps_num_ticks_poc_diff_one_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(reserved3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pDecPicBufMgr) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(pDecPicBufMgr) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pHrdParameters) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(pHrdParameters) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pProfileTierLevel) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoH265VideoParameterSet), "::", stringify!(pProfileTierLevel) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265ScalingLists { pub ScalingList4x4: [[u8; 16usize]; 6usize], pub ScalingList8x8: [[u8; 64usize]; 6usize], pub ScalingList16x16: [[u8; 64usize]; 6usize], pub ScalingList32x32: [[u8; 64usize]; 2usize], pub ScalingListDCCoef16x16: [u8; 6usize], pub ScalingListDCCoef32x32: [u8; 2usize], } #[test] fn bindgen_test_layout_StdVideoH265ScalingLists() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 1000usize, concat!("Size of: ", stringify!(StdVideoH265ScalingLists)) ); assert_eq!( ::core::mem::align_of::(), 1usize, concat!("Alignment of ", stringify!(StdVideoH265ScalingLists)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ScalingList4x4) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265ScalingLists), "::", stringify!(ScalingList4x4) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ScalingList8x8) as usize - ptr as usize }, 96usize, concat!( "Offset of field: ", stringify!(StdVideoH265ScalingLists), "::", stringify!(ScalingList8x8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ScalingList16x16) as usize - ptr as usize }, 480usize, concat!( "Offset of field: ", stringify!(StdVideoH265ScalingLists), "::", stringify!(ScalingList16x16) ) ); assert_eq!( unsafe { 
::core::ptr::addr_of!((*ptr).ScalingList32x32) as usize - ptr as usize }, 864usize, concat!( "Offset of field: ", stringify!(StdVideoH265ScalingLists), "::", stringify!(ScalingList32x32) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ScalingListDCCoef16x16) as usize - ptr as usize }, 992usize, concat!( "Offset of field: ", stringify!(StdVideoH265ScalingLists), "::", stringify!(ScalingListDCCoef16x16) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ScalingListDCCoef32x32) as usize - ptr as usize }, 998usize, concat!( "Offset of field: ", stringify!(StdVideoH265ScalingLists), "::", stringify!(ScalingListDCCoef32x32) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265SpsVuiFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 3usize]>, pub __bindgen_padding_0: u8, } #[test] fn bindgen_test_layout_StdVideoH265SpsVuiFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH265SpsVuiFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265SpsVuiFlags)) ); } impl StdVideoH265SpsVuiFlags { #[inline] pub fn aspect_ratio_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_aspect_ratio_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn overscan_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_overscan_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn overscan_appropriate_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_overscan_appropriate_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn video_signal_type_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_video_signal_type_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn video_full_range_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_video_full_range_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn colour_description_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_colour_description_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn chroma_loc_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_chroma_loc_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn neutral_chroma_indication_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn 
set_neutral_chroma_indication_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn field_seq_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_field_seq_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn frame_field_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_frame_field_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn default_display_window_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_default_display_window_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn vui_timing_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_vui_timing_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn vui_poc_proportional_to_timing_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) } } #[inline] pub fn set_vui_poc_proportional_to_timing_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 1u8, val as u64) } } #[inline] pub fn vui_hrd_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) } } #[inline] pub fn set_vui_hrd_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(13usize, 1u8, val as u64) } } #[inline] pub fn bitstream_restriction_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) } } #[inline] pub fn set_bitstream_restriction_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(14usize, 1u8, val as u64) } } #[inline] pub fn tiles_fixed_structure_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) } } #[inline] pub fn set_tiles_fixed_structure_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(15usize, 1u8, val as u64) } } #[inline] pub fn motion_vectors_over_pic_boundaries_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) } } #[inline] pub fn set_motion_vectors_over_pic_boundaries_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(16usize, 1u8, val as u64) } } #[inline] pub fn restricted_ref_pic_lists_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) } } #[inline] pub fn set_restricted_ref_pic_lists_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(17usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( aspect_ratio_info_present_flag: u32, overscan_info_present_flag: u32, overscan_appropriate_flag: u32, video_signal_type_present_flag: u32, video_full_range_flag: u32, 
colour_description_present_flag: u32, chroma_loc_info_present_flag: u32, neutral_chroma_indication_flag: u32, field_seq_flag: u32, frame_field_info_present_flag: u32, default_display_window_flag: u32, vui_timing_info_present_flag: u32, vui_poc_proportional_to_timing_flag: u32, vui_hrd_parameters_present_flag: u32, bitstream_restriction_flag: u32, tiles_fixed_structure_flag: u32, motion_vectors_over_pic_boundaries_flag: u32, restricted_ref_pic_lists_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 3usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 3usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let aspect_ratio_info_present_flag: u32 = unsafe { ::core::mem::transmute(aspect_ratio_info_present_flag) }; aspect_ratio_info_present_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let overscan_info_present_flag: u32 = unsafe { ::core::mem::transmute(overscan_info_present_flag) }; overscan_info_present_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let overscan_appropriate_flag: u32 = unsafe { ::core::mem::transmute(overscan_appropriate_flag) }; overscan_appropriate_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let video_signal_type_present_flag: u32 = unsafe { ::core::mem::transmute(video_signal_type_present_flag) }; video_signal_type_present_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let video_full_range_flag: u32 = unsafe { ::core::mem::transmute(video_full_range_flag) }; video_full_range_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let colour_description_present_flag: u32 = unsafe { ::core::mem::transmute(colour_description_present_flag) }; colour_description_present_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let chroma_loc_info_present_flag: u32 = unsafe { ::core::mem::transmute(chroma_loc_info_present_flag) }; chroma_loc_info_present_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let neutral_chroma_indication_flag: u32 = unsafe { ::core::mem::transmute(neutral_chroma_indication_flag) }; neutral_chroma_indication_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let field_seq_flag: u32 = unsafe { ::core::mem::transmute(field_seq_flag) }; field_seq_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let frame_field_info_present_flag: u32 = unsafe { ::core::mem::transmute(frame_field_info_present_flag) }; frame_field_info_present_flag as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let default_display_window_flag: u32 = unsafe { ::core::mem::transmute(default_display_window_flag) }; default_display_window_flag as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let vui_timing_info_present_flag: u32 = unsafe { ::core::mem::transmute(vui_timing_info_present_flag) }; vui_timing_info_present_flag as u64 }); __bindgen_bitfield_unit.set(12usize, 1u8, { let vui_poc_proportional_to_timing_flag: u32 = unsafe { ::core::mem::transmute(vui_poc_proportional_to_timing_flag) }; vui_poc_proportional_to_timing_flag as u64 }); __bindgen_bitfield_unit.set(13usize, 1u8, { let vui_hrd_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(vui_hrd_parameters_present_flag) }; vui_hrd_parameters_present_flag as u64 }); __bindgen_bitfield_unit.set(14usize, 1u8, { let bitstream_restriction_flag: u32 = unsafe { ::core::mem::transmute(bitstream_restriction_flag) }; bitstream_restriction_flag as u64 }); __bindgen_bitfield_unit.set(15usize, 1u8, { let tiles_fixed_structure_flag: u32 = unsafe { ::core::mem::transmute(tiles_fixed_structure_flag) }; 
tiles_fixed_structure_flag as u64 }); __bindgen_bitfield_unit.set(16usize, 1u8, { let motion_vectors_over_pic_boundaries_flag: u32 = unsafe { ::core::mem::transmute(motion_vectors_over_pic_boundaries_flag) }; motion_vectors_over_pic_boundaries_flag as u64 }); __bindgen_bitfield_unit.set(17usize, 1u8, { let restricted_ref_pic_lists_flag: u32 = unsafe { ::core::mem::transmute(restricted_ref_pic_lists_flag) }; restricted_ref_pic_lists_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265SequenceParameterSetVui { pub flags: StdVideoH265SpsVuiFlags, pub aspect_ratio_idc: StdVideoH265AspectRatioIdc, pub sar_width: u16, pub sar_height: u16, pub video_format: u8, pub colour_primaries: u8, pub transfer_characteristics: u8, pub matrix_coeffs: u8, pub chroma_sample_loc_type_top_field: u8, pub chroma_sample_loc_type_bottom_field: u8, pub reserved1: u8, pub reserved2: u8, pub def_disp_win_left_offset: u16, pub def_disp_win_right_offset: u16, pub def_disp_win_top_offset: u16, pub def_disp_win_bottom_offset: u16, pub vui_num_units_in_tick: u32, pub vui_time_scale: u32, pub vui_num_ticks_poc_diff_one_minus1: u32, pub min_spatial_segmentation_idc: u16, pub reserved3: u16, pub max_bytes_per_pic_denom: u8, pub max_bits_per_min_cu_denom: u8, pub log2_max_mv_length_horizontal: u8, pub log2_max_mv_length_vertical: u8, pub pHrdParameters: *const StdVideoH265HrdParameters, } #[test] fn bindgen_test_layout_StdVideoH265SequenceParameterSetVui() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 56usize, concat!("Size of: ", stringify!(StdVideoH265SequenceParameterSetVui)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!( "Alignment of ", stringify!(StdVideoH265SequenceParameterSetVui) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).aspect_ratio_idc) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(aspect_ratio_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sar_width) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(sar_width) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sar_height) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(sar_height) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).video_format) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(video_format) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).colour_primaries) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(colour_primaries) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).transfer_characteristics) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(transfer_characteristics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).matrix_coeffs) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", 
stringify!(matrix_coeffs) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_sample_loc_type_top_field) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(chroma_sample_loc_type_top_field) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_sample_loc_type_bottom_field) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(chroma_sample_loc_type_bottom_field) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).def_disp_win_left_offset) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(def_disp_win_left_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).def_disp_win_right_offset) as usize - ptr as usize }, 22usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(def_disp_win_right_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).def_disp_win_top_offset) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(def_disp_win_top_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).def_disp_win_bottom_offset) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(def_disp_win_bottom_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vui_num_units_in_tick) as usize - ptr as usize }, 28usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(vui_num_units_in_tick) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vui_time_scale) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(vui_time_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).vui_num_ticks_poc_diff_one_minus1) as usize - ptr as usize }, 36usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(vui_num_ticks_poc_diff_one_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).min_spatial_segmentation_idc) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(min_spatial_segmentation_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize }, 42usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(reserved3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_bytes_per_pic_denom) as usize - ptr as usize }, 44usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(max_bytes_per_pic_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_bits_per_min_cu_denom) as usize - ptr as usize }, 45usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", 
stringify!(max_bits_per_min_cu_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_max_mv_length_horizontal) as usize - ptr as usize }, 46usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(log2_max_mv_length_horizontal) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_max_mv_length_vertical) as usize - ptr as usize }, 47usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(log2_max_mv_length_vertical) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pHrdParameters) as usize - ptr as usize }, 48usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSetVui), "::", stringify!(pHrdParameters) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265PredictorPaletteEntries { pub PredictorPaletteEntries: [[u16; 128usize]; 3usize], } #[test] fn bindgen_test_layout_StdVideoH265PredictorPaletteEntries() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 768usize, concat!("Size of: ", stringify!(StdVideoH265PredictorPaletteEntries)) ); assert_eq!( ::core::mem::align_of::(), 2usize, concat!( "Alignment of ", stringify!(StdVideoH265PredictorPaletteEntries) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PredictorPaletteEntries) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265PredictorPaletteEntries), "::", stringify!(PredictorPaletteEntries) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265SpsFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoH265SpsFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoH265SpsFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265SpsFlags)) ); } impl StdVideoH265SpsFlags { #[inline] pub fn sps_temporal_id_nesting_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_sps_temporal_id_nesting_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn separate_colour_plane_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_separate_colour_plane_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn conformance_window_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_conformance_window_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn sps_sub_layer_ordering_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_sps_sub_layer_ordering_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn scaling_list_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_scaling_list_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = 
::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn sps_scaling_list_data_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_sps_scaling_list_data_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn amp_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_amp_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn sample_adaptive_offset_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_sample_adaptive_offset_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn pcm_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_pcm_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn pcm_loop_filter_disabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_pcm_loop_filter_disabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn long_term_ref_pics_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_long_term_ref_pics_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn sps_temporal_mvp_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_sps_temporal_mvp_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn strong_intra_smoothing_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) } } #[inline] pub fn set_strong_intra_smoothing_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 1u8, val as u64) } } #[inline] pub fn vui_parameters_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) } } #[inline] pub fn set_vui_parameters_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(13usize, 1u8, val as u64) } } #[inline] pub fn sps_extension_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) } } #[inline] pub fn set_sps_extension_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(14usize, 1u8, val as u64) } } #[inline] pub fn sps_range_extension_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) } } #[inline] pub fn set_sps_range_extension_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(15usize, 1u8, val as u64) } } 
#[inline] pub fn transform_skip_rotation_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) } } #[inline] pub fn set_transform_skip_rotation_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(16usize, 1u8, val as u64) } } #[inline] pub fn transform_skip_context_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) } } #[inline] pub fn set_transform_skip_context_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(17usize, 1u8, val as u64) } } #[inline] pub fn implicit_rdpcm_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u32) } } #[inline] pub fn set_implicit_rdpcm_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(18usize, 1u8, val as u64) } } #[inline] pub fn explicit_rdpcm_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(19usize, 1u8) as u32) } } #[inline] pub fn set_explicit_rdpcm_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(19usize, 1u8, val as u64) } } #[inline] pub fn extended_precision_processing_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(20usize, 1u8) as u32) } } #[inline] pub fn set_extended_precision_processing_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(20usize, 1u8, val as u64) } } #[inline] pub fn intra_smoothing_disabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(21usize, 1u8) as u32) } } #[inline] pub fn set_intra_smoothing_disabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(21usize, 1u8, val as u64) } } #[inline] pub fn high_precision_offsets_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(22usize, 1u8) as u32) } } #[inline] pub fn set_high_precision_offsets_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(22usize, 1u8, val as u64) } } #[inline] pub fn persistent_rice_adaptation_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(23usize, 1u8) as u32) } } #[inline] pub fn set_persistent_rice_adaptation_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(23usize, 1u8, val as u64) } } #[inline] pub fn cabac_bypass_alignment_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(24usize, 1u8) as u32) } } #[inline] pub fn set_cabac_bypass_alignment_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(24usize, 1u8, val as u64) } } #[inline] pub fn sps_scc_extension_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(25usize, 1u8) as u32) } } #[inline] pub fn set_sps_scc_extension_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(25usize, 1u8, val as u64) } } #[inline] pub fn sps_curr_pic_ref_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(26usize, 1u8) as u32) } } #[inline] pub fn set_sps_curr_pic_ref_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); 
self._bitfield_1.set(26usize, 1u8, val as u64) } } #[inline] pub fn palette_mode_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(27usize, 1u8) as u32) } } #[inline] pub fn set_palette_mode_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(27usize, 1u8, val as u64) } } #[inline] pub fn sps_palette_predictor_initializers_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(28usize, 1u8) as u32) } } #[inline] pub fn set_sps_palette_predictor_initializers_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(28usize, 1u8, val as u64) } } #[inline] pub fn intra_boundary_filtering_disabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(29usize, 1u8) as u32) } } #[inline] pub fn set_intra_boundary_filtering_disabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(29usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( sps_temporal_id_nesting_flag: u32, separate_colour_plane_flag: u32, conformance_window_flag: u32, sps_sub_layer_ordering_info_present_flag: u32, scaling_list_enabled_flag: u32, sps_scaling_list_data_present_flag: u32, amp_enabled_flag: u32, sample_adaptive_offset_enabled_flag: u32, pcm_enabled_flag: u32, pcm_loop_filter_disabled_flag: u32, long_term_ref_pics_present_flag: u32, sps_temporal_mvp_enabled_flag: u32, strong_intra_smoothing_enabled_flag: u32, vui_parameters_present_flag: u32, sps_extension_present_flag: u32, sps_range_extension_flag: u32, transform_skip_rotation_enabled_flag: u32, transform_skip_context_enabled_flag: u32, implicit_rdpcm_enabled_flag: u32, explicit_rdpcm_enabled_flag: u32, extended_precision_processing_flag: u32, intra_smoothing_disabled_flag: u32, high_precision_offsets_enabled_flag: u32, persistent_rice_adaptation_enabled_flag: u32, cabac_bypass_alignment_enabled_flag: u32, sps_scc_extension_flag: u32, sps_curr_pic_ref_enabled_flag: u32, palette_mode_enabled_flag: u32, sps_palette_predictor_initializers_present_flag: u32, intra_boundary_filtering_disabled_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let sps_temporal_id_nesting_flag: u32 = unsafe { ::core::mem::transmute(sps_temporal_id_nesting_flag) }; sps_temporal_id_nesting_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let separate_colour_plane_flag: u32 = unsafe { ::core::mem::transmute(separate_colour_plane_flag) }; separate_colour_plane_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let conformance_window_flag: u32 = unsafe { ::core::mem::transmute(conformance_window_flag) }; conformance_window_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let sps_sub_layer_ordering_info_present_flag: u32 = unsafe { ::core::mem::transmute(sps_sub_layer_ordering_info_present_flag) }; sps_sub_layer_ordering_info_present_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let scaling_list_enabled_flag: u32 = unsafe { ::core::mem::transmute(scaling_list_enabled_flag) }; scaling_list_enabled_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let sps_scaling_list_data_present_flag: u32 = unsafe { ::core::mem::transmute(sps_scaling_list_data_present_flag) }; sps_scaling_list_data_present_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let amp_enabled_flag: 
u32 = unsafe { ::core::mem::transmute(amp_enabled_flag) }; amp_enabled_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let sample_adaptive_offset_enabled_flag: u32 = unsafe { ::core::mem::transmute(sample_adaptive_offset_enabled_flag) }; sample_adaptive_offset_enabled_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let pcm_enabled_flag: u32 = unsafe { ::core::mem::transmute(pcm_enabled_flag) }; pcm_enabled_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let pcm_loop_filter_disabled_flag: u32 = unsafe { ::core::mem::transmute(pcm_loop_filter_disabled_flag) }; pcm_loop_filter_disabled_flag as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let long_term_ref_pics_present_flag: u32 = unsafe { ::core::mem::transmute(long_term_ref_pics_present_flag) }; long_term_ref_pics_present_flag as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let sps_temporal_mvp_enabled_flag: u32 = unsafe { ::core::mem::transmute(sps_temporal_mvp_enabled_flag) }; sps_temporal_mvp_enabled_flag as u64 }); __bindgen_bitfield_unit.set(12usize, 1u8, { let strong_intra_smoothing_enabled_flag: u32 = unsafe { ::core::mem::transmute(strong_intra_smoothing_enabled_flag) }; strong_intra_smoothing_enabled_flag as u64 }); __bindgen_bitfield_unit.set(13usize, 1u8, { let vui_parameters_present_flag: u32 = unsafe { ::core::mem::transmute(vui_parameters_present_flag) }; vui_parameters_present_flag as u64 }); __bindgen_bitfield_unit.set(14usize, 1u8, { let sps_extension_present_flag: u32 = unsafe { ::core::mem::transmute(sps_extension_present_flag) }; sps_extension_present_flag as u64 }); __bindgen_bitfield_unit.set(15usize, 1u8, { let sps_range_extension_flag: u32 = unsafe { ::core::mem::transmute(sps_range_extension_flag) }; sps_range_extension_flag as u64 }); __bindgen_bitfield_unit.set(16usize, 1u8, { let transform_skip_rotation_enabled_flag: u32 = unsafe { ::core::mem::transmute(transform_skip_rotation_enabled_flag) }; transform_skip_rotation_enabled_flag as u64 }); __bindgen_bitfield_unit.set(17usize, 1u8, { let transform_skip_context_enabled_flag: u32 = unsafe { ::core::mem::transmute(transform_skip_context_enabled_flag) }; transform_skip_context_enabled_flag as u64 }); __bindgen_bitfield_unit.set(18usize, 1u8, { let implicit_rdpcm_enabled_flag: u32 = unsafe { ::core::mem::transmute(implicit_rdpcm_enabled_flag) }; implicit_rdpcm_enabled_flag as u64 }); __bindgen_bitfield_unit.set(19usize, 1u8, { let explicit_rdpcm_enabled_flag: u32 = unsafe { ::core::mem::transmute(explicit_rdpcm_enabled_flag) }; explicit_rdpcm_enabled_flag as u64 }); __bindgen_bitfield_unit.set(20usize, 1u8, { let extended_precision_processing_flag: u32 = unsafe { ::core::mem::transmute(extended_precision_processing_flag) }; extended_precision_processing_flag as u64 }); __bindgen_bitfield_unit.set(21usize, 1u8, { let intra_smoothing_disabled_flag: u32 = unsafe { ::core::mem::transmute(intra_smoothing_disabled_flag) }; intra_smoothing_disabled_flag as u64 }); __bindgen_bitfield_unit.set(22usize, 1u8, { let high_precision_offsets_enabled_flag: u32 = unsafe { ::core::mem::transmute(high_precision_offsets_enabled_flag) }; high_precision_offsets_enabled_flag as u64 }); __bindgen_bitfield_unit.set(23usize, 1u8, { let persistent_rice_adaptation_enabled_flag: u32 = unsafe { ::core::mem::transmute(persistent_rice_adaptation_enabled_flag) }; persistent_rice_adaptation_enabled_flag as u64 }); __bindgen_bitfield_unit.set(24usize, 1u8, { let cabac_bypass_alignment_enabled_flag: u32 = unsafe { 
::core::mem::transmute(cabac_bypass_alignment_enabled_flag) }; cabac_bypass_alignment_enabled_flag as u64 }); __bindgen_bitfield_unit.set(25usize, 1u8, { let sps_scc_extension_flag: u32 = unsafe { ::core::mem::transmute(sps_scc_extension_flag) }; sps_scc_extension_flag as u64 }); __bindgen_bitfield_unit.set(26usize, 1u8, { let sps_curr_pic_ref_enabled_flag: u32 = unsafe { ::core::mem::transmute(sps_curr_pic_ref_enabled_flag) }; sps_curr_pic_ref_enabled_flag as u64 }); __bindgen_bitfield_unit.set(27usize, 1u8, { let palette_mode_enabled_flag: u32 = unsafe { ::core::mem::transmute(palette_mode_enabled_flag) }; palette_mode_enabled_flag as u64 }); __bindgen_bitfield_unit.set(28usize, 1u8, { let sps_palette_predictor_initializers_present_flag: u32 = unsafe { ::core::mem::transmute(sps_palette_predictor_initializers_present_flag) }; sps_palette_predictor_initializers_present_flag as u64 }); __bindgen_bitfield_unit.set(29usize, 1u8, { let intra_boundary_filtering_disabled_flag: u32 = unsafe { ::core::mem::transmute(intra_boundary_filtering_disabled_flag) }; intra_boundary_filtering_disabled_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265ShortTermRefPicSetFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoH265ShortTermRefPicSetFlags() { assert_eq!( ::core::mem::size_of::<StdVideoH265ShortTermRefPicSetFlags>(), 4usize, concat!("Size of: ", stringify!(StdVideoH265ShortTermRefPicSetFlags)) ); assert_eq!( ::core::mem::align_of::<StdVideoH265ShortTermRefPicSetFlags>(), 4usize, concat!( "Alignment of ", stringify!(StdVideoH265ShortTermRefPicSetFlags) ) ); } impl StdVideoH265ShortTermRefPicSetFlags { #[inline] pub fn inter_ref_pic_set_prediction_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_inter_ref_pic_set_prediction_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn delta_rps_sign(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_delta_rps_sign(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( inter_ref_pic_set_prediction_flag: u32, delta_rps_sign: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let inter_ref_pic_set_prediction_flag: u32 = unsafe { ::core::mem::transmute(inter_ref_pic_set_prediction_flag) }; inter_ref_pic_set_prediction_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let delta_rps_sign: u32 = unsafe { ::core::mem::transmute(delta_rps_sign) }; delta_rps_sign as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265ShortTermRefPicSet { pub flags: StdVideoH265ShortTermRefPicSetFlags, pub delta_idx_minus1: u32, pub use_delta_flag: u16, pub abs_delta_rps_minus1: u16, pub used_by_curr_pic_flag: u16, pub used_by_curr_pic_s0_flag: u16, pub used_by_curr_pic_s1_flag: u16, pub reserved1: u16, pub reserved2: u8, pub reserved3: u8, pub num_negative_pics: u8, pub num_positive_pics: u8, pub delta_poc_s0_minus1: [u16; 16usize], pub delta_poc_s1_minus1: [u16; 16usize], } #[test] fn bindgen_test_layout_StdVideoH265ShortTermRefPicSet() { const UNINIT:
::core::mem::MaybeUninit<StdVideoH265ShortTermRefPicSet> = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::<StdVideoH265ShortTermRefPicSet>(), 88usize, concat!("Size of: ", stringify!(StdVideoH265ShortTermRefPicSet)) ); assert_eq!( ::core::mem::align_of::<StdVideoH265ShortTermRefPicSet>(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265ShortTermRefPicSet)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_idx_minus1) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(delta_idx_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).use_delta_flag) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(use_delta_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).abs_delta_rps_minus1) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(abs_delta_rps_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).used_by_curr_pic_flag) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(used_by_curr_pic_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).used_by_curr_pic_s0_flag) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(used_by_curr_pic_s0_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).used_by_curr_pic_s1_flag) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(used_by_curr_pic_s1_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize }, 21usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(reserved3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_negative_pics) as usize - ptr as usize }, 22usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(num_negative_pics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_positive_pics) as usize - ptr as usize }, 23usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(num_positive_pics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_poc_s0_minus1) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(delta_poc_s0_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_poc_s1_minus1) as usize - ptr as usize }, 56usize, concat!( "Offset of field: ", stringify!(StdVideoH265ShortTermRefPicSet), "::", stringify!(delta_poc_s1_minus1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265LongTermRefPicsSps { pub used_by_curr_pic_lt_sps_flag: u32, pub lt_ref_pic_poc_lsb_sps: [u32; 32usize], } #[test] fn
bindgen_test_layout_StdVideoH265LongTermRefPicsSps() { const UNINIT: ::core::mem::MaybeUninit<StdVideoH265LongTermRefPicsSps> = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::<StdVideoH265LongTermRefPicsSps>(), 132usize, concat!("Size of: ", stringify!(StdVideoH265LongTermRefPicsSps)) ); assert_eq!( ::core::mem::align_of::<StdVideoH265LongTermRefPicsSps>(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265LongTermRefPicsSps)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).used_by_curr_pic_lt_sps_flag) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265LongTermRefPicsSps), "::", stringify!(used_by_curr_pic_lt_sps_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).lt_ref_pic_poc_lsb_sps) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265LongTermRefPicsSps), "::", stringify!(lt_ref_pic_poc_lsb_sps) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265SequenceParameterSet { pub flags: StdVideoH265SpsFlags, pub chroma_format_idc: StdVideoH265ChromaFormatIdc, pub pic_width_in_luma_samples: u32, pub pic_height_in_luma_samples: u32, pub sps_video_parameter_set_id: u8, pub sps_max_sub_layers_minus1: u8, pub sps_seq_parameter_set_id: u8, pub bit_depth_luma_minus8: u8, pub bit_depth_chroma_minus8: u8, pub log2_max_pic_order_cnt_lsb_minus4: u8, pub log2_min_luma_coding_block_size_minus3: u8, pub log2_diff_max_min_luma_coding_block_size: u8, pub log2_min_luma_transform_block_size_minus2: u8, pub log2_diff_max_min_luma_transform_block_size: u8, pub max_transform_hierarchy_depth_inter: u8, pub max_transform_hierarchy_depth_intra: u8, pub num_short_term_ref_pic_sets: u8, pub num_long_term_ref_pics_sps: u8, pub pcm_sample_bit_depth_luma_minus1: u8, pub pcm_sample_bit_depth_chroma_minus1: u8, pub log2_min_pcm_luma_coding_block_size_minus3: u8, pub log2_diff_max_min_pcm_luma_coding_block_size: u8, pub reserved1: u8, pub reserved2: u8, pub palette_max_size: u8, pub delta_palette_max_predictor_size: u8, pub motion_vector_resolution_control_idc: u8, pub sps_num_palette_predictor_initializers_minus1: u8, pub conf_win_left_offset: u32, pub conf_win_right_offset: u32, pub conf_win_top_offset: u32, pub conf_win_bottom_offset: u32, pub pProfileTierLevel: *const StdVideoH265ProfileTierLevel, pub pDecPicBufMgr: *const StdVideoH265DecPicBufMgr, pub pScalingLists: *const StdVideoH265ScalingLists, pub pShortTermRefPicSet: *const StdVideoH265ShortTermRefPicSet, pub pLongTermRefPicsSps: *const StdVideoH265LongTermRefPicsSps, pub pSequenceParameterSetVui: *const StdVideoH265SequenceParameterSetVui, pub pPredictorPaletteEntries: *const StdVideoH265PredictorPaletteEntries, } #[test] fn bindgen_test_layout_StdVideoH265SequenceParameterSet() { const UNINIT: ::core::mem::MaybeUninit<StdVideoH265SequenceParameterSet> = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::<StdVideoH265SequenceParameterSet>(), 112usize, concat!("Size of: ", stringify!(StdVideoH265SequenceParameterSet)) ); assert_eq!( ::core::mem::align_of::<StdVideoH265SequenceParameterSet>(), 8usize, concat!( "Alignment of ", stringify!(StdVideoH265SequenceParameterSet) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_format_idc) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(chroma_format_idc) ) ); assert_eq!( unsafe {
::core::ptr::addr_of!((*ptr).pic_width_in_luma_samples) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pic_width_in_luma_samples) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_height_in_luma_samples) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pic_height_in_luma_samples) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(sps_video_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_max_sub_layers_minus1) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(sps_max_sub_layers_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_seq_parameter_set_id) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(sps_seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_depth_luma_minus8) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(bit_depth_luma_minus8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).bit_depth_chroma_minus8) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(bit_depth_chroma_minus8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_max_pic_order_cnt_lsb_minus4) as usize - ptr as usize }, 21usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_max_pic_order_cnt_lsb_minus4) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_min_luma_coding_block_size_minus3) as usize - ptr as usize }, 22usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_min_luma_coding_block_size_minus3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_diff_max_min_luma_coding_block_size) as usize - ptr as usize }, 23usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_diff_max_min_luma_coding_block_size) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_min_luma_transform_block_size_minus2) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_min_luma_transform_block_size_minus2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_diff_max_min_luma_transform_block_size) as usize - ptr as usize }, 25usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_diff_max_min_luma_transform_block_size) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_transform_hierarchy_depth_inter) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(max_transform_hierarchy_depth_inter) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_transform_hierarchy_depth_intra) as usize - ptr as usize }, 27usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(max_transform_hierarchy_depth_intra) ) ); assert_eq!( unsafe { 
::core::ptr::addr_of!((*ptr).num_short_term_ref_pic_sets) as usize - ptr as usize }, 28usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(num_short_term_ref_pic_sets) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_long_term_ref_pics_sps) as usize - ptr as usize }, 29usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(num_long_term_ref_pics_sps) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pcm_sample_bit_depth_luma_minus1) as usize - ptr as usize }, 30usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pcm_sample_bit_depth_luma_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pcm_sample_bit_depth_chroma_minus1) as usize - ptr as usize }, 31usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pcm_sample_bit_depth_chroma_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_min_pcm_luma_coding_block_size_minus3) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_min_pcm_luma_coding_block_size_minus3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_diff_max_min_pcm_luma_coding_block_size) as usize - ptr as usize }, 33usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(log2_diff_max_min_pcm_luma_coding_block_size) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 34usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 35usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).palette_max_size) as usize - ptr as usize }, 36usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(palette_max_size) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_palette_max_predictor_size) as usize - ptr as usize }, 37usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(delta_palette_max_predictor_size) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).motion_vector_resolution_control_idc) as usize - ptr as usize }, 38usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(motion_vector_resolution_control_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_num_palette_predictor_initializers_minus1) as usize - ptr as usize }, 39usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(sps_num_palette_predictor_initializers_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).conf_win_left_offset) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(conf_win_left_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).conf_win_right_offset) as usize - ptr as usize }, 44usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(conf_win_right_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).conf_win_top_offset) as usize - ptr as usize }, 48usize, concat!( "Offset of field: ", 
stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(conf_win_top_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).conf_win_bottom_offset) as usize - ptr as usize }, 52usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(conf_win_bottom_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pProfileTierLevel) as usize - ptr as usize }, 56usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pProfileTierLevel) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pDecPicBufMgr) as usize - ptr as usize }, 64usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pDecPicBufMgr) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize }, 72usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pScalingLists) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pShortTermRefPicSet) as usize - ptr as usize }, 80usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pShortTermRefPicSet) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pLongTermRefPicsSps) as usize - ptr as usize }, 88usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pLongTermRefPicsSps) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pSequenceParameterSetVui) as usize - ptr as usize }, 96usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pSequenceParameterSetVui) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pPredictorPaletteEntries) as usize - ptr as usize }, 104usize, concat!( "Offset of field: ", stringify!(StdVideoH265SequenceParameterSet), "::", stringify!(pPredictorPaletteEntries) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265PpsFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoH265PpsFlags() { assert_eq!( ::core::mem::size_of::<StdVideoH265PpsFlags>(), 4usize, concat!("Size of: ", stringify!(StdVideoH265PpsFlags)) ); assert_eq!( ::core::mem::align_of::<StdVideoH265PpsFlags>(), 4usize, concat!("Alignment of ", stringify!(StdVideoH265PpsFlags)) ); } impl StdVideoH265PpsFlags { #[inline] pub fn dependent_slice_segments_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_dependent_slice_segments_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn output_flag_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_output_flag_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn sign_data_hiding_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_sign_data_hiding_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn cabac_init_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_cabac_init_present_flag(&mut self, val: u32) { unsafe { let
val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn constrained_intra_pred_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_constrained_intra_pred_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn transform_skip_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_transform_skip_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn cu_qp_delta_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_cu_qp_delta_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn pps_slice_chroma_qp_offsets_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_pps_slice_chroma_qp_offsets_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn weighted_pred_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_weighted_pred_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn weighted_bipred_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_weighted_bipred_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn transquant_bypass_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_transquant_bypass_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn tiles_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_tiles_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn entropy_coding_sync_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) } } #[inline] pub fn set_entropy_coding_sync_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 1u8, val as u64) } } #[inline] pub fn uniform_spacing_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) } } #[inline] pub fn set_uniform_spacing_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(13usize, 1u8, val as u64) } } #[inline] pub fn loop_filter_across_tiles_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) } } #[inline] pub fn set_loop_filter_across_tiles_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(14usize, 1u8, val as u64) } } 
#[inline] pub fn pps_loop_filter_across_slices_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) } } #[inline] pub fn set_pps_loop_filter_across_slices_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(15usize, 1u8, val as u64) } } #[inline] pub fn deblocking_filter_control_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) } } #[inline] pub fn set_deblocking_filter_control_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(16usize, 1u8, val as u64) } } #[inline] pub fn deblocking_filter_override_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) } } #[inline] pub fn set_deblocking_filter_override_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(17usize, 1u8, val as u64) } } #[inline] pub fn pps_deblocking_filter_disabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u32) } } #[inline] pub fn set_pps_deblocking_filter_disabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(18usize, 1u8, val as u64) } } #[inline] pub fn pps_scaling_list_data_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(19usize, 1u8) as u32) } } #[inline] pub fn set_pps_scaling_list_data_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(19usize, 1u8, val as u64) } } #[inline] pub fn lists_modification_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(20usize, 1u8) as u32) } } #[inline] pub fn set_lists_modification_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(20usize, 1u8, val as u64) } } #[inline] pub fn slice_segment_header_extension_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(21usize, 1u8) as u32) } } #[inline] pub fn set_slice_segment_header_extension_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(21usize, 1u8, val as u64) } } #[inline] pub fn pps_extension_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(22usize, 1u8) as u32) } } #[inline] pub fn set_pps_extension_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(22usize, 1u8, val as u64) } } #[inline] pub fn cross_component_prediction_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(23usize, 1u8) as u32) } } #[inline] pub fn set_cross_component_prediction_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(23usize, 1u8, val as u64) } } #[inline] pub fn chroma_qp_offset_list_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(24usize, 1u8) as u32) } } #[inline] pub fn set_chroma_qp_offset_list_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(24usize, 1u8, val as u64) } } #[inline] pub fn pps_curr_pic_ref_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(25usize, 1u8) as u32) } } #[inline] pub fn set_pps_curr_pic_ref_enabled_flag(&mut self, val: 
u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(25usize, 1u8, val as u64) } } #[inline] pub fn residual_adaptive_colour_transform_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(26usize, 1u8) as u32) } } #[inline] pub fn set_residual_adaptive_colour_transform_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(26usize, 1u8, val as u64) } } #[inline] pub fn pps_slice_act_qp_offsets_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(27usize, 1u8) as u32) } } #[inline] pub fn set_pps_slice_act_qp_offsets_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(27usize, 1u8, val as u64) } } #[inline] pub fn pps_palette_predictor_initializers_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(28usize, 1u8) as u32) } } #[inline] pub fn set_pps_palette_predictor_initializers_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(28usize, 1u8, val as u64) } } #[inline] pub fn monochrome_palette_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(29usize, 1u8) as u32) } } #[inline] pub fn set_monochrome_palette_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(29usize, 1u8, val as u64) } } #[inline] pub fn pps_range_extension_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(30usize, 1u8) as u32) } } #[inline] pub fn set_pps_range_extension_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(30usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( dependent_slice_segments_enabled_flag: u32, output_flag_present_flag: u32, sign_data_hiding_enabled_flag: u32, cabac_init_present_flag: u32, constrained_intra_pred_flag: u32, transform_skip_enabled_flag: u32, cu_qp_delta_enabled_flag: u32, pps_slice_chroma_qp_offsets_present_flag: u32, weighted_pred_flag: u32, weighted_bipred_flag: u32, transquant_bypass_enabled_flag: u32, tiles_enabled_flag: u32, entropy_coding_sync_enabled_flag: u32, uniform_spacing_flag: u32, loop_filter_across_tiles_enabled_flag: u32, pps_loop_filter_across_slices_enabled_flag: u32, deblocking_filter_control_present_flag: u32, deblocking_filter_override_enabled_flag: u32, pps_deblocking_filter_disabled_flag: u32, pps_scaling_list_data_present_flag: u32, lists_modification_present_flag: u32, slice_segment_header_extension_present_flag: u32, pps_extension_present_flag: u32, cross_component_prediction_enabled_flag: u32, chroma_qp_offset_list_enabled_flag: u32, pps_curr_pic_ref_enabled_flag: u32, residual_adaptive_colour_transform_enabled_flag: u32, pps_slice_act_qp_offsets_present_flag: u32, pps_palette_predictor_initializers_present_flag: u32, monochrome_palette_flag: u32, pps_range_extension_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let dependent_slice_segments_enabled_flag: u32 = unsafe { ::core::mem::transmute(dependent_slice_segments_enabled_flag) }; dependent_slice_segments_enabled_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let output_flag_present_flag: u32 = unsafe { ::core::mem::transmute(output_flag_present_flag) }; output_flag_present_flag as u64 }); 
__bindgen_bitfield_unit.set(2usize, 1u8, { let sign_data_hiding_enabled_flag: u32 = unsafe { ::core::mem::transmute(sign_data_hiding_enabled_flag) }; sign_data_hiding_enabled_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let cabac_init_present_flag: u32 = unsafe { ::core::mem::transmute(cabac_init_present_flag) }; cabac_init_present_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let constrained_intra_pred_flag: u32 = unsafe { ::core::mem::transmute(constrained_intra_pred_flag) }; constrained_intra_pred_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let transform_skip_enabled_flag: u32 = unsafe { ::core::mem::transmute(transform_skip_enabled_flag) }; transform_skip_enabled_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let cu_qp_delta_enabled_flag: u32 = unsafe { ::core::mem::transmute(cu_qp_delta_enabled_flag) }; cu_qp_delta_enabled_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let pps_slice_chroma_qp_offsets_present_flag: u32 = unsafe { ::core::mem::transmute(pps_slice_chroma_qp_offsets_present_flag) }; pps_slice_chroma_qp_offsets_present_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let weighted_pred_flag: u32 = unsafe { ::core::mem::transmute(weighted_pred_flag) }; weighted_pred_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let weighted_bipred_flag: u32 = unsafe { ::core::mem::transmute(weighted_bipred_flag) }; weighted_bipred_flag as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let transquant_bypass_enabled_flag: u32 = unsafe { ::core::mem::transmute(transquant_bypass_enabled_flag) }; transquant_bypass_enabled_flag as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let tiles_enabled_flag: u32 = unsafe { ::core::mem::transmute(tiles_enabled_flag) }; tiles_enabled_flag as u64 }); __bindgen_bitfield_unit.set(12usize, 1u8, { let entropy_coding_sync_enabled_flag: u32 = unsafe { ::core::mem::transmute(entropy_coding_sync_enabled_flag) }; entropy_coding_sync_enabled_flag as u64 }); __bindgen_bitfield_unit.set(13usize, 1u8, { let uniform_spacing_flag: u32 = unsafe { ::core::mem::transmute(uniform_spacing_flag) }; uniform_spacing_flag as u64 }); __bindgen_bitfield_unit.set(14usize, 1u8, { let loop_filter_across_tiles_enabled_flag: u32 = unsafe { ::core::mem::transmute(loop_filter_across_tiles_enabled_flag) }; loop_filter_across_tiles_enabled_flag as u64 }); __bindgen_bitfield_unit.set(15usize, 1u8, { let pps_loop_filter_across_slices_enabled_flag: u32 = unsafe { ::core::mem::transmute(pps_loop_filter_across_slices_enabled_flag) }; pps_loop_filter_across_slices_enabled_flag as u64 }); __bindgen_bitfield_unit.set(16usize, 1u8, { let deblocking_filter_control_present_flag: u32 = unsafe { ::core::mem::transmute(deblocking_filter_control_present_flag) }; deblocking_filter_control_present_flag as u64 }); __bindgen_bitfield_unit.set(17usize, 1u8, { let deblocking_filter_override_enabled_flag: u32 = unsafe { ::core::mem::transmute(deblocking_filter_override_enabled_flag) }; deblocking_filter_override_enabled_flag as u64 }); __bindgen_bitfield_unit.set(18usize, 1u8, { let pps_deblocking_filter_disabled_flag: u32 = unsafe { ::core::mem::transmute(pps_deblocking_filter_disabled_flag) }; pps_deblocking_filter_disabled_flag as u64 }); __bindgen_bitfield_unit.set(19usize, 1u8, { let pps_scaling_list_data_present_flag: u32 = unsafe { ::core::mem::transmute(pps_scaling_list_data_present_flag) }; pps_scaling_list_data_present_flag as u64 }); __bindgen_bitfield_unit.set(20usize, 1u8, { let 
lists_modification_present_flag: u32 = unsafe { ::core::mem::transmute(lists_modification_present_flag) }; lists_modification_present_flag as u64 }); __bindgen_bitfield_unit.set(21usize, 1u8, { let slice_segment_header_extension_present_flag: u32 = unsafe { ::core::mem::transmute(slice_segment_header_extension_present_flag) }; slice_segment_header_extension_present_flag as u64 }); __bindgen_bitfield_unit.set(22usize, 1u8, { let pps_extension_present_flag: u32 = unsafe { ::core::mem::transmute(pps_extension_present_flag) }; pps_extension_present_flag as u64 }); __bindgen_bitfield_unit.set(23usize, 1u8, { let cross_component_prediction_enabled_flag: u32 = unsafe { ::core::mem::transmute(cross_component_prediction_enabled_flag) }; cross_component_prediction_enabled_flag as u64 }); __bindgen_bitfield_unit.set(24usize, 1u8, { let chroma_qp_offset_list_enabled_flag: u32 = unsafe { ::core::mem::transmute(chroma_qp_offset_list_enabled_flag) }; chroma_qp_offset_list_enabled_flag as u64 }); __bindgen_bitfield_unit.set(25usize, 1u8, { let pps_curr_pic_ref_enabled_flag: u32 = unsafe { ::core::mem::transmute(pps_curr_pic_ref_enabled_flag) }; pps_curr_pic_ref_enabled_flag as u64 }); __bindgen_bitfield_unit.set(26usize, 1u8, { let residual_adaptive_colour_transform_enabled_flag: u32 = unsafe { ::core::mem::transmute(residual_adaptive_colour_transform_enabled_flag) }; residual_adaptive_colour_transform_enabled_flag as u64 }); __bindgen_bitfield_unit.set(27usize, 1u8, { let pps_slice_act_qp_offsets_present_flag: u32 = unsafe { ::core::mem::transmute(pps_slice_act_qp_offsets_present_flag) }; pps_slice_act_qp_offsets_present_flag as u64 }); __bindgen_bitfield_unit.set(28usize, 1u8, { let pps_palette_predictor_initializers_present_flag: u32 = unsafe { ::core::mem::transmute(pps_palette_predictor_initializers_present_flag) }; pps_palette_predictor_initializers_present_flag as u64 }); __bindgen_bitfield_unit.set(29usize, 1u8, { let monochrome_palette_flag: u32 = unsafe { ::core::mem::transmute(monochrome_palette_flag) }; monochrome_palette_flag as u64 }); __bindgen_bitfield_unit.set(30usize, 1u8, { let pps_range_extension_flag: u32 = unsafe { ::core::mem::transmute(pps_range_extension_flag) }; pps_range_extension_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoH265PictureParameterSet { pub flags: StdVideoH265PpsFlags, pub pps_pic_parameter_set_id: u8, pub pps_seq_parameter_set_id: u8, pub sps_video_parameter_set_id: u8, pub num_extra_slice_header_bits: u8, pub num_ref_idx_l0_default_active_minus1: u8, pub num_ref_idx_l1_default_active_minus1: u8, pub init_qp_minus26: i8, pub diff_cu_qp_delta_depth: u8, pub pps_cb_qp_offset: i8, pub pps_cr_qp_offset: i8, pub pps_beta_offset_div2: i8, pub pps_tc_offset_div2: i8, pub log2_parallel_merge_level_minus2: u8, pub log2_max_transform_skip_block_size_minus2: u8, pub diff_cu_chroma_qp_offset_depth: u8, pub chroma_qp_offset_list_len_minus1: u8, pub cb_qp_offset_list: [i8; 6usize], pub cr_qp_offset_list: [i8; 6usize], pub log2_sao_offset_scale_luma: u8, pub log2_sao_offset_scale_chroma: u8, pub pps_act_y_qp_offset_plus5: i8, pub pps_act_cb_qp_offset_plus5: i8, pub pps_act_cr_qp_offset_plus3: i8, pub pps_num_palette_predictor_initializers: u8, pub luma_bit_depth_entry_minus8: u8, pub chroma_bit_depth_entry_minus8: u8, pub num_tile_columns_minus1: u8, pub num_tile_rows_minus1: u8, pub reserved1: u8, pub reserved2: u8, pub column_width_minus1: [u16; 19usize], pub row_height_minus1: [u16; 21usize], pub reserved3: u32, pub 
pScalingLists: *const StdVideoH265ScalingLists, pub pPredictorPaletteEntries: *const StdVideoH265PredictorPaletteEntries, } #[test] fn bindgen_test_layout_StdVideoH265PictureParameterSet() { const UNINIT: ::core::mem::MaybeUninit<StdVideoH265PictureParameterSet> = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::<StdVideoH265PictureParameterSet>(), 144usize, concat!("Size of: ", stringify!(StdVideoH265PictureParameterSet)) ); assert_eq!( ::core::mem::align_of::<StdVideoH265PictureParameterSet>(), 8usize, concat!("Alignment of ", stringify!(StdVideoH265PictureParameterSet)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_pic_parameter_set_id) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_pic_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_seq_parameter_set_id) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(sps_video_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_extra_slice_header_bits) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(num_extra_slice_header_bits) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l0_default_active_minus1) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(num_ref_idx_l0_default_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l1_default_active_minus1) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(num_ref_idx_l1_default_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).init_qp_minus26) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(init_qp_minus26) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).diff_cu_qp_delta_depth) as usize - ptr as usize }, 11usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(diff_cu_qp_delta_depth) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_cb_qp_offset) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_cb_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_cr_qp_offset) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_cr_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_beta_offset_div2) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_beta_offset_div2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_tc_offset_div2) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_tc_offset_div2) ) ); assert_eq!( unsafe {
::core::ptr::addr_of!((*ptr).log2_parallel_merge_level_minus2) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(log2_parallel_merge_level_minus2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_max_transform_skip_block_size_minus2) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(log2_max_transform_skip_block_size_minus2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).diff_cu_chroma_qp_offset_depth) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(diff_cu_chroma_qp_offset_depth) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_qp_offset_list_len_minus1) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(chroma_qp_offset_list_len_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cb_qp_offset_list) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(cb_qp_offset_list) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cr_qp_offset_list) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(cr_qp_offset_list) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_sao_offset_scale_luma) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(log2_sao_offset_scale_luma) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).log2_sao_offset_scale_chroma) as usize - ptr as usize }, 33usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(log2_sao_offset_scale_chroma) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_act_y_qp_offset_plus5) as usize - ptr as usize }, 34usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_act_y_qp_offset_plus5) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_act_cb_qp_offset_plus5) as usize - ptr as usize }, 35usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_act_cb_qp_offset_plus5) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_act_cr_qp_offset_plus3) as usize - ptr as usize }, 36usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_act_cr_qp_offset_plus3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_num_palette_predictor_initializers) as usize - ptr as usize }, 37usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pps_num_palette_predictor_initializers) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_bit_depth_entry_minus8) as usize - ptr as usize }, 38usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(luma_bit_depth_entry_minus8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_bit_depth_entry_minus8) as usize - ptr as usize }, 39usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(chroma_bit_depth_entry_minus8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_tile_columns_minus1) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", 
stringify!(StdVideoH265PictureParameterSet), "::", stringify!(num_tile_columns_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_tile_rows_minus1) as usize - ptr as usize }, 41usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(num_tile_rows_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 42usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 43usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).column_width_minus1) as usize - ptr as usize }, 44usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(column_width_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).row_height_minus1) as usize - ptr as usize }, 82usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(row_height_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize }, 124usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(reserved3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize }, 128usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pScalingLists) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pPredictorPaletteEntries) as usize - ptr as usize }, 136usize, concat!( "Offset of field: ", stringify!(StdVideoH265PictureParameterSet), "::", stringify!(pPredictorPaletteEntries) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH265PictureInfoFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH265PictureInfoFlags() { assert_eq!( ::core::mem::size_of::<StdVideoDecodeH265PictureInfoFlags>(), 4usize, concat!("Size of: ", stringify!(StdVideoDecodeH265PictureInfoFlags)) ); assert_eq!( ::core::mem::align_of::<StdVideoDecodeH265PictureInfoFlags>(), 4usize, concat!( "Alignment of ", stringify!(StdVideoDecodeH265PictureInfoFlags) ) ); } impl StdVideoDecodeH265PictureInfoFlags { #[inline] pub fn IrapPicFlag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_IrapPicFlag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn IdrPicFlag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_IdrPicFlag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn IsReference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_IsReference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn short_term_ref_pic_set_sps_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_short_term_ref_pic_set_sps_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val);
self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( IrapPicFlag: u32, IdrPicFlag: u32, IsReference: u32, short_term_ref_pic_set_sps_flag: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let IrapPicFlag: u32 = unsafe { ::core::mem::transmute(IrapPicFlag) }; IrapPicFlag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let IdrPicFlag: u32 = unsafe { ::core::mem::transmute(IdrPicFlag) }; IdrPicFlag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let IsReference: u32 = unsafe { ::core::mem::transmute(IsReference) }; IsReference as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let short_term_ref_pic_set_sps_flag: u32 = unsafe { ::core::mem::transmute(short_term_ref_pic_set_sps_flag) }; short_term_ref_pic_set_sps_flag as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH265PictureInfo { pub flags: StdVideoDecodeH265PictureInfoFlags, pub sps_video_parameter_set_id: u8, pub pps_seq_parameter_set_id: u8, pub pps_pic_parameter_set_id: u8, pub NumDeltaPocsOfRefRpsIdx: u8, pub PicOrderCntVal: i32, pub NumBitsForSTRefPicSetInSlice: u16, pub reserved: u16, pub RefPicSetStCurrBefore: [u8; 8usize], pub RefPicSetStCurrAfter: [u8; 8usize], pub RefPicSetLtCurr: [u8; 8usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH265PictureInfo() { const UNINIT: ::core::mem::MaybeUninit<StdVideoDecodeH265PictureInfo> = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::<StdVideoDecodeH265PictureInfo>(), 40usize, concat!("Size of: ", stringify!(StdVideoDecodeH265PictureInfo)) ); assert_eq!( ::core::mem::align_of::<StdVideoDecodeH265PictureInfo>(), 4usize, concat!("Alignment of ", stringify!(StdVideoDecodeH265PictureInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(sps_video_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_seq_parameter_set_id) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(pps_seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_pic_parameter_set_id) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(pps_pic_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).NumDeltaPocsOfRefRpsIdx) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(NumDeltaPocsOfRefRpsIdx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(PicOrderCntVal) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).NumBitsForSTRefPicSetInSlice) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(NumBitsForSTRefPicSetInSlice) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ",
stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(reserved) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicSetStCurrBefore) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(RefPicSetStCurrBefore) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicSetStCurrAfter) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(RefPicSetStCurrAfter) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicSetLtCurr) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265PictureInfo), "::", stringify!(RefPicSetLtCurr) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH265ReferenceInfoFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, pub __bindgen_padding_0: [u8; 3usize], } #[test] fn bindgen_test_layout_StdVideoDecodeH265ReferenceInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", stringify!(StdVideoDecodeH265ReferenceInfoFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoDecodeH265ReferenceInfoFlags) ) ); } impl StdVideoDecodeH265ReferenceInfoFlags { #[inline] pub fn used_for_long_term_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_used_for_long_term_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn unused_for_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_unused_for_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn new_bitfield_1( used_for_long_term_reference: u32, unused_for_reference: u32, ) -> __BindgenBitfieldUnit<[u8; 1usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let used_for_long_term_reference: u32 = unsafe { ::core::mem::transmute(used_for_long_term_reference) }; used_for_long_term_reference as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let unused_for_reference: u32 = unsafe { ::core::mem::transmute(unused_for_reference) }; unused_for_reference as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeH265ReferenceInfo { pub flags: StdVideoDecodeH265ReferenceInfoFlags, pub PicOrderCntVal: i32, } #[test] fn bindgen_test_layout_StdVideoDecodeH265ReferenceInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 8usize, concat!("Size of: ", stringify!(StdVideoDecodeH265ReferenceInfo)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoDecodeH265ReferenceInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265ReferenceInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeH265ReferenceInfo), "::", stringify!(PicOrderCntVal) ) ); } 
pub const StdVideoAV1Profile_STD_VIDEO_AV1_PROFILE_MAIN: StdVideoAV1Profile = 0; pub const StdVideoAV1Profile_STD_VIDEO_AV1_PROFILE_HIGH: StdVideoAV1Profile = 1; pub const StdVideoAV1Profile_STD_VIDEO_AV1_PROFILE_PROFESSIONAL: StdVideoAV1Profile = 2; pub const StdVideoAV1Profile_STD_VIDEO_AV1_PROFILE_INVALID: StdVideoAV1Profile = 2147483647; pub type StdVideoAV1Profile = ::core::ffi::c_uint; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_2_0: StdVideoAV1Level = 0; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_2_1: StdVideoAV1Level = 1; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_2_2: StdVideoAV1Level = 2; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_2_3: StdVideoAV1Level = 3; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_3_0: StdVideoAV1Level = 4; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_3_1: StdVideoAV1Level = 5; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_3_2: StdVideoAV1Level = 6; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_3_3: StdVideoAV1Level = 7; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_4_0: StdVideoAV1Level = 8; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_4_1: StdVideoAV1Level = 9; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_4_2: StdVideoAV1Level = 10; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_4_3: StdVideoAV1Level = 11; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_5_0: StdVideoAV1Level = 12; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_5_1: StdVideoAV1Level = 13; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_5_2: StdVideoAV1Level = 14; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_5_3: StdVideoAV1Level = 15; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_6_0: StdVideoAV1Level = 16; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_6_1: StdVideoAV1Level = 17; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_6_2: StdVideoAV1Level = 18; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_6_3: StdVideoAV1Level = 19; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_7_0: StdVideoAV1Level = 20; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_7_1: StdVideoAV1Level = 21; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_7_2: StdVideoAV1Level = 22; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_7_3: StdVideoAV1Level = 23; pub const StdVideoAV1Level_STD_VIDEO_AV1_LEVEL_INVALID: StdVideoAV1Level = 2147483647; pub type StdVideoAV1Level = ::core::ffi::c_uint; pub const StdVideoAV1FrameType_STD_VIDEO_AV1_FRAME_TYPE_KEY: StdVideoAV1FrameType = 0; pub const StdVideoAV1FrameType_STD_VIDEO_AV1_FRAME_TYPE_INTER: StdVideoAV1FrameType = 1; pub const StdVideoAV1FrameType_STD_VIDEO_AV1_FRAME_TYPE_INTRA_ONLY: StdVideoAV1FrameType = 2; pub const StdVideoAV1FrameType_STD_VIDEO_AV1_FRAME_TYPE_SWITCH: StdVideoAV1FrameType = 3; pub const StdVideoAV1FrameType_STD_VIDEO_AV1_FRAME_TYPE_INVALID: StdVideoAV1FrameType = 2147483647; pub type StdVideoAV1FrameType = ::core::ffi::c_uint; pub const StdVideoAV1InterpolationFilter_STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP: StdVideoAV1InterpolationFilter = 0; pub const StdVideoAV1InterpolationFilter_STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH: StdVideoAV1InterpolationFilter = 1; pub const StdVideoAV1InterpolationFilter_STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP: StdVideoAV1InterpolationFilter = 2; pub const StdVideoAV1InterpolationFilter_STD_VIDEO_AV1_INTERPOLATION_FILTER_BILINEAR: StdVideoAV1InterpolationFilter = 3; pub const StdVideoAV1InterpolationFilter_STD_VIDEO_AV1_INTERPOLATION_FILTER_SWITCHABLE: StdVideoAV1InterpolationFilter = 4; pub const StdVideoAV1InterpolationFilter_STD_VIDEO_AV1_INTERPOLATION_FILTER_INVALID: 
StdVideoAV1InterpolationFilter = 2147483647; pub type StdVideoAV1InterpolationFilter = ::core::ffi::c_uint; pub const StdVideoAV1TxMode_STD_VIDEO_AV1_TX_MODE_ONLY_4X4: StdVideoAV1TxMode = 0; pub const StdVideoAV1TxMode_STD_VIDEO_AV1_TX_MODE_LARGEST: StdVideoAV1TxMode = 1; pub const StdVideoAV1TxMode_STD_VIDEO_AV1_TX_MODE_SELECT: StdVideoAV1TxMode = 2; pub const StdVideoAV1TxMode_STD_VIDEO_AV1_TX_MODE_INVALID: StdVideoAV1TxMode = 2147483647; pub type StdVideoAV1TxMode = ::core::ffi::c_uint; pub const StdVideoAV1FrameRestorationType_STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_NONE: StdVideoAV1FrameRestorationType = 0; pub const StdVideoAV1FrameRestorationType_STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_WIENER: StdVideoAV1FrameRestorationType = 1; pub const StdVideoAV1FrameRestorationType_STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SGRPROJ: StdVideoAV1FrameRestorationType = 2; pub const StdVideoAV1FrameRestorationType_STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SWITCHABLE: StdVideoAV1FrameRestorationType = 3; pub const StdVideoAV1FrameRestorationType_STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_INVALID: StdVideoAV1FrameRestorationType = 2147483647; pub type StdVideoAV1FrameRestorationType = ::core::ffi::c_uint; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709: StdVideoAV1ColorPrimaries = 1; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED: StdVideoAV1ColorPrimaries = 2; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M: StdVideoAV1ColorPrimaries = 4; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G: StdVideoAV1ColorPrimaries = 5; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601: StdVideoAV1ColorPrimaries = 6; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240: StdVideoAV1ColorPrimaries = 7; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM: StdVideoAV1ColorPrimaries = 8; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020: StdVideoAV1ColorPrimaries = 9; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ: StdVideoAV1ColorPrimaries = 10; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431: StdVideoAV1ColorPrimaries = 11; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432: StdVideoAV1ColorPrimaries = 12; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213: StdVideoAV1ColorPrimaries = 22; pub const StdVideoAV1ColorPrimaries_STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID: StdVideoAV1ColorPrimaries = 2147483647; pub type StdVideoAV1ColorPrimaries = ::core::ffi::c_uint; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_0: StdVideoAV1TransferCharacteristics = 0; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709: StdVideoAV1TransferCharacteristics = 1; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_UNSPECIFIED: StdVideoAV1TransferCharacteristics = 2; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_3: StdVideoAV1TransferCharacteristics = 3; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_M: StdVideoAV1TransferCharacteristics = 4; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_B_G: StdVideoAV1TransferCharacteristics = 5; pub const 
StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_601: StdVideoAV1TransferCharacteristics = 6; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_240: StdVideoAV1TransferCharacteristics = 7; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LINEAR: StdVideoAV1TransferCharacteristics = 8; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100: StdVideoAV1TransferCharacteristics = 9; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100_SQRT10 : StdVideoAV1TransferCharacteristics = 10 ; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_IEC_61966: StdVideoAV1TransferCharacteristics = 11; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_1361: StdVideoAV1TransferCharacteristics = 12; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SRGB: StdVideoAV1TransferCharacteristics = 13; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_10_BIT : StdVideoAV1TransferCharacteristics = 14 ; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_12_BIT : StdVideoAV1TransferCharacteristics = 15 ; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_2084: StdVideoAV1TransferCharacteristics = 16; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_428: StdVideoAV1TransferCharacteristics = 17; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_HLG: StdVideoAV1TransferCharacteristics = 18; pub const StdVideoAV1TransferCharacteristics_STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_INVALID: StdVideoAV1TransferCharacteristics = 2147483647; pub type StdVideoAV1TransferCharacteristics = ::core::ffi::c_uint; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_IDENTITY: StdVideoAV1MatrixCoefficients = 0; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709: StdVideoAV1MatrixCoefficients = 1; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_UNSPECIFIED: StdVideoAV1MatrixCoefficients = 2; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_RESERVED_3: StdVideoAV1MatrixCoefficients = 3; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_FCC: StdVideoAV1MatrixCoefficients = 4; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_470_B_G: StdVideoAV1MatrixCoefficients = 5; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_601: StdVideoAV1MatrixCoefficients = 6; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_240: StdVideoAV1MatrixCoefficients = 7; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_YCGCO: StdVideoAV1MatrixCoefficients = 8; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_NCL: StdVideoAV1MatrixCoefficients = 9; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_CL: StdVideoAV1MatrixCoefficients = 10; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_2085: StdVideoAV1MatrixCoefficients = 11; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_NCL: StdVideoAV1MatrixCoefficients = 12; pub const 
StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_CL: StdVideoAV1MatrixCoefficients = 13; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_ICTCP: StdVideoAV1MatrixCoefficients = 14; pub const StdVideoAV1MatrixCoefficients_STD_VIDEO_AV1_MATRIX_COEFFICIENTS_INVALID: StdVideoAV1MatrixCoefficients = 2147483647; pub type StdVideoAV1MatrixCoefficients = ::core::ffi::c_uint; pub const StdVideoAV1ChromaSamplePosition_STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN: StdVideoAV1ChromaSamplePosition = 0; pub const StdVideoAV1ChromaSamplePosition_STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_VERTICAL: StdVideoAV1ChromaSamplePosition = 1; pub const StdVideoAV1ChromaSamplePosition_STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_COLOCATED: StdVideoAV1ChromaSamplePosition = 2; pub const StdVideoAV1ChromaSamplePosition_STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_RESERVED: StdVideoAV1ChromaSamplePosition = 3; pub const StdVideoAV1ChromaSamplePosition_STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_INVALID: StdVideoAV1ChromaSamplePosition = 2147483647; pub type StdVideoAV1ChromaSamplePosition = ::core::ffi::c_uint; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1ColorConfigFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1ColorConfigFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoAV1ColorConfigFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1ColorConfigFlags)) ); } impl StdVideoAV1ColorConfigFlags { #[inline] pub fn mono_chrome(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_mono_chrome(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn color_range(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_color_range(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn separate_uv_delta_q(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_separate_uv_delta_q(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn color_description_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_color_description_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 28u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 28u8, val as u64) } } #[inline] pub fn new_bitfield_1( mono_chrome: u32, color_range: u32, separate_uv_delta_q: u32, color_description_present_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let mono_chrome: u32 = unsafe { ::core::mem::transmute(mono_chrome) }; mono_chrome as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let 
color_range: u32 = unsafe { ::core::mem::transmute(color_range) }; color_range as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let separate_uv_delta_q: u32 = unsafe { ::core::mem::transmute(separate_uv_delta_q) }; separate_uv_delta_q as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let color_description_present_flag: u32 = unsafe { ::core::mem::transmute(color_description_present_flag) }; color_description_present_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 28u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1ColorConfig { pub flags: StdVideoAV1ColorConfigFlags, pub BitDepth: u8, pub subsampling_x: u8, pub subsampling_y: u8, pub reserved1: u8, pub color_primaries: StdVideoAV1ColorPrimaries, pub transfer_characteristics: StdVideoAV1TransferCharacteristics, pub matrix_coefficients: StdVideoAV1MatrixCoefficients, pub chroma_sample_position: StdVideoAV1ChromaSamplePosition, } #[test] fn bindgen_test_layout_StdVideoAV1ColorConfig() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 24usize, concat!("Size of: ", stringify!(StdVideoAV1ColorConfig)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1ColorConfig)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).BitDepth) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(BitDepth) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).subsampling_x) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(subsampling_x) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).subsampling_y) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(subsampling_y) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).color_primaries) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(color_primaries) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).transfer_characteristics) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(transfer_characteristics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).matrix_coefficients) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(matrix_coefficients) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_sample_position) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoAV1ColorConfig), "::", stringify!(chroma_sample_position) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1TimingInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1TimingInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, 
concat!("Size of: ", stringify!(StdVideoAV1TimingInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1TimingInfoFlags)) ); } impl StdVideoAV1TimingInfoFlags { #[inline] pub fn equal_picture_interval(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_equal_picture_interval(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 31u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 31u8, val as u64) } } #[inline] pub fn new_bitfield_1( equal_picture_interval: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let equal_picture_interval: u32 = unsafe { ::core::mem::transmute(equal_picture_interval) }; equal_picture_interval as u64 }); __bindgen_bitfield_unit.set(1usize, 31u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1TimingInfo { pub flags: StdVideoAV1TimingInfoFlags, pub num_units_in_display_tick: u32, pub time_scale: u32, pub num_ticks_per_picture_minus_1: u32, } #[test] fn bindgen_test_layout_StdVideoAV1TimingInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 16usize, concat!("Size of: ", stringify!(StdVideoAV1TimingInfo)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1TimingInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TimingInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_units_in_display_tick) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TimingInfo), "::", stringify!(num_units_in_display_tick) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).time_scale) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TimingInfo), "::", stringify!(time_scale) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ticks_per_picture_minus_1) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TimingInfo), "::", stringify!(num_ticks_per_picture_minus_1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1LoopFilterFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1LoopFilterFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoAV1LoopFilterFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1LoopFilterFlags)) ); } impl StdVideoAV1LoopFilterFlags { #[inline] pub fn loop_filter_delta_enabled(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_loop_filter_delta_enabled(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 
1u8, val as u64) } } #[inline] pub fn loop_filter_delta_update(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_loop_filter_delta_update(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( loop_filter_delta_enabled: u32, loop_filter_delta_update: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let loop_filter_delta_enabled: u32 = unsafe { ::core::mem::transmute(loop_filter_delta_enabled) }; loop_filter_delta_enabled as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let loop_filter_delta_update: u32 = unsafe { ::core::mem::transmute(loop_filter_delta_update) }; loop_filter_delta_update as u64 }); __bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1LoopFilter { pub flags: StdVideoAV1LoopFilterFlags, pub loop_filter_level: [u8; 4usize], pub loop_filter_sharpness: u8, pub update_ref_delta: u8, pub loop_filter_ref_deltas: [i8; 8usize], pub update_mode_delta: u8, pub loop_filter_mode_deltas: [i8; 2usize], } #[test] fn bindgen_test_layout_StdVideoAV1LoopFilter() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 24usize, concat!("Size of: ", stringify!(StdVideoAV1LoopFilter)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1LoopFilter)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).loop_filter_level) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(loop_filter_level) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).loop_filter_sharpness) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(loop_filter_sharpness) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).update_ref_delta) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(update_ref_delta) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).loop_filter_ref_deltas) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(loop_filter_ref_deltas) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).update_mode_delta) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(update_mode_delta) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).loop_filter_mode_deltas) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopFilter), "::", stringify!(loop_filter_mode_deltas) ) ); } #[repr(C)] 
#[derive(Debug, Copy, Clone)] pub struct StdVideoAV1QuantizationFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1QuantizationFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoAV1QuantizationFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1QuantizationFlags)) ); } impl StdVideoAV1QuantizationFlags { #[inline] pub fn using_qmatrix(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_using_qmatrix(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn diff_uv_delta(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_diff_uv_delta(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( using_qmatrix: u32, diff_uv_delta: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let using_qmatrix: u32 = unsafe { ::core::mem::transmute(using_qmatrix) }; using_qmatrix as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let diff_uv_delta: u32 = unsafe { ::core::mem::transmute(diff_uv_delta) }; diff_uv_delta as u64 }); __bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1Quantization { pub flags: StdVideoAV1QuantizationFlags, pub base_q_idx: u8, pub DeltaQYDc: i8, pub DeltaQUDc: i8, pub DeltaQUAc: i8, pub DeltaQVDc: i8, pub DeltaQVAc: i8, pub qm_y: u8, pub qm_u: u8, pub qm_v: u8, } #[test] fn bindgen_test_layout_StdVideoAV1Quantization() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 16usize, concat!("Size of: ", stringify!(StdVideoAV1Quantization)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1Quantization)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).base_q_idx) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(base_q_idx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).DeltaQYDc) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(DeltaQYDc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).DeltaQUDc) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(DeltaQUDc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).DeltaQUAc) as usize - ptr as usize }, 7usize, concat!( 
"Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(DeltaQUAc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).DeltaQVDc) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(DeltaQVDc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).DeltaQVAc) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(DeltaQVAc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).qm_y) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(qm_y) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).qm_u) as usize - ptr as usize }, 11usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(qm_u) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).qm_v) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Quantization), "::", stringify!(qm_v) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1Segmentation { pub FeatureEnabled: [u8; 8usize], pub FeatureData: [[i16; 8usize]; 8usize], } #[test] fn bindgen_test_layout_StdVideoAV1Segmentation() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 136usize, concat!("Size of: ", stringify!(StdVideoAV1Segmentation)) ); assert_eq!( ::core::mem::align_of::(), 2usize, concat!("Alignment of ", stringify!(StdVideoAV1Segmentation)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).FeatureEnabled) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Segmentation), "::", stringify!(FeatureEnabled) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).FeatureData) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1Segmentation), "::", stringify!(FeatureData) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1TileInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1TileInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoAV1TileInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1TileInfoFlags)) ); } impl StdVideoAV1TileInfoFlags { #[inline] pub fn uniform_tile_spacing_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_uniform_tile_spacing_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 31u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 31u8, val as u64) } } #[inline] pub fn new_bitfield_1( uniform_tile_spacing_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let uniform_tile_spacing_flag: u32 = unsafe { ::core::mem::transmute(uniform_tile_spacing_flag) }; uniform_tile_spacing_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 31u8, { let reserved: u32 = unsafe { 
::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1TileInfo { pub flags: StdVideoAV1TileInfoFlags, pub TileCols: u8, pub TileRows: u8, pub context_update_tile_id: u16, pub tile_size_bytes_minus_1: u8, pub reserved1: [u8; 7usize], pub pMiColStarts: *const u16, pub pMiRowStarts: *const u16, pub pWidthInSbsMinus1: *const u16, pub pHeightInSbsMinus1: *const u16, } #[test] fn bindgen_test_layout_StdVideoAV1TileInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 48usize, concat!("Size of: ", stringify!(StdVideoAV1TileInfo)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoAV1TileInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).TileCols) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(TileCols) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).TileRows) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(TileRows) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).context_update_tile_id) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(context_update_tile_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).tile_size_bytes_minus_1) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(tile_size_bytes_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pMiColStarts) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(pMiColStarts) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pMiRowStarts) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(pMiRowStarts) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pWidthInSbsMinus1) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(pWidthInSbsMinus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pHeightInSbsMinus1) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoAV1TileInfo), "::", stringify!(pHeightInSbsMinus1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1CDEF { pub cdef_damping_minus_3: u8, pub cdef_bits: u8, pub cdef_y_pri_strength: [u8; 8usize], pub cdef_y_sec_strength: [u8; 8usize], pub cdef_uv_pri_strength: [u8; 8usize], pub cdef_uv_sec_strength: [u8; 8usize], } #[test] fn bindgen_test_layout_StdVideoAV1CDEF() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 34usize, concat!("Size of: ", stringify!(StdVideoAV1CDEF)) ); assert_eq!( ::core::mem::align_of::(), 1usize, concat!("Alignment of ", stringify!(StdVideoAV1CDEF)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cdef_damping_minus_3) 
as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1CDEF), "::", stringify!(cdef_damping_minus_3) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cdef_bits) as usize - ptr as usize }, 1usize, concat!( "Offset of field: ", stringify!(StdVideoAV1CDEF), "::", stringify!(cdef_bits) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cdef_y_pri_strength) as usize - ptr as usize }, 2usize, concat!( "Offset of field: ", stringify!(StdVideoAV1CDEF), "::", stringify!(cdef_y_pri_strength) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cdef_y_sec_strength) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoAV1CDEF), "::", stringify!(cdef_y_sec_strength) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cdef_uv_pri_strength) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoAV1CDEF), "::", stringify!(cdef_uv_pri_strength) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cdef_uv_sec_strength) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoAV1CDEF), "::", stringify!(cdef_uv_sec_strength) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1LoopRestoration { pub FrameRestorationType: [StdVideoAV1FrameRestorationType; 3usize], pub LoopRestorationSize: [u16; 3usize], } #[test] fn bindgen_test_layout_StdVideoAV1LoopRestoration() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 20usize, concat!("Size of: ", stringify!(StdVideoAV1LoopRestoration)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1LoopRestoration)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).FrameRestorationType) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopRestoration), "::", stringify!(FrameRestorationType) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).LoopRestorationSize) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoAV1LoopRestoration), "::", stringify!(LoopRestorationSize) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1GlobalMotion { pub GmType: [u8; 8usize], pub gm_params: [[i32; 6usize]; 8usize], } #[test] fn bindgen_test_layout_StdVideoAV1GlobalMotion() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 200usize, concat!("Size of: ", stringify!(StdVideoAV1GlobalMotion)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1GlobalMotion)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).GmType) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1GlobalMotion), "::", stringify!(GmType) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).gm_params) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1GlobalMotion), "::", stringify!(gm_params) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1FilmGrainFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1FilmGrainFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoAV1FilmGrainFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", 
stringify!(StdVideoAV1FilmGrainFlags)) ); } impl StdVideoAV1FilmGrainFlags { #[inline] pub fn chroma_scaling_from_luma(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_chroma_scaling_from_luma(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn overlap_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_overlap_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn clip_to_restricted_range(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_clip_to_restricted_range(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn update_grain(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_update_grain(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 28u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 28u8, val as u64) } } #[inline] pub fn new_bitfield_1( chroma_scaling_from_luma: u32, overlap_flag: u32, clip_to_restricted_range: u32, update_grain: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let chroma_scaling_from_luma: u32 = unsafe { ::core::mem::transmute(chroma_scaling_from_luma) }; chroma_scaling_from_luma as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let overlap_flag: u32 = unsafe { ::core::mem::transmute(overlap_flag) }; overlap_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let clip_to_restricted_range: u32 = unsafe { ::core::mem::transmute(clip_to_restricted_range) }; clip_to_restricted_range as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let update_grain: u32 = unsafe { ::core::mem::transmute(update_grain) }; update_grain as u64 }); __bindgen_bitfield_unit.set(4usize, 28u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1FilmGrain { pub flags: StdVideoAV1FilmGrainFlags, pub grain_scaling_minus_8: u8, pub ar_coeff_lag: u8, pub ar_coeff_shift_minus_6: u8, pub grain_scale_shift: u8, pub grain_seed: u16, pub film_grain_params_ref_idx: u8, pub num_y_points: u8, pub point_y_value: [u8; 14usize], pub point_y_scaling: [u8; 14usize], pub num_cb_points: u8, pub point_cb_value: [u8; 10usize], pub point_cb_scaling: [u8; 10usize], pub num_cr_points: u8, pub point_cr_value: [u8; 10usize], pub point_cr_scaling: [u8; 10usize], pub ar_coeffs_y_plus_128: [i8; 24usize], pub ar_coeffs_cb_plus_128: [i8; 25usize], pub ar_coeffs_cr_plus_128: [i8; 25usize], pub cb_mult: u8, pub cb_luma_mult: u8, pub cb_offset: u16, pub cr_mult: u8, pub cr_luma_mult: u8, pub cr_offset: u16, } #[test] fn bindgen_test_layout_StdVideoAV1FilmGrain() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let 
ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 164usize, concat!("Size of: ", stringify!(StdVideoAV1FilmGrain)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1FilmGrain)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).grain_scaling_minus_8) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(grain_scaling_minus_8) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ar_coeff_lag) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(ar_coeff_lag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ar_coeff_shift_minus_6) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(ar_coeff_shift_minus_6) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).grain_scale_shift) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(grain_scale_shift) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).grain_seed) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(grain_seed) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).film_grain_params_ref_idx) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(film_grain_params_ref_idx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_y_points) as usize - ptr as usize }, 11usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(num_y_points) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).point_y_value) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(point_y_value) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).point_y_scaling) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(point_y_scaling) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_cb_points) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(num_cb_points) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).point_cb_value) as usize - ptr as usize }, 41usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(point_cb_value) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).point_cb_scaling) as usize - ptr as usize }, 51usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(point_cb_scaling) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_cr_points) as usize - ptr as usize }, 61usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(num_cr_points) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).point_cr_value) as usize - ptr as usize }, 62usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(point_cr_value) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).point_cr_scaling) as usize - ptr as usize }, 72usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(point_cr_scaling) ) ); assert_eq!( unsafe { 
::core::ptr::addr_of!((*ptr).ar_coeffs_y_plus_128) as usize - ptr as usize }, 82usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(ar_coeffs_y_plus_128) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ar_coeffs_cb_plus_128) as usize - ptr as usize }, 106usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(ar_coeffs_cb_plus_128) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).ar_coeffs_cr_plus_128) as usize - ptr as usize }, 131usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(ar_coeffs_cr_plus_128) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cb_mult) as usize - ptr as usize }, 156usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(cb_mult) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cb_luma_mult) as usize - ptr as usize }, 157usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(cb_luma_mult) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cb_offset) as usize - ptr as usize }, 158usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(cb_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cr_mult) as usize - ptr as usize }, 160usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(cr_mult) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cr_luma_mult) as usize - ptr as usize }, 161usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(cr_luma_mult) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cr_offset) as usize - ptr as usize }, 162usize, concat!( "Offset of field: ", stringify!(StdVideoAV1FilmGrain), "::", stringify!(cr_offset) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1SequenceHeaderFlags { pub _bitfield_align_1: [u16; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoAV1SequenceHeaderFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoAV1SequenceHeaderFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoAV1SequenceHeaderFlags)) ); } impl StdVideoAV1SequenceHeaderFlags { #[inline] pub fn still_picture(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_still_picture(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn reduced_still_picture_header(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_reduced_still_picture_header(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn use_128x128_superblock(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_use_128x128_superblock(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn enable_filter_intra(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_enable_filter_intra(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] 
pub fn enable_intra_edge_filter(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_enable_intra_edge_filter(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn enable_interintra_compound(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_enable_interintra_compound(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn enable_masked_compound(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_enable_masked_compound(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn enable_warped_motion(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_enable_warped_motion(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn enable_dual_filter(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_enable_dual_filter(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn enable_order_hint(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_enable_order_hint(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn enable_jnt_comp(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_enable_jnt_comp(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn enable_ref_frame_mvs(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_enable_ref_frame_mvs(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn frame_id_numbers_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) } } #[inline] pub fn set_frame_id_numbers_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 1u8, val as u64) } } #[inline] pub fn enable_superres(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) } } #[inline] pub fn set_enable_superres(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(13usize, 1u8, val as u64) } } #[inline] pub fn enable_cdef(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) } } #[inline] pub fn set_enable_cdef(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(14usize, 1u8, val as u64) } } #[inline] pub fn enable_restoration(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) } } #[inline] pub fn set_enable_restoration(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); 
self._bitfield_1.set(15usize, 1u8, val as u64) } } #[inline] pub fn film_grain_params_present(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) } } #[inline] pub fn set_film_grain_params_present(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(16usize, 1u8, val as u64) } } #[inline] pub fn timing_info_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) } } #[inline] pub fn set_timing_info_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(17usize, 1u8, val as u64) } } #[inline] pub fn initial_display_delay_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u32) } } #[inline] pub fn set_initial_display_delay_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(18usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(19usize, 13u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(19usize, 13u8, val as u64) } } #[inline] pub fn new_bitfield_1( still_picture: u32, reduced_still_picture_header: u32, use_128x128_superblock: u32, enable_filter_intra: u32, enable_intra_edge_filter: u32, enable_interintra_compound: u32, enable_masked_compound: u32, enable_warped_motion: u32, enable_dual_filter: u32, enable_order_hint: u32, enable_jnt_comp: u32, enable_ref_frame_mvs: u32, frame_id_numbers_present_flag: u32, enable_superres: u32, enable_cdef: u32, enable_restoration: u32, film_grain_params_present: u32, timing_info_present_flag: u32, initial_display_delay_present_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let still_picture: u32 = unsafe { ::core::mem::transmute(still_picture) }; still_picture as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let reduced_still_picture_header: u32 = unsafe { ::core::mem::transmute(reduced_still_picture_header) }; reduced_still_picture_header as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let use_128x128_superblock: u32 = unsafe { ::core::mem::transmute(use_128x128_superblock) }; use_128x128_superblock as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let enable_filter_intra: u32 = unsafe { ::core::mem::transmute(enable_filter_intra) }; enable_filter_intra as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let enable_intra_edge_filter: u32 = unsafe { ::core::mem::transmute(enable_intra_edge_filter) }; enable_intra_edge_filter as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let enable_interintra_compound: u32 = unsafe { ::core::mem::transmute(enable_interintra_compound) }; enable_interintra_compound as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let enable_masked_compound: u32 = unsafe { ::core::mem::transmute(enable_masked_compound) }; enable_masked_compound as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let enable_warped_motion: u32 = unsafe { ::core::mem::transmute(enable_warped_motion) }; enable_warped_motion as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let enable_dual_filter: u32 = unsafe { ::core::mem::transmute(enable_dual_filter) }; enable_dual_filter as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { 
let enable_order_hint: u32 = unsafe { ::core::mem::transmute(enable_order_hint) }; enable_order_hint as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let enable_jnt_comp: u32 = unsafe { ::core::mem::transmute(enable_jnt_comp) }; enable_jnt_comp as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let enable_ref_frame_mvs: u32 = unsafe { ::core::mem::transmute(enable_ref_frame_mvs) }; enable_ref_frame_mvs as u64 }); __bindgen_bitfield_unit.set(12usize, 1u8, { let frame_id_numbers_present_flag: u32 = unsafe { ::core::mem::transmute(frame_id_numbers_present_flag) }; frame_id_numbers_present_flag as u64 }); __bindgen_bitfield_unit.set(13usize, 1u8, { let enable_superres: u32 = unsafe { ::core::mem::transmute(enable_superres) }; enable_superres as u64 }); __bindgen_bitfield_unit.set(14usize, 1u8, { let enable_cdef: u32 = unsafe { ::core::mem::transmute(enable_cdef) }; enable_cdef as u64 }); __bindgen_bitfield_unit.set(15usize, 1u8, { let enable_restoration: u32 = unsafe { ::core::mem::transmute(enable_restoration) }; enable_restoration as u64 }); __bindgen_bitfield_unit.set(16usize, 1u8, { let film_grain_params_present: u32 = unsafe { ::core::mem::transmute(film_grain_params_present) }; film_grain_params_present as u64 }); __bindgen_bitfield_unit.set(17usize, 1u8, { let timing_info_present_flag: u32 = unsafe { ::core::mem::transmute(timing_info_present_flag) }; timing_info_present_flag as u64 }); __bindgen_bitfield_unit.set(18usize, 1u8, { let initial_display_delay_present_flag: u32 = unsafe { ::core::mem::transmute(initial_display_delay_present_flag) }; initial_display_delay_present_flag as u64 }); __bindgen_bitfield_unit.set(19usize, 13u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoAV1SequenceHeader { pub flags: StdVideoAV1SequenceHeaderFlags, pub seq_profile: StdVideoAV1Profile, pub frame_width_bits_minus_1: u8, pub frame_height_bits_minus_1: u8, pub max_frame_width_minus_1: u16, pub max_frame_height_minus_1: u16, pub delta_frame_id_length_minus_2: u8, pub additional_frame_id_length_minus_1: u8, pub order_hint_bits_minus_1: u8, pub seq_force_integer_mv: u8, pub seq_force_screen_content_tools: u8, pub reserved1: [u8; 5usize], pub pColorConfig: *const StdVideoAV1ColorConfig, pub pTimingInfo: *const StdVideoAV1TimingInfo, } #[test] fn bindgen_test_layout_StdVideoAV1SequenceHeader() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 40usize, concat!("Size of: ", stringify!(StdVideoAV1SequenceHeader)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoAV1SequenceHeader)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_profile) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(seq_profile) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_width_bits_minus_1) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(frame_width_bits_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_height_bits_minus_1) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", 
stringify!(StdVideoAV1SequenceHeader), "::", stringify!(frame_height_bits_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_frame_width_minus_1) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(max_frame_width_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_frame_height_minus_1) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(max_frame_height_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_frame_id_length_minus_2) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(delta_frame_id_length_minus_2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).additional_frame_id_length_minus_1) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(additional_frame_id_length_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).order_hint_bits_minus_1) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(order_hint_bits_minus_1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_force_integer_mv) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(seq_force_integer_mv) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_force_screen_content_tools) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(seq_force_screen_content_tools) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pColorConfig) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(pColorConfig) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pTimingInfo) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoAV1SequenceHeader), "::", stringify!(pTimingInfo) ) ); } #[repr(C)] #[repr(align(4))] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeAV1PictureInfoFlags { pub _bitfield_align_1: [u8; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoDecodeAV1PictureInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoDecodeAV1PictureInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoDecodeAV1PictureInfoFlags) ) ); } impl StdVideoDecodeAV1PictureInfoFlags { #[inline] pub fn error_resilient_mode(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_error_resilient_mode(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn disable_cdf_update(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_disable_cdf_update(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn use_superres(&self) -> u32 { unsafe { 
::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_use_superres(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn render_and_frame_size_different(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_render_and_frame_size_different(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn allow_screen_content_tools(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_allow_screen_content_tools(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn is_filter_switchable(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_is_filter_switchable(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn force_integer_mv(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_force_integer_mv(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn frame_size_override_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_frame_size_override_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn buffer_removal_time_present_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_buffer_removal_time_present_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn allow_intrabc(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_allow_intrabc(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn frame_refs_short_signaling(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_frame_refs_short_signaling(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn allow_high_precision_mv(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_allow_high_precision_mv(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn is_motion_mode_switchable(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) } } #[inline] pub fn set_is_motion_mode_switchable(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 1u8, val as u64) } } #[inline] pub fn use_ref_frame_mvs(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) } } #[inline] pub fn set_use_ref_frame_mvs(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); 
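// Illustrative sketch (not part of the generated bindings): these decode-side
// flags are set bit by bit through the generated accessors and then travel in
// the `flags` field of `StdVideoDecodeAV1PictureInfo` (declared further below).
// Assuming zero-initialization is an acceptable starting point for both structs:
//
//     let mut flags: StdVideoDecodeAV1PictureInfoFlags = unsafe { ::core::mem::zeroed() };
//     flags.set_error_resilient_mode(1);
//     flags.set_apply_grain(1);
//
//     let mut info: StdVideoDecodeAV1PictureInfo = unsafe { ::core::mem::zeroed() };
//     info.flags = flags;
//     // pTileInfo, pQuantization, etc. are null here; they typically need to
//     // point at valid codec-parameter structs before the info is used.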
self._bitfield_1.set(13usize, 1u8, val as u64) } } #[inline] pub fn disable_frame_end_update_cdf(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) } } #[inline] pub fn set_disable_frame_end_update_cdf(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(14usize, 1u8, val as u64) } } #[inline] pub fn allow_warped_motion(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) } } #[inline] pub fn set_allow_warped_motion(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(15usize, 1u8, val as u64) } } #[inline] pub fn reduced_tx_set(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) } } #[inline] pub fn set_reduced_tx_set(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(16usize, 1u8, val as u64) } } #[inline] pub fn reference_select(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) } } #[inline] pub fn set_reference_select(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(17usize, 1u8, val as u64) } } #[inline] pub fn skip_mode_present(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u32) } } #[inline] pub fn set_skip_mode_present(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(18usize, 1u8, val as u64) } } #[inline] pub fn delta_q_present(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(19usize, 1u8) as u32) } } #[inline] pub fn set_delta_q_present(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(19usize, 1u8, val as u64) } } #[inline] pub fn delta_lf_present(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(20usize, 1u8) as u32) } } #[inline] pub fn set_delta_lf_present(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(20usize, 1u8, val as u64) } } #[inline] pub fn delta_lf_multi(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(21usize, 1u8) as u32) } } #[inline] pub fn set_delta_lf_multi(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(21usize, 1u8, val as u64) } } #[inline] pub fn segmentation_enabled(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(22usize, 1u8) as u32) } } #[inline] pub fn set_segmentation_enabled(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(22usize, 1u8, val as u64) } } #[inline] pub fn segmentation_update_map(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(23usize, 1u8) as u32) } } #[inline] pub fn set_segmentation_update_map(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(23usize, 1u8, val as u64) } } #[inline] pub fn segmentation_temporal_update(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(24usize, 1u8) as u32) } } #[inline] pub fn set_segmentation_temporal_update(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(24usize, 1u8, val as u64) } } #[inline] pub fn segmentation_update_data(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(25usize, 1u8) as u32) } } #[inline] pub fn set_segmentation_update_data(&mut self, val: 
u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(25usize, 1u8, val as u64) } } #[inline] pub fn UsesLr(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(26usize, 1u8) as u32) } } #[inline] pub fn set_UsesLr(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(26usize, 1u8, val as u64) } } #[inline] pub fn usesChromaLr(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(27usize, 1u8) as u32) } } #[inline] pub fn set_usesChromaLr(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(27usize, 1u8, val as u64) } } #[inline] pub fn apply_grain(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(28usize, 1u8) as u32) } } #[inline] pub fn set_apply_grain(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(28usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(29usize, 3u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(29usize, 3u8, val as u64) } } #[inline] pub fn new_bitfield_1( error_resilient_mode: u32, disable_cdf_update: u32, use_superres: u32, render_and_frame_size_different: u32, allow_screen_content_tools: u32, is_filter_switchable: u32, force_integer_mv: u32, frame_size_override_flag: u32, buffer_removal_time_present_flag: u32, allow_intrabc: u32, frame_refs_short_signaling: u32, allow_high_precision_mv: u32, is_motion_mode_switchable: u32, use_ref_frame_mvs: u32, disable_frame_end_update_cdf: u32, allow_warped_motion: u32, reduced_tx_set: u32, reference_select: u32, skip_mode_present: u32, delta_q_present: u32, delta_lf_present: u32, delta_lf_multi: u32, segmentation_enabled: u32, segmentation_update_map: u32, segmentation_temporal_update: u32, segmentation_update_data: u32, UsesLr: u32, usesChromaLr: u32, apply_grain: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let error_resilient_mode: u32 = unsafe { ::core::mem::transmute(error_resilient_mode) }; error_resilient_mode as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let disable_cdf_update: u32 = unsafe { ::core::mem::transmute(disable_cdf_update) }; disable_cdf_update as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let use_superres: u32 = unsafe { ::core::mem::transmute(use_superres) }; use_superres as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let render_and_frame_size_different: u32 = unsafe { ::core::mem::transmute(render_and_frame_size_different) }; render_and_frame_size_different as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let allow_screen_content_tools: u32 = unsafe { ::core::mem::transmute(allow_screen_content_tools) }; allow_screen_content_tools as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let is_filter_switchable: u32 = unsafe { ::core::mem::transmute(is_filter_switchable) }; is_filter_switchable as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let force_integer_mv: u32 = unsafe { ::core::mem::transmute(force_integer_mv) }; force_integer_mv as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let frame_size_override_flag: u32 = unsafe { ::core::mem::transmute(frame_size_override_flag) }; frame_size_override_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 
1u8, { let buffer_removal_time_present_flag: u32 = unsafe { ::core::mem::transmute(buffer_removal_time_present_flag) }; buffer_removal_time_present_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let allow_intrabc: u32 = unsafe { ::core::mem::transmute(allow_intrabc) }; allow_intrabc as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let frame_refs_short_signaling: u32 = unsafe { ::core::mem::transmute(frame_refs_short_signaling) }; frame_refs_short_signaling as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let allow_high_precision_mv: u32 = unsafe { ::core::mem::transmute(allow_high_precision_mv) }; allow_high_precision_mv as u64 }); __bindgen_bitfield_unit.set(12usize, 1u8, { let is_motion_mode_switchable: u32 = unsafe { ::core::mem::transmute(is_motion_mode_switchable) }; is_motion_mode_switchable as u64 }); __bindgen_bitfield_unit.set(13usize, 1u8, { let use_ref_frame_mvs: u32 = unsafe { ::core::mem::transmute(use_ref_frame_mvs) }; use_ref_frame_mvs as u64 }); __bindgen_bitfield_unit.set(14usize, 1u8, { let disable_frame_end_update_cdf: u32 = unsafe { ::core::mem::transmute(disable_frame_end_update_cdf) }; disable_frame_end_update_cdf as u64 }); __bindgen_bitfield_unit.set(15usize, 1u8, { let allow_warped_motion: u32 = unsafe { ::core::mem::transmute(allow_warped_motion) }; allow_warped_motion as u64 }); __bindgen_bitfield_unit.set(16usize, 1u8, { let reduced_tx_set: u32 = unsafe { ::core::mem::transmute(reduced_tx_set) }; reduced_tx_set as u64 }); __bindgen_bitfield_unit.set(17usize, 1u8, { let reference_select: u32 = unsafe { ::core::mem::transmute(reference_select) }; reference_select as u64 }); __bindgen_bitfield_unit.set(18usize, 1u8, { let skip_mode_present: u32 = unsafe { ::core::mem::transmute(skip_mode_present) }; skip_mode_present as u64 }); __bindgen_bitfield_unit.set(19usize, 1u8, { let delta_q_present: u32 = unsafe { ::core::mem::transmute(delta_q_present) }; delta_q_present as u64 }); __bindgen_bitfield_unit.set(20usize, 1u8, { let delta_lf_present: u32 = unsafe { ::core::mem::transmute(delta_lf_present) }; delta_lf_present as u64 }); __bindgen_bitfield_unit.set(21usize, 1u8, { let delta_lf_multi: u32 = unsafe { ::core::mem::transmute(delta_lf_multi) }; delta_lf_multi as u64 }); __bindgen_bitfield_unit.set(22usize, 1u8, { let segmentation_enabled: u32 = unsafe { ::core::mem::transmute(segmentation_enabled) }; segmentation_enabled as u64 }); __bindgen_bitfield_unit.set(23usize, 1u8, { let segmentation_update_map: u32 = unsafe { ::core::mem::transmute(segmentation_update_map) }; segmentation_update_map as u64 }); __bindgen_bitfield_unit.set(24usize, 1u8, { let segmentation_temporal_update: u32 = unsafe { ::core::mem::transmute(segmentation_temporal_update) }; segmentation_temporal_update as u64 }); __bindgen_bitfield_unit.set(25usize, 1u8, { let segmentation_update_data: u32 = unsafe { ::core::mem::transmute(segmentation_update_data) }; segmentation_update_data as u64 }); __bindgen_bitfield_unit.set(26usize, 1u8, { let UsesLr: u32 = unsafe { ::core::mem::transmute(UsesLr) }; UsesLr as u64 }); __bindgen_bitfield_unit.set(27usize, 1u8, { let usesChromaLr: u32 = unsafe { ::core::mem::transmute(usesChromaLr) }; usesChromaLr as u64 }); __bindgen_bitfield_unit.set(28usize, 1u8, { let apply_grain: u32 = unsafe { ::core::mem::transmute(apply_grain) }; apply_grain as u64 }); __bindgen_bitfield_unit.set(29usize, 3u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, 
Clone)] pub struct StdVideoDecodeAV1PictureInfo { pub flags: StdVideoDecodeAV1PictureInfoFlags, pub frame_type: StdVideoAV1FrameType, pub current_frame_id: u32, pub OrderHint: u8, pub primary_ref_frame: u8, pub refresh_frame_flags: u8, pub reserved1: u8, pub interpolation_filter: StdVideoAV1InterpolationFilter, pub TxMode: StdVideoAV1TxMode, pub delta_q_res: u8, pub delta_lf_res: u8, pub SkipModeFrame: [u8; 2usize], pub coded_denom: u8, pub reserved2: [u8; 3usize], pub OrderHints: [u8; 8usize], pub expectedFrameId: [u32; 8usize], pub pTileInfo: *const StdVideoAV1TileInfo, pub pQuantization: *const StdVideoAV1Quantization, pub pSegmentation: *const StdVideoAV1Segmentation, pub pLoopFilter: *const StdVideoAV1LoopFilter, pub pCDEF: *const StdVideoAV1CDEF, pub pLoopRestoration: *const StdVideoAV1LoopRestoration, pub pGlobalMotion: *const StdVideoAV1GlobalMotion, pub pFilmGrain: *const StdVideoAV1FilmGrain, } #[test] fn bindgen_test_layout_StdVideoDecodeAV1PictureInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 136usize, concat!("Size of: ", stringify!(StdVideoDecodeAV1PictureInfo)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoDecodeAV1PictureInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_type) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(frame_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).current_frame_id) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(current_frame_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).OrderHint) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(OrderHint) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).primary_ref_frame) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(primary_ref_frame) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).refresh_frame_flags) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(refresh_frame_flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).interpolation_filter) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(interpolation_filter) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).TxMode) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(TxMode) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_q_res) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(delta_q_res) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_lf_res) as usize - ptr as usize }, 25usize, concat!( "Offset of field: ", 
stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(delta_lf_res) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).SkipModeFrame) as usize - ptr as usize }, 26usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(SkipModeFrame) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).coded_denom) as usize - ptr as usize }, 28usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(coded_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize }, 29usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(reserved2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).OrderHints) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(OrderHints) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).expectedFrameId) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(expectedFrameId) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pTileInfo) as usize - ptr as usize }, 72usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pTileInfo) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pQuantization) as usize - ptr as usize }, 80usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pQuantization) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pSegmentation) as usize - ptr as usize }, 88usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pSegmentation) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pLoopFilter) as usize - ptr as usize }, 96usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pLoopFilter) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pCDEF) as usize - ptr as usize }, 104usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pCDEF) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pLoopRestoration) as usize - ptr as usize }, 112usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pLoopRestoration) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pGlobalMotion) as usize - ptr as usize }, 120usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pGlobalMotion) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pFilmGrain) as usize - ptr as usize }, 128usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1PictureInfo), "::", stringify!(pFilmGrain) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeAV1ReferenceInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoDecodeAV1ReferenceInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoDecodeAV1ReferenceInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoDecodeAV1ReferenceInfoFlags) ) ); } impl StdVideoDecodeAV1ReferenceInfoFlags { #[inline] pub fn disable_frame_end_update_cdf(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_disable_frame_end_update_cdf(&mut self, val: u32) { unsafe { let val: u32 = 
::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn segmentation_enabled(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_segmentation_enabled(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( disable_frame_end_update_cdf: u32, segmentation_enabled: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let disable_frame_end_update_cdf: u32 = unsafe { ::core::mem::transmute(disable_frame_end_update_cdf) }; disable_frame_end_update_cdf as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let segmentation_enabled: u32 = unsafe { ::core::mem::transmute(segmentation_enabled) }; segmentation_enabled as u64 }); __bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoDecodeAV1ReferenceInfo { pub flags: StdVideoDecodeAV1ReferenceInfoFlags, pub frame_type: u8, pub RefFrameSignBias: u8, pub OrderHint: u8, pub SavedOrderHints: [u8; 8usize], } #[test] fn bindgen_test_layout_StdVideoDecodeAV1ReferenceInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 16usize, concat!("Size of: ", stringify!(StdVideoDecodeAV1ReferenceInfo)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoDecodeAV1ReferenceInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1ReferenceInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_type) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1ReferenceInfo), "::", stringify!(frame_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefFrameSignBias) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1ReferenceInfo), "::", stringify!(RefFrameSignBias) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).OrderHint) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1ReferenceInfo), "::", stringify!(OrderHint) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).SavedOrderHints) as usize - ptr as usize }, 7usize, concat!( "Offset of field: ", stringify!(StdVideoDecodeAV1ReferenceInfo), "::", stringify!(SavedOrderHints) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264WeightTableFlags { pub luma_weight_l0_flag: u32, pub chroma_weight_l0_flag: u32, pub luma_weight_l1_flag: u32, pub chroma_weight_l1_flag: u32, } #[test] fn bindgen_test_layout_StdVideoEncodeH264WeightTableFlags() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 16usize, concat!("Size of: ", 
stringify!(StdVideoEncodeH264WeightTableFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264WeightTableFlags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_weight_l0_flag) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTableFlags), "::", stringify!(luma_weight_l0_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_weight_l0_flag) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTableFlags), "::", stringify!(chroma_weight_l0_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_weight_l1_flag) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTableFlags), "::", stringify!(luma_weight_l1_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_weight_l1_flag) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTableFlags), "::", stringify!(chroma_weight_l1_flag) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264WeightTable { pub flags: StdVideoEncodeH264WeightTableFlags, pub luma_log2_weight_denom: u8, pub chroma_log2_weight_denom: u8, pub luma_weight_l0: [i8; 32usize], pub luma_offset_l0: [i8; 32usize], pub chroma_weight_l0: [[i8; 2usize]; 32usize], pub chroma_offset_l0: [[i8; 2usize]; 32usize], pub luma_weight_l1: [i8; 32usize], pub luma_offset_l1: [i8; 32usize], pub chroma_weight_l1: [[i8; 2usize]; 32usize], pub chroma_offset_l1: [[i8; 2usize]; 32usize], } #[test] fn bindgen_test_layout_StdVideoEncodeH264WeightTable() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 404usize, concat!("Size of: ", stringify!(StdVideoEncodeH264WeightTable)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoEncodeH264WeightTable)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_log2_weight_denom) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(luma_log2_weight_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_log2_weight_denom) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(chroma_log2_weight_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_weight_l0) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(luma_weight_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_offset_l0) as usize - ptr as usize }, 50usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(luma_offset_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_weight_l0) as usize - ptr as usize }, 82usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(chroma_weight_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_offset_l0) as usize - ptr as usize }, 146usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(chroma_offset_l0) ) ); assert_eq!( 
unsafe { ::core::ptr::addr_of!((*ptr).luma_weight_l1) as usize - ptr as usize }, 210usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(luma_weight_l1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_offset_l1) as usize - ptr as usize }, 242usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(luma_offset_l1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_weight_l1) as usize - ptr as usize }, 274usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(chroma_weight_l1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_offset_l1) as usize - ptr as usize }, 338usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264WeightTable), "::", stringify!(chroma_offset_l1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264SliceHeaderFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH264SliceHeaderFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoEncodeH264SliceHeaderFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264SliceHeaderFlags) ) ); } impl StdVideoEncodeH264SliceHeaderFlags { #[inline] pub fn direct_spatial_mv_pred_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_direct_spatial_mv_pred_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn num_ref_idx_active_override_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_num_ref_idx_active_override_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( direct_spatial_mv_pred_flag: u32, num_ref_idx_active_override_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let direct_spatial_mv_pred_flag: u32 = unsafe { ::core::mem::transmute(direct_spatial_mv_pred_flag) }; direct_spatial_mv_pred_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let num_ref_idx_active_override_flag: u32 = unsafe { ::core::mem::transmute(num_ref_idx_active_override_flag) }; num_ref_idx_active_override_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264PictureInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH264PictureInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoEncodeH264PictureInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", 
stringify!(StdVideoEncodeH264PictureInfoFlags) ) ); } impl StdVideoEncodeH264PictureInfoFlags { #[inline] pub fn IdrPicFlag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_IdrPicFlag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn is_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_is_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn no_output_of_prior_pics_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_no_output_of_prior_pics_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn long_term_reference_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_long_term_reference_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn adaptive_ref_pic_marking_mode_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_adaptive_ref_pic_marking_mode_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 27u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 27u8, val as u64) } } #[inline] pub fn new_bitfield_1( IdrPicFlag: u32, is_reference: u32, no_output_of_prior_pics_flag: u32, long_term_reference_flag: u32, adaptive_ref_pic_marking_mode_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let IdrPicFlag: u32 = unsafe { ::core::mem::transmute(IdrPicFlag) }; IdrPicFlag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let is_reference: u32 = unsafe { ::core::mem::transmute(is_reference) }; is_reference as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let no_output_of_prior_pics_flag: u32 = unsafe { ::core::mem::transmute(no_output_of_prior_pics_flag) }; no_output_of_prior_pics_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let long_term_reference_flag: u32 = unsafe { ::core::mem::transmute(long_term_reference_flag) }; long_term_reference_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let adaptive_ref_pic_marking_mode_flag: u32 = unsafe { ::core::mem::transmute(adaptive_ref_pic_marking_mode_flag) }; adaptive_ref_pic_marking_mode_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 27u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264ReferenceInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH264ReferenceInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", 
stringify!(StdVideoEncodeH264ReferenceInfoFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264ReferenceInfoFlags) ) ); } impl StdVideoEncodeH264ReferenceInfoFlags { #[inline] pub fn used_for_long_term_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_used_for_long_term_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 31u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 31u8, val as u64) } } #[inline] pub fn new_bitfield_1( used_for_long_term_reference: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let used_for_long_term_reference: u32 = unsafe { ::core::mem::transmute(used_for_long_term_reference) }; used_for_long_term_reference as u64 }); __bindgen_bitfield_unit.set(1usize, 31u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264ReferenceListsInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH264ReferenceListsInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", stringify!(StdVideoEncodeH264ReferenceListsInfoFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264ReferenceListsInfoFlags) ) ); } impl StdVideoEncodeH264ReferenceListsInfoFlags { #[inline] pub fn ref_pic_list_modification_flag_l0(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_ref_pic_list_modification_flag_l0(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn ref_pic_list_modification_flag_l1(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_ref_pic_list_modification_flag_l1(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( ref_pic_list_modification_flag_l0: u32, ref_pic_list_modification_flag_l1: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let ref_pic_list_modification_flag_l0: u32 = unsafe { ::core::mem::transmute(ref_pic_list_modification_flag_l0) }; ref_pic_list_modification_flag_l0 as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let ref_pic_list_modification_flag_l1: u32 = unsafe { ::core::mem::transmute(ref_pic_list_modification_flag_l1) }; ref_pic_list_modification_flag_l1 as u64 }); 
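// Illustrative sketch (not part of the generated bindings): the reference-lists
// flags packed here end up in `StdVideoEncodeH264ReferenceListsInfo` (declared
// below), whose `refList*ModOpCount` counters describe the arrays behind the
// `pRefList*ModOperations` pointers. Assuming zero-initialization and a caller
// that keeps `mod_ops` alive for as long as the struct is in use:
//
//     let mod_ops: [StdVideoEncodeH264RefListModEntry; 1] = unsafe { ::core::mem::zeroed() };
//     let mut lists: StdVideoEncodeH264ReferenceListsInfo = unsafe { ::core::mem::zeroed() };
//     lists.num_ref_idx_l0_active_minus1 = 0;
//     lists.RefPicList0[0] = 0; // first L0 reference slot, per the caller's DPB layout
//     lists.refList0ModOpCount = mod_ops.len() as u8;
//     lists.pRefList0ModOperations = mod_ops.as_ptr();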
__bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264RefListModEntry { pub modification_of_pic_nums_idc: StdVideoH264ModificationOfPicNumsIdc, pub abs_diff_pic_num_minus1: u16, pub long_term_pic_num: u16, } #[test] fn bindgen_test_layout_StdVideoEncodeH264RefListModEntry() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 8usize, concat!("Size of: ", stringify!(StdVideoEncodeH264RefListModEntry)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264RefListModEntry) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).modification_of_pic_nums_idc) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefListModEntry), "::", stringify!(modification_of_pic_nums_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).abs_diff_pic_num_minus1) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefListModEntry), "::", stringify!(abs_diff_pic_num_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).long_term_pic_num) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefListModEntry), "::", stringify!(long_term_pic_num) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264RefPicMarkingEntry { pub memory_management_control_operation: StdVideoH264MemMgmtControlOp, pub difference_of_pic_nums_minus1: u16, pub long_term_pic_num: u16, pub long_term_frame_idx: u16, pub max_long_term_frame_idx_plus1: u16, } #[test] fn bindgen_test_layout_StdVideoEncodeH264RefPicMarkingEntry() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 12usize, concat!( "Size of: ", stringify!(StdVideoEncodeH264RefPicMarkingEntry) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264RefPicMarkingEntry) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).memory_management_control_operation) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefPicMarkingEntry), "::", stringify!(memory_management_control_operation) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).difference_of_pic_nums_minus1) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefPicMarkingEntry), "::", stringify!(difference_of_pic_nums_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).long_term_pic_num) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefPicMarkingEntry), "::", stringify!(long_term_pic_num) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).long_term_frame_idx) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefPicMarkingEntry), "::", stringify!(long_term_frame_idx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).max_long_term_frame_idx_plus1) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264RefPicMarkingEntry), "::", stringify!(max_long_term_frame_idx_plus1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264ReferenceListsInfo { pub flags: 
StdVideoEncodeH264ReferenceListsInfoFlags, pub num_ref_idx_l0_active_minus1: u8, pub num_ref_idx_l1_active_minus1: u8, pub RefPicList0: [u8; 32usize], pub RefPicList1: [u8; 32usize], pub refList0ModOpCount: u8, pub refList1ModOpCount: u8, pub refPicMarkingOpCount: u8, pub reserved1: [u8; 7usize], pub pRefList0ModOperations: *const StdVideoEncodeH264RefListModEntry, pub pRefList1ModOperations: *const StdVideoEncodeH264RefListModEntry, pub pRefPicMarkingOperations: *const StdVideoEncodeH264RefPicMarkingEntry, } #[test] fn bindgen_test_layout_StdVideoEncodeH264ReferenceListsInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 104usize, concat!( "Size of: ", stringify!(StdVideoEncodeH264ReferenceListsInfo) ) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH264ReferenceListsInfo) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l0_active_minus1) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(num_ref_idx_l0_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l1_active_minus1) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(num_ref_idx_l1_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicList0) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(RefPicList0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicList1) as usize - ptr as usize }, 38usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(RefPicList1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).refList0ModOpCount) as usize - ptr as usize }, 70usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(refList0ModOpCount) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).refList1ModOpCount) as usize - ptr as usize }, 71usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(refList1ModOpCount) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).refPicMarkingOpCount) as usize - ptr as usize }, 72usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(refPicMarkingOpCount) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 73usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pRefList0ModOperations) as usize - ptr as usize }, 80usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(pRefList0ModOperations) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pRefList1ModOperations) as usize - ptr as usize }, 88usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(pRefList1ModOperations) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pRefPicMarkingOperations) as usize - ptr as usize }, 96usize, concat!( 
"Offset of field: ", stringify!(StdVideoEncodeH264ReferenceListsInfo), "::", stringify!(pRefPicMarkingOperations) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264PictureInfo { pub flags: StdVideoEncodeH264PictureInfoFlags, pub seq_parameter_set_id: u8, pub pic_parameter_set_id: u8, pub idr_pic_id: u16, pub primary_pic_type: StdVideoH264PictureType, pub frame_num: u32, pub PicOrderCnt: i32, pub temporal_id: u8, pub reserved1: [u8; 3usize], pub pRefLists: *const StdVideoEncodeH264ReferenceListsInfo, } #[test] fn bindgen_test_layout_StdVideoEncodeH264PictureInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 32usize, concat!("Size of: ", stringify!(StdVideoEncodeH264PictureInfo)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoEncodeH264PictureInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_parameter_set_id) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(pic_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).idr_pic_id) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(idr_pic_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).primary_pic_type) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(primary_pic_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).frame_num) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(frame_num) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(PicOrderCnt) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).temporal_id) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(temporal_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 21usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pRefLists) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264PictureInfo), "::", stringify!(pRefLists) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264ReferenceInfo { pub flags: StdVideoEncodeH264ReferenceInfoFlags, pub primary_pic_type: StdVideoH264PictureType, pub FrameNum: u32, pub PicOrderCnt: i32, pub long_term_pic_num: u16, pub long_term_frame_idx: u16, pub temporal_id: u8, } #[test] fn bindgen_test_layout_StdVideoEncodeH264ReferenceInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 24usize, 
concat!("Size of: ", stringify!(StdVideoEncodeH264ReferenceInfo)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoEncodeH264ReferenceInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).primary_pic_type) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(primary_pic_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).FrameNum) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(FrameNum) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(PicOrderCnt) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).long_term_pic_num) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(long_term_pic_num) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).long_term_frame_idx) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(long_term_frame_idx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).temporal_id) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264ReferenceInfo), "::", stringify!(temporal_id) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH264SliceHeader { pub flags: StdVideoEncodeH264SliceHeaderFlags, pub first_mb_in_slice: u32, pub slice_type: StdVideoH264SliceType, pub slice_alpha_c0_offset_div2: i8, pub slice_beta_offset_div2: i8, pub slice_qp_delta: i8, pub reserved1: u8, pub cabac_init_idc: StdVideoH264CabacInitIdc, pub disable_deblocking_filter_idc: StdVideoH264DisableDeblockingFilterIdc, pub pWeightTable: *const StdVideoEncodeH264WeightTable, } #[test] fn bindgen_test_layout_StdVideoEncodeH264SliceHeader() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 32usize, concat!("Size of: ", stringify!(StdVideoEncodeH264SliceHeader)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoEncodeH264SliceHeader)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).first_mb_in_slice) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(first_mb_in_slice) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_type) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(slice_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_alpha_c0_offset_div2) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(slice_alpha_c0_offset_div2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_beta_offset_div2) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", 
stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(slice_beta_offset_div2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_qp_delta) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(slice_qp_delta) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).cabac_init_idc) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(cabac_init_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).disable_deblocking_filter_idc) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(disable_deblocking_filter_idc) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pWeightTable) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH264SliceHeader), "::", stringify!(pWeightTable) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265WeightTableFlags { pub luma_weight_l0_flag: u16, pub chroma_weight_l0_flag: u16, pub luma_weight_l1_flag: u16, pub chroma_weight_l1_flag: u16, } #[test] fn bindgen_test_layout_StdVideoEncodeH265WeightTableFlags() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 8usize, concat!("Size of: ", stringify!(StdVideoEncodeH265WeightTableFlags)) ); assert_eq!( ::core::mem::align_of::(), 2usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265WeightTableFlags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_weight_l0_flag) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTableFlags), "::", stringify!(luma_weight_l0_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_weight_l0_flag) as usize - ptr as usize }, 2usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTableFlags), "::", stringify!(chroma_weight_l0_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_weight_l1_flag) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTableFlags), "::", stringify!(luma_weight_l1_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).chroma_weight_l1_flag) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTableFlags), "::", stringify!(chroma_weight_l1_flag) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265WeightTable { pub flags: StdVideoEncodeH265WeightTableFlags, pub luma_log2_weight_denom: u8, pub delta_chroma_log2_weight_denom: i8, pub delta_luma_weight_l0: [i8; 15usize], pub luma_offset_l0: [i8; 15usize], pub delta_chroma_weight_l0: [[i8; 2usize]; 15usize], pub delta_chroma_offset_l0: [[i8; 2usize]; 15usize], pub delta_luma_weight_l1: [i8; 15usize], pub luma_offset_l1: [i8; 15usize], pub delta_chroma_weight_l1: [[i8; 2usize]; 15usize], pub delta_chroma_offset_l1: [[i8; 2usize]; 15usize], } #[test] fn bindgen_test_layout_StdVideoEncodeH265WeightTable() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 190usize, concat!("Size of: ", 
stringify!(StdVideoEncodeH265WeightTable)) ); assert_eq!( ::core::mem::align_of::(), 2usize, concat!("Alignment of ", stringify!(StdVideoEncodeH265WeightTable)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_log2_weight_denom) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(luma_log2_weight_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_chroma_log2_weight_denom) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_chroma_log2_weight_denom) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_luma_weight_l0) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_luma_weight_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_offset_l0) as usize - ptr as usize }, 25usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(luma_offset_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_chroma_weight_l0) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_chroma_weight_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_chroma_offset_l0) as usize - ptr as usize }, 70usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_chroma_offset_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_luma_weight_l1) as usize - ptr as usize }, 100usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_luma_weight_l1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).luma_offset_l1) as usize - ptr as usize }, 115usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(luma_offset_l1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_chroma_weight_l1) as usize - ptr as usize }, 130usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_chroma_weight_l1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_chroma_offset_l1) as usize - ptr as usize }, 160usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265WeightTable), "::", stringify!(delta_chroma_offset_l1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265SliceSegmentHeaderFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH265SliceSegmentHeaderFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", stringify!(StdVideoEncodeH265SliceSegmentHeaderFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265SliceSegmentHeaderFlags) ) ); } impl StdVideoEncodeH265SliceSegmentHeaderFlags { #[inline] pub fn first_slice_segment_in_pic_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_first_slice_segment_in_pic_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn 
dependent_slice_segment_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_dependent_slice_segment_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn slice_sao_luma_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_slice_sao_luma_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn slice_sao_chroma_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_slice_sao_chroma_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn num_ref_idx_active_override_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_num_ref_idx_active_override_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn mvd_l1_zero_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_mvd_l1_zero_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn cabac_init_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_cabac_init_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn cu_chroma_qp_offset_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_cu_chroma_qp_offset_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn deblocking_filter_override_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_deblocking_filter_override_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn slice_deblocking_filter_disabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) } } #[inline] pub fn set_slice_deblocking_filter_disabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 1u8, val as u64) } } #[inline] pub fn collocated_from_l0_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) } } #[inline] pub fn set_collocated_from_l0_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(10usize, 1u8, val as u64) } } #[inline] pub fn slice_loop_filter_across_slices_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) } } #[inline] pub fn set_slice_loop_filter_across_slices_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(11usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(12usize, 20u8) 
as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(12usize, 20u8, val as u64) } } #[inline] pub fn new_bitfield_1( first_slice_segment_in_pic_flag: u32, dependent_slice_segment_flag: u32, slice_sao_luma_flag: u32, slice_sao_chroma_flag: u32, num_ref_idx_active_override_flag: u32, mvd_l1_zero_flag: u32, cabac_init_flag: u32, cu_chroma_qp_offset_enabled_flag: u32, deblocking_filter_override_flag: u32, slice_deblocking_filter_disabled_flag: u32, collocated_from_l0_flag: u32, slice_loop_filter_across_slices_enabled_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let first_slice_segment_in_pic_flag: u32 = unsafe { ::core::mem::transmute(first_slice_segment_in_pic_flag) }; first_slice_segment_in_pic_flag as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let dependent_slice_segment_flag: u32 = unsafe { ::core::mem::transmute(dependent_slice_segment_flag) }; dependent_slice_segment_flag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let slice_sao_luma_flag: u32 = unsafe { ::core::mem::transmute(slice_sao_luma_flag) }; slice_sao_luma_flag as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let slice_sao_chroma_flag: u32 = unsafe { ::core::mem::transmute(slice_sao_chroma_flag) }; slice_sao_chroma_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let num_ref_idx_active_override_flag: u32 = unsafe { ::core::mem::transmute(num_ref_idx_active_override_flag) }; num_ref_idx_active_override_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let mvd_l1_zero_flag: u32 = unsafe { ::core::mem::transmute(mvd_l1_zero_flag) }; mvd_l1_zero_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let cabac_init_flag: u32 = unsafe { ::core::mem::transmute(cabac_init_flag) }; cabac_init_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let cu_chroma_qp_offset_enabled_flag: u32 = unsafe { ::core::mem::transmute(cu_chroma_qp_offset_enabled_flag) }; cu_chroma_qp_offset_enabled_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let deblocking_filter_override_flag: u32 = unsafe { ::core::mem::transmute(deblocking_filter_override_flag) }; deblocking_filter_override_flag as u64 }); __bindgen_bitfield_unit.set(9usize, 1u8, { let slice_deblocking_filter_disabled_flag: u32 = unsafe { ::core::mem::transmute(slice_deblocking_filter_disabled_flag) }; slice_deblocking_filter_disabled_flag as u64 }); __bindgen_bitfield_unit.set(10usize, 1u8, { let collocated_from_l0_flag: u32 = unsafe { ::core::mem::transmute(collocated_from_l0_flag) }; collocated_from_l0_flag as u64 }); __bindgen_bitfield_unit.set(11usize, 1u8, { let slice_loop_filter_across_slices_enabled_flag: u32 = unsafe { ::core::mem::transmute(slice_loop_filter_across_slices_enabled_flag) }; slice_loop_filter_across_slices_enabled_flag as u64 }); __bindgen_bitfield_unit.set(12usize, 20u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265SliceSegmentHeader { pub flags: StdVideoEncodeH265SliceSegmentHeaderFlags, pub slice_type: StdVideoH265SliceType, pub slice_segment_address: u32, pub collocated_ref_idx: u8, pub MaxNumMergeCand: u8, pub slice_cb_qp_offset: i8, pub slice_cr_qp_offset: i8, pub slice_beta_offset_div2: i8, pub slice_tc_offset_div2: i8, pub 
slice_act_y_qp_offset: i8, pub slice_act_cb_qp_offset: i8, pub slice_act_cr_qp_offset: i8, pub slice_qp_delta: i8, pub reserved1: u16, pub pWeightTable: *const StdVideoEncodeH265WeightTable, } #[test] fn bindgen_test_layout_StdVideoEncodeH265SliceSegmentHeader() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 32usize, concat!( "Size of: ", stringify!(StdVideoEncodeH265SliceSegmentHeader) ) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265SliceSegmentHeader) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_type) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_segment_address) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_segment_address) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).collocated_ref_idx) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(collocated_ref_idx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).MaxNumMergeCand) as usize - ptr as usize }, 13usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(MaxNumMergeCand) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_cb_qp_offset) as usize - ptr as usize }, 14usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_cb_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_cr_qp_offset) as usize - ptr as usize }, 15usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_cr_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_beta_offset_div2) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_beta_offset_div2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_tc_offset_div2) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_tc_offset_div2) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_act_y_qp_offset) as usize - ptr as usize }, 18usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_act_y_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_act_cb_qp_offset) as usize - ptr as usize }, 19usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_act_cb_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_act_cr_qp_offset) as usize - ptr as usize }, 20usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(slice_act_cr_qp_offset) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).slice_qp_delta) as usize - ptr as usize }, 21usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", 
stringify!(slice_qp_delta) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 22usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pWeightTable) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265SliceSegmentHeader), "::", stringify!(pWeightTable) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265ReferenceListsInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH265ReferenceListsInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", stringify!(StdVideoEncodeH265ReferenceListsInfoFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265ReferenceListsInfoFlags) ) ); } impl StdVideoEncodeH265ReferenceListsInfoFlags { #[inline] pub fn ref_pic_list_modification_flag_l0(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_ref_pic_list_modification_flag_l0(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn ref_pic_list_modification_flag_l1(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_ref_pic_list_modification_flag_l1(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( ref_pic_list_modification_flag_l0: u32, ref_pic_list_modification_flag_l1: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let ref_pic_list_modification_flag_l0: u32 = unsafe { ::core::mem::transmute(ref_pic_list_modification_flag_l0) }; ref_pic_list_modification_flag_l0 as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let ref_pic_list_modification_flag_l1: u32 = unsafe { ::core::mem::transmute(ref_pic_list_modification_flag_l1) }; ref_pic_list_modification_flag_l1 as u64 }); __bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265ReferenceListsInfo { pub flags: StdVideoEncodeH265ReferenceListsInfoFlags, pub num_ref_idx_l0_active_minus1: u8, pub num_ref_idx_l1_active_minus1: u8, pub RefPicList0: [u8; 15usize], pub RefPicList1: [u8; 15usize], pub list_entry_l0: [u8; 15usize], pub list_entry_l1: [u8; 15usize], } #[test] fn bindgen_test_layout_StdVideoEncodeH265ReferenceListsInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 68usize, concat!( "Size of: ", stringify!(StdVideoEncodeH265ReferenceListsInfo) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", 
stringify!(StdVideoEncodeH265ReferenceListsInfo) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l0_active_minus1) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(num_ref_idx_l0_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_ref_idx_l1_active_minus1) as usize - ptr as usize }, 5usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(num_ref_idx_l1_active_minus1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicList0) as usize - ptr as usize }, 6usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(RefPicList0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).RefPicList1) as usize - ptr as usize }, 21usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(RefPicList1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).list_entry_l0) as usize - ptr as usize }, 36usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(list_entry_l0) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).list_entry_l1) as usize - ptr as usize }, 51usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceListsInfo), "::", stringify!(list_entry_l1) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265PictureInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH265PictureInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!("Size of: ", stringify!(StdVideoEncodeH265PictureInfoFlags)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265PictureInfoFlags) ) ); } impl StdVideoEncodeH265PictureInfoFlags { #[inline] pub fn is_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_is_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn IrapPicFlag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_IrapPicFlag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn used_for_long_term_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) } } #[inline] pub fn set_used_for_long_term_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 1u8, val as u64) } } #[inline] pub fn discardable_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) } } #[inline] pub fn set_discardable_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(3usize, 1u8, val as u64) } } #[inline] pub fn cross_layer_bla_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) } } #[inline] pub fn set_cross_layer_bla_flag(&mut self, val: u32) { unsafe { let val: u32 = 
::core::mem::transmute(val); self._bitfield_1.set(4usize, 1u8, val as u64) } } #[inline] pub fn pic_output_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) } } #[inline] pub fn set_pic_output_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(5usize, 1u8, val as u64) } } #[inline] pub fn no_output_of_prior_pics_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) } } #[inline] pub fn set_no_output_of_prior_pics_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(6usize, 1u8, val as u64) } } #[inline] pub fn short_term_ref_pic_set_sps_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) } } #[inline] pub fn set_short_term_ref_pic_set_sps_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(7usize, 1u8, val as u64) } } #[inline] pub fn slice_temporal_mvp_enabled_flag(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) } } #[inline] pub fn set_slice_temporal_mvp_enabled_flag(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(8usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(9usize, 23u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(9usize, 23u8, val as u64) } } #[inline] pub fn new_bitfield_1( is_reference: u32, IrapPicFlag: u32, used_for_long_term_reference: u32, discardable_flag: u32, cross_layer_bla_flag: u32, pic_output_flag: u32, no_output_of_prior_pics_flag: u32, short_term_ref_pic_set_sps_flag: u32, slice_temporal_mvp_enabled_flag: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let is_reference: u32 = unsafe { ::core::mem::transmute(is_reference) }; is_reference as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let IrapPicFlag: u32 = unsafe { ::core::mem::transmute(IrapPicFlag) }; IrapPicFlag as u64 }); __bindgen_bitfield_unit.set(2usize, 1u8, { let used_for_long_term_reference: u32 = unsafe { ::core::mem::transmute(used_for_long_term_reference) }; used_for_long_term_reference as u64 }); __bindgen_bitfield_unit.set(3usize, 1u8, { let discardable_flag: u32 = unsafe { ::core::mem::transmute(discardable_flag) }; discardable_flag as u64 }); __bindgen_bitfield_unit.set(4usize, 1u8, { let cross_layer_bla_flag: u32 = unsafe { ::core::mem::transmute(cross_layer_bla_flag) }; cross_layer_bla_flag as u64 }); __bindgen_bitfield_unit.set(5usize, 1u8, { let pic_output_flag: u32 = unsafe { ::core::mem::transmute(pic_output_flag) }; pic_output_flag as u64 }); __bindgen_bitfield_unit.set(6usize, 1u8, { let no_output_of_prior_pics_flag: u32 = unsafe { ::core::mem::transmute(no_output_of_prior_pics_flag) }; no_output_of_prior_pics_flag as u64 }); __bindgen_bitfield_unit.set(7usize, 1u8, { let short_term_ref_pic_set_sps_flag: u32 = unsafe { ::core::mem::transmute(short_term_ref_pic_set_sps_flag) }; short_term_ref_pic_set_sps_flag as u64 }); __bindgen_bitfield_unit.set(8usize, 1u8, { let slice_temporal_mvp_enabled_flag: u32 = unsafe { ::core::mem::transmute(slice_temporal_mvp_enabled_flag) }; slice_temporal_mvp_enabled_flag as u64 
}); __bindgen_bitfield_unit.set(9usize, 23u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265LongTermRefPics { pub num_long_term_sps: u8, pub num_long_term_pics: u8, pub lt_idx_sps: [u8; 32usize], pub poc_lsb_lt: [u8; 16usize], pub used_by_curr_pic_lt_flag: u16, pub delta_poc_msb_present_flag: [u8; 48usize], pub delta_poc_msb_cycle_lt: [u8; 48usize], } #[test] fn bindgen_test_layout_StdVideoEncodeH265LongTermRefPics() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 148usize, concat!("Size of: ", stringify!(StdVideoEncodeH265LongTermRefPics)) ); assert_eq!( ::core::mem::align_of::(), 2usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265LongTermRefPics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_long_term_sps) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(num_long_term_sps) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).num_long_term_pics) as usize - ptr as usize }, 1usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(num_long_term_pics) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).lt_idx_sps) as usize - ptr as usize }, 2usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(lt_idx_sps) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).poc_lsb_lt) as usize - ptr as usize }, 34usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(poc_lsb_lt) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).used_by_curr_pic_lt_flag) as usize - ptr as usize }, 50usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(used_by_curr_pic_lt_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_poc_msb_present_flag) as usize - ptr as usize }, 52usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(delta_poc_msb_present_flag) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).delta_poc_msb_cycle_lt) as usize - ptr as usize }, 100usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265LongTermRefPics), "::", stringify!(delta_poc_msb_cycle_lt) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265PictureInfo { pub flags: StdVideoEncodeH265PictureInfoFlags, pub pic_type: StdVideoH265PictureType, pub sps_video_parameter_set_id: u8, pub pps_seq_parameter_set_id: u8, pub pps_pic_parameter_set_id: u8, pub short_term_ref_pic_set_idx: u8, pub PicOrderCntVal: i32, pub TemporalId: u8, pub reserved1: [u8; 7usize], pub pRefLists: *const StdVideoEncodeH265ReferenceListsInfo, pub pShortTermRefPicSet: *const StdVideoH265ShortTermRefPicSet, pub pLongTermRefPics: *const StdVideoEncodeH265LongTermRefPics, } #[test] fn bindgen_test_layout_StdVideoEncodeH265PictureInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 48usize, concat!("Size of: ", stringify!(StdVideoEncodeH265PictureInfo)) ); assert_eq!( ::core::mem::align_of::(), 8usize, concat!("Alignment of ", stringify!(StdVideoEncodeH265PictureInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, 
concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_type) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(pic_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(sps_video_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_seq_parameter_set_id) as usize - ptr as usize }, 9usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(pps_seq_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pps_pic_parameter_set_id) as usize - ptr as usize }, 10usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(pps_pic_parameter_set_id) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).short_term_ref_pic_set_idx) as usize - ptr as usize }, 11usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(short_term_ref_pic_set_idx) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(PicOrderCntVal) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).TemporalId) as usize - ptr as usize }, 16usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(TemporalId) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize }, 17usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(reserved1) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pRefLists) as usize - ptr as usize }, 24usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(pRefLists) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pShortTermRefPicSet) as usize - ptr as usize }, 32usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(pShortTermRefPicSet) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pLongTermRefPics) as usize - ptr as usize }, 40usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265PictureInfo), "::", stringify!(pLongTermRefPics) ) ); } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265ReferenceInfoFlags { pub _bitfield_align_1: [u32; 0], pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>, } #[test] fn bindgen_test_layout_StdVideoEncodeH265ReferenceInfoFlags() { assert_eq!( ::core::mem::size_of::(), 4usize, concat!( "Size of: ", stringify!(StdVideoEncodeH265ReferenceInfoFlags) ) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!( "Alignment of ", stringify!(StdVideoEncodeH265ReferenceInfoFlags) ) ); } impl StdVideoEncodeH265ReferenceInfoFlags { #[inline] pub fn used_for_long_term_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) } } #[inline] pub fn set_used_for_long_term_reference(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(0usize, 1u8, val as u64) } } #[inline] pub fn unused_for_reference(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) } } #[inline] pub fn set_unused_for_reference(&mut self, 
val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(1usize, 1u8, val as u64) } } #[inline] pub fn reserved(&self) -> u32 { unsafe { ::core::mem::transmute(self._bitfield_1.get(2usize, 30u8) as u32) } } #[inline] pub fn set_reserved(&mut self, val: u32) { unsafe { let val: u32 = ::core::mem::transmute(val); self._bitfield_1.set(2usize, 30u8, val as u64) } } #[inline] pub fn new_bitfield_1( used_for_long_term_reference: u32, unused_for_reference: u32, reserved: u32, ) -> __BindgenBitfieldUnit<[u8; 4usize]> { let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default(); __bindgen_bitfield_unit.set(0usize, 1u8, { let used_for_long_term_reference: u32 = unsafe { ::core::mem::transmute(used_for_long_term_reference) }; used_for_long_term_reference as u64 }); __bindgen_bitfield_unit.set(1usize, 1u8, { let unused_for_reference: u32 = unsafe { ::core::mem::transmute(unused_for_reference) }; unused_for_reference as u64 }); __bindgen_bitfield_unit.set(2usize, 30u8, { let reserved: u32 = unsafe { ::core::mem::transmute(reserved) }; reserved as u64 }); __bindgen_bitfield_unit } } #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct StdVideoEncodeH265ReferenceInfo { pub flags: StdVideoEncodeH265ReferenceInfoFlags, pub pic_type: StdVideoH265PictureType, pub PicOrderCntVal: i32, pub TemporalId: u8, } #[test] fn bindgen_test_layout_StdVideoEncodeH265ReferenceInfo() { const UNINIT: ::core::mem::MaybeUninit = ::core::mem::MaybeUninit::uninit(); let ptr = UNINIT.as_ptr(); assert_eq!( ::core::mem::size_of::(), 16usize, concat!("Size of: ", stringify!(StdVideoEncodeH265ReferenceInfo)) ); assert_eq!( ::core::mem::align_of::(), 4usize, concat!("Alignment of ", stringify!(StdVideoEncodeH265ReferenceInfo)) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).flags) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceInfo), "::", stringify!(flags) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).pic_type) as usize - ptr as usize }, 4usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceInfo), "::", stringify!(pic_type) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize }, 8usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceInfo), "::", stringify!(PicOrderCntVal) ) ); assert_eq!( unsafe { ::core::ptr::addr_of!((*ptr).TemporalId) as usize - ptr as usize }, 12usize, concat!( "Offset of field: ", stringify!(StdVideoEncodeH265ReferenceInfo), "::", stringify!(TemporalId) ) ); } ash-0.38.0+1.3.281/src/vk/platform_types.rs000064400000000000000000000035141046102023000161250ustar 00000000000000#![allow(non_camel_case_types)] use core::ffi::*; pub type RROutput = c_ulong; pub type VisualID = c_uint; pub type Display = c_void; pub type Window = c_ulong; pub type xcb_connection_t = c_void; pub type xcb_window_t = u32; pub type xcb_visualid_t = u32; pub type MirConnection = *const c_void; pub type MirSurface = *const c_void; /// pub type HANDLE = isize; /// pub type HINSTANCE = HANDLE; /// pub type HWND = HANDLE; /// pub type HMONITOR = HANDLE; pub type wl_display = c_void; pub type wl_surface = c_void; pub type DWORD = c_ulong; pub type LPCWSTR = *const u16; pub type zx_handle_t = u32; pub type _screen_buffer = c_void; pub type _screen_context = c_void; pub type _screen_window = c_void; pub type SECURITY_ATTRIBUTES = c_void; // Opaque types pub type ANativeWindow = c_void; pub type AHardwareBuffer = c_void; pub type 
CAMetalLayer = c_void;

// This definition is behind an NDA with a best effort guess from
// https://github.com/google/gapid/commit/22aafebec4638c6aaa77667096bca30f6e842d95#diff-ab3ab4a7d89b4fc8a344ff4e9332865f268ea1669ee379c1b516a954ecc2e7a6R20-R21
pub type GgpStreamDescriptor = u32;
pub type GgpFrameToken = u64;

pub type IDirectFB = c_void;
pub type IDirectFBSurface = c_void;

pub type __IOSurface = c_void;
pub type IOSurfaceRef = *mut __IOSurface;
pub type MTLBuffer_id = *mut c_void;
pub type MTLCommandQueue_id = *mut c_void;
pub type MTLDevice_id = *mut c_void;
pub type MTLSharedEvent_id = *mut c_void;
pub type MTLTexture_id = *mut c_void;
ash-0.38.0+1.3.281/src/vk/prelude.rs
use core::ffi::c_char;
use core::fmt;

use crate::vk;

/// Holds 24 bits in the least significant bits of memory,
/// and 8 bits in the most significant bits of that memory,
/// occupying a single [`u32`] in total. This is commonly used in
/// [acceleration structure instances] such as
/// [`vk::AccelerationStructureInstanceKHR`],
/// [`vk::AccelerationStructureSRTMotionInstanceNV`] and
/// [`vk::AccelerationStructureMatrixMotionInstanceNV`].
///
/// [acceleration structure instances]: https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureInstanceKHR.html#_description
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
#[repr(transparent)]
pub struct Packed24_8(u32);

impl Packed24_8 {
    pub fn new(low_24: u32, high_8: u8) -> Self {
        Self((low_24 & 0x00ff_ffff) | (u32::from(high_8) << 24))
    }

    /// Extracts the least-significant 24 bits (3 bytes) of this integer
    pub fn low_24(&self) -> u32 {
        self.0 & 0xffffff
    }

    /// Extracts the most significant 8 bits (single byte) of this integer
    pub fn high_8(&self) -> u8 {
        (self.0 >> 24) as u8
    }
}
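// Illustrative sketch, not part of the upstream crate: a minimal example of the 24/8-bit
// packing implemented above, e.g. for an acceleration-structure instance's custom index and
// mask. The module name, test name, and concrete values are ours and purely for demonstration.
#[cfg(test)]
mod packed24_8_example {
    use super::Packed24_8;

    #[test]
    fn round_trips_low_and_high_parts() {
        // 0x00AB_CDEF fits in 24 bits; 0x3F stands in for the 8-bit "high" part.
        let packed = Packed24_8::new(0x00AB_CDEF, 0x3F);
        assert_eq!(packed.low_24(), 0x00AB_CDEF);
        assert_eq!(packed.high_8(), 0x3F);

        // Values wider than 24 bits are truncated by `new`.
        let truncated = Packed24_8::new(0xFFAB_CDEF, 0x3F);
        assert_eq!(truncated.low_24(), 0x00AB_CDEF);
    }
}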
impl vk::ColorComponentFlags {
    /// Contraction of [`R`][Self::R] | [`G`][Self::G] | [`B`][Self::B] | [`A`][Self::A]
    pub const RGBA: Self = Self(Self::R.0 | Self::G.0 | Self::B.0 | Self::A.0);
}

impl From<vk::Extent2D> for vk::Extent3D {
    fn from(value: vk::Extent2D) -> Self {
        Self {
            width: value.width,
            height: value.height,
            depth: 1,
        }
    }
}

impl From<vk::Extent2D> for vk::Rect2D {
    fn from(extent: vk::Extent2D) -> Self {
        Self {
            offset: Default::default(),
            extent,
        }
    }
}

/// Structures implementing this trait are layout-compatible with [`vk::BaseInStructure`] and
/// [`vk::BaseOutStructure`]. Such structures have an `s_type` field indicating its type, which
/// must always match the value of [`TaggedStructure::STRUCTURE_TYPE`].
pub unsafe trait TaggedStructure {
    const STRUCTURE_TYPE: vk::StructureType;
}

#[inline]
pub(crate) fn wrap_c_str_slice_until_nul(
    str: &[c_char],
) -> Result<&core::ffi::CStr, core::ffi::FromBytesUntilNulError> {
    // SAFETY: The cast from c_char to u8 is ok because a c_char is always one byte.
    let bytes = unsafe { core::slice::from_raw_parts(str.as_ptr().cast(), str.len()) };
    core::ffi::CStr::from_bytes_until_nul(bytes)
}

#[derive(Debug)]
pub struct CStrTooLargeForStaticArray {
    pub static_array_size: usize,
    pub c_str_size: usize,
}
#[cfg(feature = "std")]
impl std::error::Error for CStrTooLargeForStaticArray {}
impl fmt::Display for CStrTooLargeForStaticArray {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "static `c_char` target array of length `{}` is too small to write a `CStr` (with `NUL`-terminator) of length `{}`",
            self.static_array_size, self.c_str_size
        )
    }
}

#[inline]
pub(crate) fn write_c_str_slice_with_nul(
    target: &mut [c_char],
    str: &core::ffi::CStr,
) -> Result<(), CStrTooLargeForStaticArray> {
    let bytes = str.to_bytes_with_nul();
    // SAFETY: The cast from c_char to u8 is ok because a c_char is always one byte.
    let bytes = unsafe { core::slice::from_raw_parts(bytes.as_ptr().cast(), bytes.len()) };
    let static_array_size = target.len();
    target
        .get_mut(..bytes.len())
        .ok_or(CStrTooLargeForStaticArray {
            static_array_size,
            c_str_size: bytes.len(),
        })?
        .copy_from_slice(bytes);
    Ok(())
}
ash-0.38.0+1.3.281/src/vk.rs
#![allow(
    clippy::too_many_arguments,
    clippy::cognitive_complexity,
    clippy::wrong_self_convention,
    unused_qualifications
)]
#[macro_use]
mod macros;
mod aliases;
pub use aliases::*;
mod bitflags;
pub use bitflags::*;
#[cfg(feature = "debug")]
mod const_debugs;
mod constants;
pub use constants::*;
mod definitions;
pub use definitions::*;
mod enums;
pub use enums::*;
mod extensions;
pub use extensions::*;
mod feature_extensions;
mod features;
pub use features::*;
mod prelude;
pub use prelude::*;
/// Native bindings from Vulkan headers, generated by bindgen
#[allow(clippy::useless_transmute, nonstandard_style)]
pub mod native;
mod platform_types;
pub use platform_types::*;

/// Iterates through the pointer chain. Includes the item that is passed into the function.
/// Stops at the last [`BaseOutStructure`] that has a null [`BaseOutStructure::p_next`] field.
pub(crate) unsafe fn ptr_chain_iter<T: ?Sized>(
    ptr: &mut T,
) -> impl Iterator<Item = *mut BaseOutStructure<'_>> {
    let ptr = <*mut T>::cast::<BaseOutStructure<'_>>(ptr);
    (0..).scan(ptr, |p_ptr, _| {
        if p_ptr.is_null() {
            return None;
        }
        let n_ptr = (**p_ptr).p_next;
        let old = *p_ptr;
        *p_ptr = n_ptr;
        Some(old)
    })
}

pub trait Handle: Sized {
    const TYPE: ObjectType;
    fn as_raw(self) -> u64;
    fn from_raw(_: u64) -> Self;

    /// Returns whether the handle is a `NULL` value.
    ///
    /// # Example
    ///
    /// ```
    /// # use ash::vk::{Handle, Instance};
    /// let instance = Instance::null();
    /// assert!(instance.is_null());
    /// ```
    fn is_null(self) -> bool {
        self.as_raw() == 0
    }
}
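// Illustrative sketch, not part of the upstream module: a minimal example of the `Handle`
// raw-conversion contract described above. `Instance` is used only because it is a convenient
// handle type; the module and test names are ours, and any `Handle` implementor behaves the
// same way.
#[cfg(test)]
mod handle_example {
    use super::{Handle, Instance};

    #[test]
    fn raw_round_trip_preserves_null() {
        // A null handle converts to the raw value 0 and back again.
        let raw = Instance::null().as_raw();
        assert_eq!(raw, 0);
        let instance = Instance::from_raw(raw);
        assert!(instance.is_null());
    }
}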
ash-0.38.0+1.3.281/tests/constant_size_arrays.rs
use ash::vk::{PhysicalDeviceProperties, PipelineColorBlendStateCreateInfo};

#[test]
fn assert_struct_field_is_array() {
    let pipeline_cache_uuid: [u8; 16] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

    let _ = PhysicalDeviceProperties::default().pipeline_cache_uuid(pipeline_cache_uuid);

    let _ = PhysicalDeviceProperties {
        pipeline_cache_uuid,
        ..Default::default()
    };

    let blend_constants: [f32; 4] = [0.0, 0.0, 0.0, 0.0];

    let _ = PipelineColorBlendStateCreateInfo::default().blend_constants(blend_constants);

    let _ = PipelineColorBlendStateCreateInfo {
        blend_constants,
        ..Default::default()
    };
}

#[test]
#[allow(dead_code)]
fn assert_ffi_array_param_is_pointer() {
    // don't run it, just make sure it compiles
    unsafe fn dummy(device: &ash::Device, cmd_buffer: ash::vk::CommandBuffer) {
        let blend_constants: [f32; 4] = [0.0, 0.0, 0.0, 0.0];

        device.cmd_set_blend_constants(cmd_buffer, &blend_constants);
    }
}
ash-0.38.0+1.3.281/tests/display.rs
use ash::vk;

#[test]
fn debug_flags() {
    assert_eq!(
        format!(
            "{:?}",
            vk::AccessFlags::INDIRECT_COMMAND_READ | vk::AccessFlags::VERTEX_ATTRIBUTE_READ
        ),
        "INDIRECT_COMMAND_READ | VERTEX_ATTRIBUTE_READ"
    );
}

#[test]
fn debug_enum() {
    assert_eq!(format!("{:?}", vk::ChromaLocation::MIDPOINT), "MIDPOINT");
}
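// Illustrative sketch, not part of the upstream test suite: exercises the `From<vk::Extent2D>`
// conversions provided by the crate's prelude (shown earlier in prelude.rs). The test name and
// concrete dimensions are ours; assertions are on individual fields so no extra trait bounds
// are assumed.
#[test]
fn extent2d_conversions_example() {
    let extent = vk::Extent2D {
        width: 640,
        height: 480,
    };

    // Extent2D -> Extent3D gains a depth of 1.
    let extent3d = vk::Extent3D::from(extent);
    assert_eq!(extent3d.width, 640);
    assert_eq!(extent3d.height, 480);
    assert_eq!(extent3d.depth, 1);

    // Extent2D -> Rect2D keeps the extent and defaults the offset to zero.
    let rect = vk::Rect2D::from(extent);
    assert_eq!(rect.offset.x, 0);
    assert_eq!(rect.offset.y, 0);
    assert_eq!(rect.extent.width, 640);
    assert_eq!(rect.extent.height, 480);
}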