#![allow(clippy::trivially_copy_pass_by_ref)]
use crate::prelude::*;
use crate::vk;
use crate::RawPtr;
use std::mem;
use std::os::raw::c_void;
use std::ptr;
/// A Vulkan logical device together with the device-level function
/// pointers loaded for it, grouped by the core API version that
/// introduced them. Cloning is cheap-ish: it copies the handle and the
/// function-pointer tables.
#[derive(Clone)]
pub struct Device {
    // Raw Vulkan device handle this wrapper dispatches on.
    pub(crate) handle: vk::Device,
    // Function-pointer tables for core Vulkan 1.0 through 1.3 device commands.
    pub(crate) device_fn_1_0: vk::DeviceFnV1_0,
    pub(crate) device_fn_1_1: vk::DeviceFnV1_1,
    pub(crate) device_fn_1_2: vk::DeviceFnV1_2,
    pub(crate) device_fn_1_3: vk::DeviceFnV1_3,
}
impl Device {
    /// Loads all device-level function pointers for `device` via
    /// `vkGetDeviceProcAddr` and wraps them together with the handle.
    ///
    /// # Safety
    /// `device` must be a valid [`vk::Device`] obtained from the instance
    /// that `instance_fn` was loaded from — NOTE(review): assumed contract,
    /// confirm against the crate's instance-creation path.
    pub unsafe fn load(instance_fn: &vk::InstanceFnV1_0, device: vk::Device) -> Self {
        let load_fn = |name: &std::ffi::CStr| {
            // Transmute the generic PFN_vkVoidFunction returned by the
            // loader into the concrete function-pointer type expected by
            // each `DeviceFnV1_x::load` table.
            mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr()))
        };
        Self {
            handle: device,
            device_fn_1_0: vk::DeviceFnV1_0::load(load_fn),
            device_fn_1_1: vk::DeviceFnV1_1::load(load_fn),
            device_fn_1_2: vk::DeviceFnV1_2::load(load_fn),
            device_fn_1_3: vk::DeviceFnV1_3::load(load_fn),
        }
    }
    /// Returns the raw [`vk::Device`] handle this wrapper dispatches on.
    #[inline]
    pub fn handle(&self) -> vk::Device {
        self.handle
    }
}
#[allow(non_camel_case_types)]
/// Safe-ish wrappers around the core Vulkan 1.3 device commands.
/// Each method forwards to the raw function pointer in `device_fn_1_3`;
/// slice parameters are converted to `(len, ptr)` pairs at the FFI boundary.
impl Device {
    /// Returns the raw Vulkan 1.3 device function-pointer table.
    #[inline]
    pub fn fp_v1_3(&self) -> &vk::DeviceFnV1_3 {
        &self.device_fn_1_3
    }
    /// Wraps `vkCreatePrivateDataSlot`.
    #[inline]
    pub unsafe fn create_private_data_slot(
        &self,
        create_info: &vk::PrivateDataSlotCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::PrivateDataSlot> {
        // Handles are plain integers/pointers, so a zeroed value is a valid
        // "null" placeholder for the driver to overwrite.
        let mut private_data_slot = mem::zeroed();
        (self.device_fn_1_3.create_private_data_slot)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut private_data_slot,
        )
        .result_with_success(private_data_slot)
    }
    /// Wraps `vkDestroyPrivateDataSlot`.
    #[inline]
    pub unsafe fn destroy_private_data_slot(
        &self,
        private_data_slot: vk::PrivateDataSlot,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_3.destroy_private_data_slot)(
            self.handle,
            private_data_slot,
            allocation_callbacks.as_raw_ptr(),
        )
    }
    /// Wraps `vkSetPrivateData`; the object type is taken from `T::TYPE`.
    #[inline]
    pub unsafe fn set_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
        data: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_3.set_private_data)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            data,
        )
        .result()
    }
    /// Wraps `vkGetPrivateData`; returns the stored `u64` payload.
    #[inline]
    pub unsafe fn get_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
    ) -> u64 {
        let mut data = mem::zeroed();
        (self.device_fn_1_3.get_private_data)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            &mut data,
        );
        data
    }
    /// Wraps `vkCmdPipelineBarrier2`.
    #[inline]
    pub unsafe fn cmd_pipeline_barrier2(
        &self,
        command_buffer: vk::CommandBuffer,
        dependency_info: &vk::DependencyInfo,
    ) {
        (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info)
    }
    /// Wraps `vkCmdResetEvent2`.
    #[inline]
    pub unsafe fn cmd_reset_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags2,
    ) {
        (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask)
    }
    /// Wraps `vkCmdSetEvent2`.
    #[inline]
    pub unsafe fn cmd_set_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        dependency_info: &vk::DependencyInfo,
    ) {
        (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info)
    }
    /// Wraps `vkCmdWaitEvents2`.
    ///
    /// # Panics
    /// Panics if `events` and `dependency_infos` differ in length, since the
    /// API pairs them element-wise under a single count.
    #[inline]
    pub unsafe fn cmd_wait_events2(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        dependency_infos: &[vk::DependencyInfo],
    ) {
        assert_eq!(events.len(), dependency_infos.len());
        (self.device_fn_1_3.cmd_wait_events2)(
            command_buffer,
            events.len() as u32,
            events.as_ptr(),
            dependency_infos.as_ptr(),
        )
    }
    /// Wraps `vkCmdWriteTimestamp2`.
    #[inline]
    pub unsafe fn cmd_write_timestamp2(
        &self,
        command_buffer: vk::CommandBuffer,
        stage: vk::PipelineStageFlags2,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query)
    }
    /// Wraps `vkQueueSubmit2`.
    #[inline]
    pub unsafe fn queue_submit2(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo2],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence)
            .result()
    }
    /// Wraps `vkCmdCopyBuffer2`.
    #[inline]
    pub unsafe fn cmd_copy_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_info: &vk::CopyBufferInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info)
    }
    /// Wraps `vkCmdCopyImage2`.
    #[inline]
    pub unsafe fn cmd_copy_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_info: &vk::CopyImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info)
    }
    /// Wraps `vkCmdCopyBufferToImage2`.
    #[inline]
    pub unsafe fn cmd_copy_buffer_to_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info)
    }
    /// Wraps `vkCmdCopyImageToBuffer2`.
    #[inline]
    pub unsafe fn cmd_copy_image_to_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2,
    ) {
        (self.device_fn_1_3.cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info)
    }
    /// Wraps `vkCmdBlitImage2`.
    #[inline]
    pub unsafe fn cmd_blit_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        blit_image_info: &vk::BlitImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info)
    }
    /// Wraps `vkCmdResolveImage2`.
    #[inline]
    pub unsafe fn cmd_resolve_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        resolve_image_info: &vk::ResolveImageInfo2,
    ) {
        (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, resolve_image_info)
    }
    /// Wraps `vkCmdBeginRendering`.
    #[inline]
    pub unsafe fn cmd_begin_rendering(
        &self,
        command_buffer: vk::CommandBuffer,
        rendering_info: &vk::RenderingInfo,
    ) {
        (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info)
    }
    /// Wraps `vkCmdEndRendering`.
    #[inline]
    pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
        (self.device_fn_1_3.cmd_end_rendering)(command_buffer)
    }
    /// Wraps `vkCmdSetCullMode`.
    #[inline]
    pub unsafe fn cmd_set_cull_mode(
        &self,
        command_buffer: vk::CommandBuffer,
        cull_mode: vk::CullModeFlags,
    ) {
        (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode)
    }
    /// Wraps `vkCmdSetFrontFace`.
    #[inline]
    pub unsafe fn cmd_set_front_face(
        &self,
        command_buffer: vk::CommandBuffer,
        front_face: vk::FrontFace,
    ) {
        (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face)
    }
    /// Wraps `vkCmdSetPrimitiveTopology`.
    #[inline]
    pub unsafe fn cmd_set_primitive_topology(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_topology: vk::PrimitiveTopology,
    ) {
        (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology)
    }
    /// Wraps `vkCmdSetViewportWithCount`.
    #[inline]
    pub unsafe fn cmd_set_viewport_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        viewports: &[vk::Viewport],
    ) {
        (self.device_fn_1_3.cmd_set_viewport_with_count)(
            command_buffer,
            viewports.len() as u32,
            viewports.as_ptr(),
        )
    }
    /// Wraps `vkCmdSetScissorWithCount`.
    #[inline]
    pub unsafe fn cmd_set_scissor_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        scissors: &[vk::Rect2D],
    ) {
        (self.device_fn_1_3.cmd_set_scissor_with_count)(
            command_buffer,
            scissors.len() as u32,
            scissors.as_ptr(),
        )
    }
    /// Wraps `vkCmdBindVertexBuffers2`. `sizes`/`strides` are optional and
    /// passed as null pointers when absent.
    ///
    /// # Panics
    /// Panics if `offsets` (or a provided `sizes`/`strides`) does not match
    /// `buffers.len()`, since all arrays share one binding count.
    #[inline]
    pub unsafe fn cmd_bind_vertex_buffers2(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
        sizes: Option<&[vk::DeviceSize]>,
        strides: Option<&[vk::DeviceSize]>,
    ) {
        assert_eq!(offsets.len(), buffers.len());
        let p_sizes = if let Some(sizes) = sizes {
            assert_eq!(sizes.len(), buffers.len());
            sizes.as_ptr()
        } else {
            ptr::null()
        };
        let p_strides = if let Some(strides) = strides {
            assert_eq!(strides.len(), buffers.len());
            strides.as_ptr()
        } else {
            ptr::null()
        };
        (self.device_fn_1_3.cmd_bind_vertex_buffers2)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
            p_sizes,
            p_strides,
        )
    }
    /// Wraps `vkCmdSetDepthTestEnable`.
    #[inline]
    pub unsafe fn cmd_set_depth_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into())
    }
    /// Wraps `vkCmdSetDepthWriteEnable`.
    #[inline]
    pub unsafe fn cmd_set_depth_write_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_write_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_write_enable)(command_buffer, depth_write_enable.into())
    }
    /// Wraps `vkCmdSetDepthCompareOp`.
    #[inline]
    pub unsafe fn cmd_set_depth_compare_op(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_compare_op: vk::CompareOp,
    ) {
        (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op)
    }
    /// Wraps `vkCmdSetDepthBoundsTestEnable`.
    #[inline]
    pub unsafe fn cmd_set_depth_bounds_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bounds_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)(
            command_buffer,
            depth_bounds_test_enable.into(),
        )
    }
    /// Wraps `vkCmdSetStencilTestEnable`.
    #[inline]
    pub unsafe fn cmd_set_stencil_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        stencil_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_stencil_test_enable)(command_buffer, stencil_test_enable.into())
    }
    /// Wraps `vkCmdSetStencilOp`.
    #[inline]
    pub unsafe fn cmd_set_stencil_op(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        fail_op: vk::StencilOp,
        pass_op: vk::StencilOp,
        depth_fail_op: vk::StencilOp,
        compare_op: vk::CompareOp,
    ) {
        (self.device_fn_1_3.cmd_set_stencil_op)(
            command_buffer,
            face_mask,
            fail_op,
            pass_op,
            depth_fail_op,
            compare_op,
        )
    }
    /// Wraps `vkCmdSetRasterizerDiscardEnable`.
    #[inline]
    pub unsafe fn cmd_set_rasterizer_discard_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        rasterizer_discard_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)(
            command_buffer,
            rasterizer_discard_enable.into(),
        )
    }
    /// Wraps `vkCmdSetDepthBiasEnable`.
    #[inline]
    pub unsafe fn cmd_set_depth_bias_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bias_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into())
    }
    /// Wraps `vkCmdSetPrimitiveRestartEnable`.
    #[inline]
    pub unsafe fn cmd_set_primitive_restart_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_restart_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_primitive_restart_enable)(
            command_buffer,
            primitive_restart_enable.into(),
        )
    }
    /// Wraps `vkGetDeviceBufferMemoryRequirements`, writing into `out`.
    #[inline]
    pub unsafe fn get_device_buffer_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceBufferMemoryRequirements,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_3.get_device_buffer_memory_requirements)(
            self.handle,
            memory_requirements,
            out,
        )
    }
    /// Wraps `vkGetDeviceImageMemoryRequirements`, writing into `out`.
    #[inline]
    pub unsafe fn get_device_image_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_3.get_device_image_memory_requirements)(
            self.handle,
            memory_requirements,
            out,
        )
    }
    /// Queries only the element count for
    /// [`Self::get_device_image_sparse_memory_requirements`] by passing a
    /// null output pointer, so the caller can size the output slice.
    #[inline]
    pub unsafe fn get_device_image_sparse_memory_requirements_len(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements,
    ) -> usize {
        let mut count = 0;
        (self
            .device_fn_1_3
            .get_device_image_sparse_memory_requirements)(
            self.handle,
            memory_requirements,
            &mut count,
            // Use the `ptr` import, consistent with the v1.1 sibling
            // `get_image_sparse_memory_requirements2_len`.
            ptr::null_mut(),
        );
        count as usize
    }
    /// Wraps `vkGetDeviceImageSparseMemoryRequirements`, filling `out`.
    ///
    /// # Panics
    /// Panics if `out.len()` does not match the count reported by the
    /// driver; size it with the `_len` variant first.
    #[inline]
    pub unsafe fn get_device_image_sparse_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements,
        out: &mut [vk::SparseImageMemoryRequirements2],
    ) {
        let mut count = out.len() as u32;
        (self
            .device_fn_1_3
            .get_device_image_sparse_memory_requirements)(
            self.handle,
            memory_requirements,
            &mut count,
            out.as_mut_ptr(),
        );
        assert_eq!(count as usize, out.len());
    }
}
#[allow(non_camel_case_types)]
/// Thin wrappers around the core Vulkan 1.2 device commands; each method
/// forwards directly to the raw function pointer in `device_fn_1_2`.
impl Device {
    /// Returns the raw Vulkan 1.2 device function-pointer table.
    #[inline]
    pub fn fp_v1_2(&self) -> &vk::DeviceFnV1_2 {
        &self.device_fn_1_2
    }
    /// Wraps `vkCmdDrawIndirectCount`.
    #[inline]
    pub unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
    /// Wraps `vkCmdDrawIndexedIndirectCount`.
    #[inline]
    pub unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indexed_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }
    /// Wraps `vkCreateRenderPass2`, returning the new render pass handle.
    #[inline]
    pub unsafe fn create_render_pass2(
        &self,
        create_info: &vk::RenderPassCreateInfo2,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::RenderPass> {
        // Zeroed handle acts as the out-parameter the driver overwrites.
        let mut renderpass = mem::zeroed();
        (self.device_fn_1_2.create_render_pass2)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut renderpass,
        )
        .result_with_success(renderpass)
    }
    /// Wraps `vkCmdBeginRenderPass2`.
    #[inline]
    pub unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin_info: &vk::RenderPassBeginInfo,
        subpass_begin_info: &vk::SubpassBeginInfo,
    ) {
        (self.device_fn_1_2.cmd_begin_render_pass2)(
            command_buffer,
            render_pass_begin_info,
            subpass_begin_info,
        );
    }
    /// Wraps `vkCmdNextSubpass2`.
    #[inline]
    pub unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_begin_info: &vk::SubpassBeginInfo,
        subpass_end_info: &vk::SubpassEndInfo,
    ) {
        (self.device_fn_1_2.cmd_next_subpass2)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }
    /// Wraps `vkCmdEndRenderPass2`.
    #[inline]
    pub unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_end_info: &vk::SubpassEndInfo,
    ) {
        (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info);
    }
    /// Wraps `vkResetQueryPool` (host-side query reset).
    #[inline]
    pub unsafe fn reset_query_pool(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        (self.device_fn_1_2.reset_query_pool)(self.handle(), query_pool, first_query, query_count);
    }
    /// Wraps `vkGetSemaphoreCounterValue` for timeline semaphores.
    #[inline]
    pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
        let mut value = 0;
        (self.device_fn_1_2.get_semaphore_counter_value)(self.handle(), semaphore, &mut value)
            .result_with_success(value)
    }
    /// Wraps `vkWaitSemaphores`; `timeout` is in nanoseconds per the Vulkan
    /// spec — NOTE(review): unit not visible here, confirm against the spec.
    #[inline]
    pub unsafe fn wait_semaphores(
        &self,
        wait_info: &vk::SemaphoreWaitInfo,
        timeout: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result()
    }
    /// Wraps `vkSignalSemaphore`.
    #[inline]
    pub unsafe fn signal_semaphore(&self, signal_info: &vk::SemaphoreSignalInfo) -> VkResult<()> {
        (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result()
    }
    /// Wraps `vkGetBufferDeviceAddress`.
    #[inline]
    pub unsafe fn get_buffer_device_address(
        &self,
        info: &vk::BufferDeviceAddressInfo,
    ) -> vk::DeviceAddress {
        (self.device_fn_1_2.get_buffer_device_address)(self.handle(), info)
    }
    /// Wraps `vkGetBufferOpaqueCaptureAddress`.
    #[inline]
    pub unsafe fn get_buffer_opaque_capture_address(
        &self,
        info: &vk::BufferDeviceAddressInfo,
    ) -> u64 {
        (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info)
    }
    /// Wraps `vkGetDeviceMemoryOpaqueCaptureAddress`.
    #[inline]
    pub unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &vk::DeviceMemoryOpaqueCaptureAddressInfo,
    ) -> u64 {
        (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info)
    }
}
#[allow(non_camel_case_types)]
/// Thin wrappers around the core Vulkan 1.1 device commands; each method
/// forwards directly to the raw function pointer in `device_fn_1_1`.
impl Device {
    /// Returns the raw Vulkan 1.1 device function-pointer table.
    #[inline]
    pub fn fp_v1_1(&self) -> &vk::DeviceFnV1_1 {
        &self.device_fn_1_1
    }
    /// Wraps `vkBindBufferMemory2` over a batch of bind infos.
    #[inline]
    pub unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[vk::BindBufferMemoryInfo],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_buffer_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }
    /// Wraps `vkBindImageMemory2` over a batch of bind infos.
    #[inline]
    pub unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[vk::BindImageMemoryInfo],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_image_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }
    /// Wraps `vkGetDeviceGroupPeerMemoryFeatures`, returning the feature flags.
    #[inline]
    pub unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> vk::PeerMemoryFeatureFlags {
        let mut peer_memory_features = mem::zeroed();
        (self.device_fn_1_1.get_device_group_peer_memory_features)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            &mut peer_memory_features,
        );
        peer_memory_features
    }
    /// Wraps `vkCmdSetDeviceMask`.
    #[inline]
    pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
        (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask);
    }
    /// Wraps `vkCmdDispatchBase`.
    #[inline]
    pub unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: vk::CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.device_fn_1_1.cmd_dispatch_base)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }
    /// Wraps `vkGetImageMemoryRequirements2`, writing into `out`.
    #[inline]
    pub unsafe fn get_image_memory_requirements2(
        &self,
        info: &vk::ImageMemoryRequirementsInfo2,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out);
    }
    /// Wraps `vkGetBufferMemoryRequirements2`, writing into `out`.
    #[inline]
    pub unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &vk::BufferMemoryRequirementsInfo2,
        out: &mut vk::MemoryRequirements2,
    ) {
        (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out);
    }
    /// Queries only the element count for
    /// [`Self::get_image_sparse_memory_requirements2`] by passing a null
    /// output pointer, so the caller can size the output slice.
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2_len(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2,
    ) -> usize {
        let mut count = 0;
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            ptr::null_mut(),
        );
        count as usize
    }
    /// Wraps `vkGetImageSparseMemoryRequirements2`, filling `out`.
    ///
    /// # Panics
    /// Panics if `out.len()` does not match the count reported by the
    /// driver; size it with the `_len` variant first.
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2,
        out: &mut [vk::SparseImageMemoryRequirements2],
    ) {
        let mut count = out.len() as u32;
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            out.as_mut_ptr(),
        );
        assert_eq!(count as usize, out.len());
    }
    /// Wraps `vkTrimCommandPool`.
    #[inline]
    pub unsafe fn trim_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolTrimFlags,
    ) {
        (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags);
    }
    /// Wraps `vkGetDeviceQueue2`, returning the requested queue handle.
    #[inline]
    pub unsafe fn get_device_queue2(&self, queue_info: &vk::DeviceQueueInfo2) -> vk::Queue {
        let mut queue = mem::zeroed();
        (self.device_fn_1_1.get_device_queue2)(self.handle(), queue_info, &mut queue);
        queue
    }
    /// Wraps `vkCreateSamplerYcbcrConversion`.
    #[inline]
    pub unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &vk::SamplerYcbcrConversionCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::SamplerYcbcrConversion> {
        let mut ycbcr_conversion = mem::zeroed();
        (self.device_fn_1_1.create_sampler_ycbcr_conversion)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut ycbcr_conversion,
        )
        .result_with_success(ycbcr_conversion)
    }
    /// Wraps `vkDestroySamplerYcbcrConversion`.
    #[inline]
    pub unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: vk::SamplerYcbcrConversion,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)(
            self.handle(),
            ycbcr_conversion,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkCreateDescriptorUpdateTemplate`.
    #[inline]
    pub unsafe fn create_descriptor_update_template(
        &self,
        create_info: &vk::DescriptorUpdateTemplateCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorUpdateTemplate> {
        let mut descriptor_update_template = mem::zeroed();
        (self.device_fn_1_1.create_descriptor_update_template)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut descriptor_update_template,
        )
        .result_with_success(descriptor_update_template)
    }
    /// Wraps `vkDestroyDescriptorUpdateTemplate`.
    #[inline]
    pub unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_1.destroy_descriptor_update_template)(
            self.handle(),
            descriptor_update_template,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkUpdateDescriptorSetWithTemplate`. `data` is an untyped
    /// pointer whose layout must match the template's entries.
    #[inline]
    pub unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: vk::DescriptorSet,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        data: *const c_void,
    ) {
        (self.device_fn_1_1.update_descriptor_set_with_template)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }
    /// Wraps `vkGetDescriptorSetLayoutSupport`, writing into `out`.
    #[inline]
    pub unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo,
        out: &mut vk::DescriptorSetLayoutSupport,
    ) {
        (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out);
    }
}
#[allow(non_camel_case_types)]
impl Device {
    /// Returns the raw Vulkan 1.0 device function-pointer table.
    #[inline]
    pub fn fp_v1_0(&self) -> &vk::DeviceFnV1_0 {
        &self.device_fn_1_0
    }
    /// Wraps `vkDestroyDevice`; the wrapper is unusable afterwards.
    #[inline]
    pub unsafe fn destroy_device(&self, allocation_callbacks: Option<&vk::AllocationCallbacks>) {
        (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr());
    }
    /// Wraps `vkDestroySampler`.
    #[inline]
    pub unsafe fn destroy_sampler(
        &self,
        sampler: vk::Sampler,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_sampler)(
            self.handle(),
            sampler,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkFreeMemory`.
    #[inline]
    pub unsafe fn free_memory(
        &self,
        memory: vk::DeviceMemory,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.free_memory)(self.handle(), memory, allocation_callbacks.as_raw_ptr());
    }
    /// Wraps `vkFreeCommandBuffers`, returning the buffers to `command_pool`.
    #[inline]
    pub unsafe fn free_command_buffers(
        &self,
        command_pool: vk::CommandPool,
        command_buffers: &[vk::CommandBuffer],
    ) {
        (self.device_fn_1_0.free_command_buffers)(
            self.handle(),
            command_pool,
            command_buffers.len() as u32,
            command_buffers.as_ptr(),
        );
    }
    /// Wraps `vkCreateEvent`, returning the new event handle.
    #[inline]
    pub unsafe fn create_event(
        &self,
        create_info: &vk::EventCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Event> {
        // Zeroed handle acts as the out-parameter the driver overwrites.
        let mut event = mem::zeroed();
        (self.device_fn_1_0.create_event)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut event,
        )
        .result_with_success(event)
    }
    /// Wraps `vkGetEventStatus`: `EVENT_SET` maps to `Ok(true)`,
    /// `EVENT_RESET` to `Ok(false)`, and any other code is an error.
    #[inline]
    pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult<bool> {
        let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event);
        match err_code {
            vk::Result::EVENT_SET => Ok(true),
            vk::Result::EVENT_RESET => Ok(false),
            _ => Err(err_code),
        }
    }
    /// Wraps `vkSetEvent` (host-side signal).
    #[inline]
    pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.set_event)(self.handle(), event).result()
    }
    /// Wraps `vkResetEvent` (host-side reset).
    #[inline]
    pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.reset_event)(self.handle(), event).result()
    }
    /// Wraps `vkCmdSetEvent`.
    #[inline]
    pub unsafe fn cmd_set_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask);
    }
    /// Wraps `vkCmdResetEvent`.
    #[inline]
    pub unsafe fn cmd_reset_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask);
    }
    /// Wraps `vkCmdWaitEvents`; each barrier slice is passed as its own
    /// `(count, ptr)` pair, so mismatched lengths are fine here.
    #[inline]
    pub unsafe fn cmd_wait_events(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        memory_barriers: &[vk::MemoryBarrier],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier],
        image_memory_barriers: &[vk::ImageMemoryBarrier],
    ) {
        (self.device_fn_1_0.cmd_wait_events)(
            command_buffer,
            events.len() as _,
            events.as_ptr(),
            src_stage_mask,
            dst_stage_mask,
            memory_barriers.len() as _,
            memory_barriers.as_ptr(),
            buffer_memory_barriers.len() as _,
            buffer_memory_barriers.as_ptr(),
            image_memory_barriers.len() as _,
            image_memory_barriers.as_ptr(),
        );
    }
    /// Wraps `vkDestroyFence`.
    #[inline]
    pub unsafe fn destroy_fence(
        &self,
        fence: vk::Fence,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_fence)(self.handle(), fence, allocation_callbacks.as_raw_ptr());
    }
    /// Wraps `vkDestroyEvent`.
    #[inline]
    pub unsafe fn destroy_event(
        &self,
        event: vk::Event,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_event)(self.handle(), event, allocation_callbacks.as_raw_ptr());
    }
    /// Wraps `vkDestroyImage`.
    #[inline]
    pub unsafe fn destroy_image(
        &self,
        image: vk::Image,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_image)(self.handle(), image, allocation_callbacks.as_raw_ptr());
    }
    /// Wraps `vkDestroyCommandPool`.
    #[inline]
    pub unsafe fn destroy_command_pool(
        &self,
        pool: vk::CommandPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_command_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyImageView`.
    #[inline]
    pub unsafe fn destroy_image_view(
        &self,
        image_view: vk::ImageView,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_image_view)(
            self.handle(),
            image_view,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyRenderPass`.
    #[inline]
    pub unsafe fn destroy_render_pass(
        &self,
        renderpass: vk::RenderPass,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_render_pass)(
            self.handle(),
            renderpass,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyFramebuffer`.
    #[inline]
    pub unsafe fn destroy_framebuffer(
        &self,
        framebuffer: vk::Framebuffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_framebuffer)(
            self.handle(),
            framebuffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyPipelineLayout`.
    #[inline]
    pub unsafe fn destroy_pipeline_layout(
        &self,
        pipeline_layout: vk::PipelineLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_layout)(
            self.handle(),
            pipeline_layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyPipelineCache`.
    #[inline]
    pub unsafe fn destroy_pipeline_cache(
        &self,
        pipeline_cache: vk::PipelineCache,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_cache)(
            self.handle(),
            pipeline_cache,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyBuffer`.
    #[inline]
    pub unsafe fn destroy_buffer(
        &self,
        buffer: vk::Buffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_buffer)(
            self.handle(),
            buffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyShaderModule`.
    #[inline]
    pub unsafe fn destroy_shader_module(
        &self,
        shader: vk::ShaderModule,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_shader_module)(
            self.handle(),
            shader,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyPipeline`.
    #[inline]
    pub unsafe fn destroy_pipeline(
        &self,
        pipeline: vk::Pipeline,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_pipeline)(
            self.handle(),
            pipeline,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroySemaphore`.
    #[inline]
    pub unsafe fn destroy_semaphore(
        &self,
        semaphore: vk::Semaphore,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_semaphore)(
            self.handle(),
            semaphore,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyDescriptorPool`.
    #[inline]
    pub unsafe fn destroy_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyQueryPool`.
    #[inline]
    pub unsafe fn destroy_query_pool(
        &self,
        pool: vk::QueryPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_query_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkDestroyDescriptorSetLayout`.
    #[inline]
    pub unsafe fn destroy_descriptor_set_layout(
        &self,
        layout: vk::DescriptorSetLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_set_layout)(
            self.handle(),
            layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }
    /// Wraps `vkFreeDescriptorSets`, returning the sets to `pool`.
    #[inline]
    pub unsafe fn free_descriptor_sets(
        &self,
        pool: vk::DescriptorPool,
        descriptor_sets: &[vk::DescriptorSet],
    ) -> VkResult<()> {
        (self.device_fn_1_0.free_descriptor_sets)(
            self.handle(),
            pool,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
        )
        .result()
    }
    /// Wraps `vkUpdateDescriptorSets` with independent write and copy batches.
    #[inline]
    pub unsafe fn update_descriptor_sets(
        &self,
        descriptor_writes: &[vk::WriteDescriptorSet],
        descriptor_copies: &[vk::CopyDescriptorSet],
    ) {
        (self.device_fn_1_0.update_descriptor_sets)(
            self.handle(),
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr(),
            descriptor_copies.len() as u32,
            descriptor_copies.as_ptr(),
        );
    }
    /// Wraps `vkCreateSampler`, returning the new sampler handle.
    #[inline]
    pub unsafe fn create_sampler(
        &self,
        create_info: &vk::SamplerCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::Sampler> {
        // Zeroed handle acts as the out-parameter the driver overwrites.
        let mut sampler = mem::zeroed();
        (self.device_fn_1_0.create_sampler)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut sampler,
        )
        .result_with_success(sampler)
    }
    /// Wraps `vkCmdBlitImage`.
    #[inline]
    pub unsafe fn cmd_blit_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageBlit],
        filter: vk::Filter,
    ) {
        (self.device_fn_1_0.cmd_blit_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as _,
            regions.as_ptr(),
            filter,
        );
    }
    /// Wraps `vkCmdResolveImage`.
    #[inline]
    pub unsafe fn cmd_resolve_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageResolve],
    ) {
        (self.device_fn_1_0.cmd_resolve_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
    /// Wraps `vkCmdFillBuffer`.
    #[inline]
    pub unsafe fn cmd_fill_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        data: u32,
    ) {
        (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data);
    }
    /// Wraps `vkCmdUpdateBuffer`, copying `data` inline into the command
    /// buffer.
    #[inline]
    pub unsafe fn cmd_update_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        data: &[u8],
    ) {
        (self.device_fn_1_0.cmd_update_buffer)(
            command_buffer,
            buffer,
            offset,
            data.len() as u64,
            // Cast &[u8] pointer to the untyped pointer the FFI expects.
            data.as_ptr() as _,
        );
    }
    /// Wraps `vkCmdCopyBuffer`.
    #[inline]
    pub unsafe fn cmd_copy_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_buffer)(
            command_buffer,
            src_buffer,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
    /// Wraps `vkCmdCopyImageToBuffer`.
    #[inline]
    pub unsafe fn cmd_copy_image_to_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_image_to_buffer)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
    /// Wraps `vkCmdCopyBufferToImage`.
    #[inline]
    pub unsafe fn cmd_copy_buffer_to_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::BufferImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_buffer_to_image)(
            command_buffer,
            src_buffer,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
    /// Wraps `vkCmdCopyImage`.
    #[inline]
    pub unsafe fn cmd_copy_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
    /// Wraps `vkAllocateDescriptorSets`, returning one set per element of
    /// `allocate_info.descriptor_set_count`.
    #[inline]
    pub unsafe fn allocate_descriptor_sets(
        &self,
        allocate_info: &vk::DescriptorSetAllocateInfo,
    ) -> VkResult<Vec<vk::DescriptorSet>> {
        // The driver writes directly into the Vec's spare capacity; set_len
        // is only reached after `.result()?` confirms every set was written.
        let mut desc_set = Vec::with_capacity(allocate_info.descriptor_set_count as usize);
        (self.device_fn_1_0.allocate_descriptor_sets)(
            self.handle(),
            allocate_info,
            desc_set.as_mut_ptr(),
        )
        .result()?;
        desc_set.set_len(allocate_info.descriptor_set_count as usize);
        Ok(desc_set)
    }
    /// Wraps `vkCreateDescriptorSetLayout`, returning the new layout handle.
    #[inline]
    pub unsafe fn create_descriptor_set_layout(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorSetLayout> {
        let mut layout = mem::zeroed();
        (self.device_fn_1_0.create_descriptor_set_layout)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut layout,
        )
        .result_with_success(layout)
    }
    /// Wraps `vkDeviceWaitIdle`, blocking until all queues are idle.
    #[inline]
    pub unsafe fn device_wait_idle(&self) -> VkResult<()> {
        (self.device_fn_1_0.device_wait_idle)(self.handle()).result()
    }
    /// Wraps `vkCreateDescriptorPool`, returning the new pool handle.
    #[inline]
    pub unsafe fn create_descriptor_pool(
        &self,
        create_info: &vk::DescriptorPoolCreateInfo,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::DescriptorPool> {
        let mut pool = mem::zeroed();
        (self.device_fn_1_0.create_descriptor_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut pool,
        )
        .result_with_success(pool)
    }
/// Returns all descriptor sets in `pool` to the pool; wraps `vkResetDescriptorPool`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetDescriptorPool.html>
#[inline]
pub unsafe fn reset_descriptor_pool(
&self,
pool: vk::DescriptorPool,
flags: vk::DescriptorPoolResetFlags,
) -> VkResult<()> {
(self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result()
}
/// Resets a command pool and all command buffers allocated from it;
/// wraps `vkResetCommandPool`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetCommandPool.html>
#[inline]
pub unsafe fn reset_command_pool(
&self,
command_pool: vk::CommandPool,
flags: vk::CommandPoolResetFlags,
) -> VkResult<()> {
(self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result()
}
/// Resets a single command buffer; wraps `vkResetCommandBuffer`.
///
/// Note: this is a command-buffer-level call, so no device handle is passed.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetCommandBuffer.html>
#[inline]
pub unsafe fn reset_command_buffer(
&self,
command_buffer: vk::CommandBuffer,
flags: vk::CommandBufferResetFlags,
) -> VkResult<()> {
(self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result()
}
/// Resets the given fences to the unsignaled state; wraps `vkResetFences`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetFences.html>
#[inline]
pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> {
(self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr())
.result()
}
/// Binds an index buffer to a command buffer; wraps `vkCmdBindIndexBuffer`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindIndexBuffer.html>
#[inline]
pub unsafe fn cmd_bind_index_buffer(
&self,
command_buffer: vk::CommandBuffer,
buffer: vk::Buffer,
offset: vk::DeviceSize,
index_type: vk::IndexType,
) {
(self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
}
/// Records a clear of color image subresource ranges; wraps `vkCmdClearColorImage`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdClearColorImage.html>
#[inline]
pub unsafe fn cmd_clear_color_image(
&self,
command_buffer: vk::CommandBuffer,
image: vk::Image,
image_layout: vk::ImageLayout,
clear_color_value: &vk::ClearColorValue,
ranges: &[vk::ImageSubresourceRange],
) {
(self.device_fn_1_0.cmd_clear_color_image)(
command_buffer,
image,
image_layout,
clear_color_value,
ranges.len() as u32,
ranges.as_ptr(),
);
}
/// Records a clear of depth/stencil image subresource ranges;
/// wraps `vkCmdClearDepthStencilImage`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdClearDepthStencilImage.html>
#[inline]
pub unsafe fn cmd_clear_depth_stencil_image(
&self,
command_buffer: vk::CommandBuffer,
image: vk::Image,
image_layout: vk::ImageLayout,
clear_depth_stencil_value: &vk::ClearDepthStencilValue,
ranges: &[vk::ImageSubresourceRange],
) {
(self.device_fn_1_0.cmd_clear_depth_stencil_image)(
command_buffer,
image,
image_layout,
clear_depth_stencil_value,
ranges.len() as u32,
ranges.as_ptr(),
);
}
/// Records a clear of regions within bound framebuffer attachments;
/// wraps `vkCmdClearAttachments`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdClearAttachments.html>
#[inline]
pub unsafe fn cmd_clear_attachments(
&self,
command_buffer: vk::CommandBuffer,
attachments: &[vk::ClearAttachment],
rects: &[vk::ClearRect],
) {
(self.device_fn_1_0.cmd_clear_attachments)(
command_buffer,
attachments.len() as u32,
attachments.as_ptr(),
rects.len() as u32,
rects.as_ptr(),
);
}
/// Records an indexed draw; wraps `vkCmdDrawIndexed`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexed.html>
#[inline]
pub unsafe fn cmd_draw_indexed(
&self,
command_buffer: vk::CommandBuffer,
index_count: u32,
instance_count: u32,
first_index: u32,
vertex_offset: i32,
first_instance: u32,
) {
(self.device_fn_1_0.cmd_draw_indexed)(
command_buffer,
index_count,
instance_count,
first_index,
vertex_offset,
first_instance,
);
}
/// Records an indirect indexed draw with parameters read from `buffer`;
/// wraps `vkCmdDrawIndexedIndirect`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirect.html>
#[inline]
pub unsafe fn cmd_draw_indexed_indirect(
&self,
command_buffer: vk::CommandBuffer,
buffer: vk::Buffer,
offset: vk::DeviceSize,
draw_count: u32,
stride: u32,
) {
(self.device_fn_1_0.cmd_draw_indexed_indirect)(
command_buffer,
buffer,
offset,
draw_count,
stride,
);
}
/// Executes secondary command buffers from a primary command buffer;
/// wraps `vkCmdExecuteCommands`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdExecuteCommands.html>
#[inline]
pub unsafe fn cmd_execute_commands(
&self,
primary_command_buffer: vk::CommandBuffer,
secondary_command_buffers: &[vk::CommandBuffer],
) {
(self.device_fn_1_0.cmd_execute_commands)(
primary_command_buffer,
secondary_command_buffers.len() as u32,
secondary_command_buffers.as_ptr(),
);
}
/// Binds descriptor sets to a command buffer; wraps `vkCmdBindDescriptorSets`.
///
/// `dynamic_offsets` must contain one entry per dynamic descriptor in the
/// bound sets (validated by the driver/validation layers, not here).
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindDescriptorSets.html>
#[inline]
pub unsafe fn cmd_bind_descriptor_sets(
&self,
command_buffer: vk::CommandBuffer,
pipeline_bind_point: vk::PipelineBindPoint,
layout: vk::PipelineLayout,
first_set: u32,
descriptor_sets: &[vk::DescriptorSet],
dynamic_offsets: &[u32],
) {
(self.device_fn_1_0.cmd_bind_descriptor_sets)(
command_buffer,
pipeline_bind_point,
layout,
first_set,
descriptor_sets.len() as u32,
descriptor_sets.as_ptr(),
dynamic_offsets.len() as u32,
dynamic_offsets.as_ptr(),
);
}
/// Records a copy of query results into a buffer; wraps `vkCmdCopyQueryPoolResults`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyQueryPoolResults.html>
#[inline]
pub unsafe fn cmd_copy_query_pool_results(
&self,
command_buffer: vk::CommandBuffer,
query_pool: vk::QueryPool,
first_query: u32,
query_count: u32,
dst_buffer: vk::Buffer,
dst_offset: vk::DeviceSize,
stride: vk::DeviceSize,
flags: vk::QueryResultFlags,
) {
(self.device_fn_1_0.cmd_copy_query_pool_results)(
command_buffer,
query_pool,
first_query,
query_count,
dst_buffer,
dst_offset,
stride,
flags,
);
}
/// Updates push constants; wraps `vkCmdPushConstants`.
///
/// `constants` is a raw byte slice; the caller is responsible for the
/// layout matching the pipeline layout's push-constant ranges.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdPushConstants.html>
#[inline]
pub unsafe fn cmd_push_constants(
&self,
command_buffer: vk::CommandBuffer,
layout: vk::PipelineLayout,
stage_flags: vk::ShaderStageFlags,
offset: u32,
constants: &[u8],
) {
(self.device_fn_1_0.cmd_push_constants)(
command_buffer,
layout,
stage_flags,
offset,
// Cast len to u32 and the data pointer to *const c_void for the C ABI.
constants.len() as _,
constants.as_ptr() as _,
);
}
/// Begins a render pass instance; wraps `vkCmdBeginRenderPass`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass.html>
#[inline]
pub unsafe fn cmd_begin_render_pass(
&self,
command_buffer: vk::CommandBuffer,
render_pass_begin: &vk::RenderPassBeginInfo,
contents: vk::SubpassContents,
) {
(self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, render_pass_begin, contents);
}
/// Transitions to the next subpass of the current render pass;
/// wraps `vkCmdNextSubpass`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass.html>
#[inline]
pub unsafe fn cmd_next_subpass(
&self,
command_buffer: vk::CommandBuffer,
contents: vk::SubpassContents,
) {
(self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents);
}
/// Binds a pipeline to a command buffer; wraps `vkCmdBindPipeline`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindPipeline.html>
#[inline]
pub unsafe fn cmd_bind_pipeline(
&self,
command_buffer: vk::CommandBuffer,
pipeline_bind_point: vk::PipelineBindPoint,
pipeline: vk::Pipeline,
) {
(self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
}
/// Sets dynamic scissor rectangles; wraps `vkCmdSetScissor`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissor.html>
#[inline]
pub unsafe fn cmd_set_scissor(
&self,
command_buffer: vk::CommandBuffer,
first_scissor: u32,
scissors: &[vk::Rect2D],
) {
(self.device_fn_1_0.cmd_set_scissor)(
command_buffer,
first_scissor,
scissors.len() as u32,
scissors.as_ptr(),
);
}
/// Sets the dynamic line width; wraps `vkCmdSetLineWidth`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetLineWidth.html>
#[inline]
pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) {
(self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width);
}
/// Binds vertex buffers to a command buffer; wraps `vkCmdBindVertexBuffers`.
///
/// `buffers` and `offsets` are parallel slices; the C API takes a single
/// count, so only `buffers.len()` is forwarded — the debug assert catches
/// mismatched lengths in debug builds (release builds would read out of
/// bounds from the shorter slice otherwise).
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers.html>
#[inline]
pub unsafe fn cmd_bind_vertex_buffers(
&self,
command_buffer: vk::CommandBuffer,
first_binding: u32,
buffers: &[vk::Buffer],
offsets: &[vk::DeviceSize],
) {
debug_assert_eq!(buffers.len(), offsets.len());
(self.device_fn_1_0.cmd_bind_vertex_buffers)(
command_buffer,
first_binding,
buffers.len() as u32,
buffers.as_ptr(),
offsets.as_ptr(),
);
}
/// Ends the current render pass instance; wraps `vkCmdEndRenderPass`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass.html>
#[inline]
pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) {
(self.device_fn_1_0.cmd_end_render_pass)(command_buffer);
}
/// Records a non-indexed draw; wraps `vkCmdDraw`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDraw.html>
#[inline]
pub unsafe fn cmd_draw(
&self,
command_buffer: vk::CommandBuffer,
vertex_count: u32,
instance_count: u32,
first_vertex: u32,
first_instance: u32,
) {
(self.device_fn_1_0.cmd_draw)(
command_buffer,
vertex_count,
instance_count,
first_vertex,
first_instance,
);
}
/// Records an indirect draw with parameters read from `buffer`;
/// wraps `vkCmdDrawIndirect`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirect.html>
#[inline]
pub unsafe fn cmd_draw_indirect(
&self,
command_buffer: vk::CommandBuffer,
buffer: vk::Buffer,
offset: vk::DeviceSize,
draw_count: u32,
stride: u32,
) {
(self.device_fn_1_0.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
}
/// Records a compute dispatch; wraps `vkCmdDispatch`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDispatch.html>
#[inline]
pub unsafe fn cmd_dispatch(
&self,
command_buffer: vk::CommandBuffer,
group_count_x: u32,
group_count_y: u32,
group_count_z: u32,
) {
(self.device_fn_1_0.cmd_dispatch)(
command_buffer,
group_count_x,
group_count_y,
group_count_z,
);
}
/// Records an indirect compute dispatch with parameters read from `buffer`;
/// wraps `vkCmdDispatchIndirect`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchIndirect.html>
#[inline]
pub unsafe fn cmd_dispatch_indirect(
&self,
command_buffer: vk::CommandBuffer,
buffer: vk::Buffer,
offset: vk::DeviceSize,
) {
(self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset);
}
/// Sets dynamic viewports; wraps `vkCmdSetViewport`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewport.html>
#[inline]
pub unsafe fn cmd_set_viewport(
&self,
command_buffer: vk::CommandBuffer,
first_viewport: u32,
viewports: &[vk::Viewport],
) {
(self.device_fn_1_0.cmd_set_viewport)(
command_buffer,
first_viewport,
viewports.len() as u32,
viewports.as_ptr(),
);
}
/// Sets the dynamic depth-bias parameters; wraps `vkCmdSetDepthBias`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBias.html>
#[inline]
pub unsafe fn cmd_set_depth_bias(
&self,
command_buffer: vk::CommandBuffer,
constant_factor: f32,
clamp: f32,
slope_factor: f32,
) {
(self.device_fn_1_0.cmd_set_depth_bias)(
command_buffer,
constant_factor,
clamp,
slope_factor,
);
}
/// Sets the dynamic blend constants (RGBA); wraps `vkCmdSetBlendConstants`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetBlendConstants.html>
#[inline]
pub unsafe fn cmd_set_blend_constants(
&self,
command_buffer: vk::CommandBuffer,
blend_constants: &[f32; 4],
) {
(self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants);
}
/// Sets the dynamic depth-bounds test range; wraps `vkCmdSetDepthBounds`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBounds.html>
#[inline]
pub unsafe fn cmd_set_depth_bounds(
&self,
command_buffer: vk::CommandBuffer,
min_depth_bounds: f32,
max_depth_bounds: f32,
) {
(self.device_fn_1_0.cmd_set_depth_bounds)(
command_buffer,
min_depth_bounds,
max_depth_bounds,
);
}
/// Sets the dynamic stencil compare mask; wraps `vkCmdSetStencilCompareMask`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilCompareMask.html>
#[inline]
pub unsafe fn cmd_set_stencil_compare_mask(
&self,
command_buffer: vk::CommandBuffer,
face_mask: vk::StencilFaceFlags,
compare_mask: u32,
) {
(self.device_fn_1_0.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
}
/// Sets the dynamic stencil write mask; wraps `vkCmdSetStencilWriteMask`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilWriteMask.html>
#[inline]
pub unsafe fn cmd_set_stencil_write_mask(
&self,
command_buffer: vk::CommandBuffer,
face_mask: vk::StencilFaceFlags,
write_mask: u32,
) {
(self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
}
/// Sets the dynamic stencil reference value; wraps `vkCmdSetStencilReference`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilReference.html>
#[inline]
pub unsafe fn cmd_set_stencil_reference(
&self,
command_buffer: vk::CommandBuffer,
face_mask: vk::StencilFaceFlags,
reference: u32,
) {
(self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference);
}
/// Copies `query_count` results from a query pool into `data`;
/// wraps `vkGetQueryPoolResults`.
///
/// Each result is written as one `T` (the element size is passed as the
/// stride), so `data` must hold at least `query_count` elements — enforced
/// by the assert below. `T` is typically `u32` or `u64` depending on
/// whether `flags` contains `QueryResultFlags::TYPE_64`; the caller is
/// responsible for that match.
///
/// # Panics
/// Panics if `query_count as usize > data.len()`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetQueryPoolResults.html>
#[inline]
pub unsafe fn get_query_pool_results<T>(
&self,
query_pool: vk::QueryPool,
first_query: u32,
query_count: u32,
data: &mut [T],
flags: vk::QueryResultFlags,
) -> VkResult<()> {
let data_length = query_count as usize;
assert!(
data_length <= data.len(),
"query_count was higher than the length of the slice"
);
// dataSize in bytes for exactly query_count results.
let data_size = mem::size_of::<T>() * data_length;
(self.device_fn_1_0.get_query_pool_results)(
self.handle(),
query_pool,
first_query,
query_count,
data_size,
data.as_mut_ptr().cast(),
// stride between consecutive results = one element of T
mem::size_of::<T>() as _,
flags,
)
.result()
}
/// Begins a query; wraps `vkCmdBeginQuery`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginQuery.html>
#[inline]
pub unsafe fn cmd_begin_query(
&self,
command_buffer: vk::CommandBuffer,
query_pool: vk::QueryPool,
query: u32,
flags: vk::QueryControlFlags,
) {
(self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags);
}
/// Ends a query; wraps `vkCmdEndQuery`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndQuery.html>
#[inline]
pub unsafe fn cmd_end_query(
&self,
command_buffer: vk::CommandBuffer,
query_pool: vk::QueryPool,
query: u32,
) {
(self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query);
}
/// Records a reset of a range of queries in a pool; wraps `vkCmdResetQueryPool`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdResetQueryPool.html>
#[inline]
pub unsafe fn cmd_reset_query_pool(
&self,
command_buffer: vk::CommandBuffer,
pool: vk::QueryPool,
first_query: u32,
query_count: u32,
) {
(self.device_fn_1_0.cmd_reset_query_pool)(command_buffer, pool, first_query, query_count);
}
/// Records a timestamp write into a query pool; wraps `vkCmdWriteTimestamp`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp.html>
#[inline]
pub unsafe fn cmd_write_timestamp(
&self,
command_buffer: vk::CommandBuffer,
pipeline_stage: vk::PipelineStageFlags,
query_pool: vk::QueryPool,
query: u32,
) {
(self.device_fn_1_0.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query);
}
/// Creates a semaphore; wraps `vkCreateSemaphore`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateSemaphore.html>
#[inline]
pub unsafe fn create_semaphore(
&self,
create_info: &vk::SemaphoreCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Semaphore> {
let mut semaphore = mem::zeroed();
(self.device_fn_1_0.create_semaphore)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut semaphore,
)
.result_with_success(semaphore)
}
/// Creates one graphics pipeline per entry in `create_infos`;
/// wraps `vkCreateGraphicsPipelines`.
///
/// On error the partially-filled pipeline vector is returned alongside the
/// error code: Vulkan still writes an entry for every create-info (failed
/// slots are null handles), and successfully created pipelines must be
/// destroyable by the caller.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateGraphicsPipelines.html>
#[inline]
pub unsafe fn create_graphics_pipelines(
&self,
pipeline_cache: vk::PipelineCache,
create_infos: &[vk::GraphicsPipelineCreateInfo],
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
let mut pipelines = Vec::with_capacity(create_infos.len());
let err_code = (self.device_fn_1_0.create_graphics_pipelines)(
self.handle(),
pipeline_cache,
create_infos.len() as u32,
create_infos.as_ptr(),
allocation_callbacks.as_raw_ptr(),
pipelines.as_mut_ptr(),
);
// set_len even on error: the driver wrote all create_infos.len() slots.
pipelines.set_len(create_infos.len());
match err_code {
vk::Result::SUCCESS => Ok(pipelines),
_ => Err((pipelines, err_code)),
}
}
/// Creates one compute pipeline per entry in `create_infos`;
/// wraps `vkCreateComputePipelines`.
///
/// On error the partially-filled pipeline vector is returned alongside the
/// error code, mirroring [`Self::create_graphics_pipelines`].
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateComputePipelines.html>
#[inline]
pub unsafe fn create_compute_pipelines(
&self,
pipeline_cache: vk::PipelineCache,
create_infos: &[vk::ComputePipelineCreateInfo],
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
let mut pipelines = Vec::with_capacity(create_infos.len());
let err_code = (self.device_fn_1_0.create_compute_pipelines)(
self.handle(),
pipeline_cache,
create_infos.len() as u32,
create_infos.as_ptr(),
allocation_callbacks.as_raw_ptr(),
pipelines.as_mut_ptr(),
);
// set_len even on error: the driver wrote all create_infos.len() slots.
pipelines.set_len(create_infos.len());
match err_code {
vk::Result::SUCCESS => Ok(pipelines),
_ => Err((pipelines, err_code)),
}
}
/// Creates a buffer; wraps `vkCreateBuffer`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateBuffer.html>
#[inline]
pub unsafe fn create_buffer(
&self,
create_info: &vk::BufferCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Buffer> {
let mut buffer = mem::zeroed();
(self.device_fn_1_0.create_buffer)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut buffer,
)
.result_with_success(buffer)
}
/// Creates a pipeline layout; wraps `vkCreatePipelineLayout`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreatePipelineLayout.html>
#[inline]
pub unsafe fn create_pipeline_layout(
&self,
create_info: &vk::PipelineLayoutCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::PipelineLayout> {
let mut pipeline_layout = mem::zeroed();
(self.device_fn_1_0.create_pipeline_layout)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut pipeline_layout,
)
.result_with_success(pipeline_layout)
}
/// Creates a pipeline cache; wraps `vkCreatePipelineCache`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreatePipelineCache.html>
#[inline]
pub unsafe fn create_pipeline_cache(
&self,
create_info: &vk::PipelineCacheCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::PipelineCache> {
let mut pipeline_cache = mem::zeroed();
(self.device_fn_1_0.create_pipeline_cache)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut pipeline_cache,
)
.result_with_success(pipeline_cache)
}
/// Retrieves the serialized contents of a pipeline cache;
/// wraps `vkGetPipelineCacheData`.
///
/// Uses the standard two-call pattern (query size, then fill) via the
/// `read_into_uninitialized_vector` helper, retrying on `VK_INCOMPLETE`
/// as that helper dictates.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetPipelineCacheData.html>
#[inline]
pub unsafe fn get_pipeline_cache_data(
&self,
pipeline_cache: vk::PipelineCache,
) -> VkResult<Vec<u8>> {
read_into_uninitialized_vector(|count, data| {
(self.device_fn_1_0.get_pipeline_cache_data)(
self.handle(),
pipeline_cache,
count,
data as _,
)
})
}
/// Merges `src_caches` into `dst_cache`; wraps `vkMergePipelineCaches`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkMergePipelineCaches.html>
#[inline]
pub unsafe fn merge_pipeline_caches(
&self,
dst_cache: vk::PipelineCache,
src_caches: &[vk::PipelineCache],
) -> VkResult<()> {
(self.device_fn_1_0.merge_pipeline_caches)(
self.handle(),
dst_cache,
src_caches.len() as u32,
src_caches.as_ptr(),
)
.result()
}
/// Maps a range of device memory into host address space and returns the
/// raw host pointer; wraps `vkMapMemory`.
///
/// The returned pointer stays valid until [`Self::unmap_memory`] is called
/// for the same memory object.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkMapMemory.html>
#[inline]
pub unsafe fn map_memory(
&self,
memory: vk::DeviceMemory,
offset: vk::DeviceSize,
size: vk::DeviceSize,
flags: vk::MemoryMapFlags,
) -> VkResult<*mut c_void> {
let mut data: *mut c_void = ptr::null_mut();
(self.device_fn_1_0.map_memory)(self.handle(), memory, offset, size, flags, &mut data)
.result_with_success(data)
}
/// Unmaps previously mapped device memory; wraps `vkUnmapMemory`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkUnmapMemory.html>
#[inline]
pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) {
(self.device_fn_1_0.unmap_memory)(self.handle(), memory);
}
/// Invalidates mapped memory ranges so device writes become host-visible;
/// wraps `vkInvalidateMappedMemoryRanges`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkInvalidateMappedMemoryRanges.html>
#[inline]
pub unsafe fn invalidate_mapped_memory_ranges(
&self,
ranges: &[vk::MappedMemoryRange],
) -> VkResult<()> {
(self.device_fn_1_0.invalidate_mapped_memory_ranges)(
self.handle(),
ranges.len() as u32,
ranges.as_ptr(),
)
.result()
}
/// Flushes mapped memory ranges so host writes become device-visible;
/// wraps `vkFlushMappedMemoryRanges`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkFlushMappedMemoryRanges.html>
#[inline]
pub unsafe fn flush_mapped_memory_ranges(
&self,
ranges: &[vk::MappedMemoryRange],
) -> VkResult<()> {
(self.device_fn_1_0.flush_mapped_memory_ranges)(
self.handle(),
ranges.len() as u32,
ranges.as_ptr(),
)
.result()
}
/// Creates a framebuffer; wraps `vkCreateFramebuffer`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateFramebuffer.html>
#[inline]
pub unsafe fn create_framebuffer(
&self,
create_info: &vk::FramebufferCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Framebuffer> {
let mut framebuffer = mem::zeroed();
(self.device_fn_1_0.create_framebuffer)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut framebuffer,
)
.result_with_success(framebuffer)
}
/// Retrieves a queue handle from the device; wraps `vkGetDeviceQueue`.
///
/// Infallible in the C API (no VkResult), hence the plain return value.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceQueue.html>
#[inline]
pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue {
let mut queue = mem::zeroed();
(self.device_fn_1_0.get_device_queue)(
self.handle(),
queue_family_index,
queue_index,
&mut queue,
);
queue
}
/// Records a pipeline barrier with memory, buffer and image barriers;
/// wraps `vkCmdPipelineBarrier`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier.html>
#[inline]
pub unsafe fn cmd_pipeline_barrier(
&self,
command_buffer: vk::CommandBuffer,
src_stage_mask: vk::PipelineStageFlags,
dst_stage_mask: vk::PipelineStageFlags,
dependency_flags: vk::DependencyFlags,
memory_barriers: &[vk::MemoryBarrier],
buffer_memory_barriers: &[vk::BufferMemoryBarrier],
image_memory_barriers: &[vk::ImageMemoryBarrier],
) {
(self.device_fn_1_0.cmd_pipeline_barrier)(
command_buffer,
src_stage_mask,
dst_stage_mask,
dependency_flags,
// Each slice becomes a (count, pointer) pair in the C signature.
memory_barriers.len() as u32,
memory_barriers.as_ptr(),
buffer_memory_barriers.len() as u32,
buffer_memory_barriers.as_ptr(),
image_memory_barriers.len() as u32,
image_memory_barriers.as_ptr(),
);
}
/// Creates a render pass; wraps `vkCreateRenderPass`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass.html>
#[inline]
pub unsafe fn create_render_pass(
&self,
create_info: &vk::RenderPassCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::RenderPass> {
let mut renderpass = mem::zeroed();
(self.device_fn_1_0.create_render_pass)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut renderpass,
)
.result_with_success(renderpass)
}
/// Starts recording a command buffer; wraps `vkBeginCommandBuffer`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBeginCommandBuffer.html>
#[inline]
pub unsafe fn begin_command_buffer(
&self,
command_buffer: vk::CommandBuffer,
begin_info: &vk::CommandBufferBeginInfo,
) -> VkResult<()> {
(self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result()
}
/// Finishes recording a command buffer; wraps `vkEndCommandBuffer`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkEndCommandBuffer.html>
#[inline]
pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> {
(self.device_fn_1_0.end_command_buffer)(command_buffer).result()
}
/// Waits for one or all of the given fences to signal, up to `timeout`
/// nanoseconds; wraps `vkWaitForFences`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkWaitForFences.html>
#[inline]
pub unsafe fn wait_for_fences(
&self,
fences: &[vk::Fence],
wait_all: bool,
timeout: u64,
) -> VkResult<()> {
(self.device_fn_1_0.wait_for_fences)(
self.handle(),
fences.len() as u32,
fences.as_ptr(),
// Rust bool -> VkBool32 (0 or 1).
wait_all as u32,
timeout,
)
.result()
}
/// Queries a fence's state; wraps `vkGetFenceStatus`.
///
/// Maps `VK_SUCCESS` to `Ok(true)` (signaled) and `VK_NOT_READY` to
/// `Ok(false)` (unsignaled); any other code is a genuine error.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetFenceStatus.html>
#[inline]
pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult<bool> {
let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence);
match err_code {
vk::Result::SUCCESS => Ok(true),
vk::Result::NOT_READY => Ok(false),
_ => Err(err_code),
}
}
/// Blocks until the given queue is idle; wraps `vkQueueWaitIdle`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueWaitIdle.html>
#[inline]
pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> {
(self.device_fn_1_0.queue_wait_idle)(queue).result()
}
/// Submits work to a queue, optionally signaling `fence` on completion;
/// wraps `vkQueueSubmit`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit.html>
#[inline]
pub unsafe fn queue_submit(
&self,
queue: vk::Queue,
submits: &[vk::SubmitInfo],
fence: vk::Fence,
) -> VkResult<()> {
(self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence)
.result()
}
/// Submits sparse binding operations to a queue; wraps `vkQueueBindSparse`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueBindSparse.html>
#[inline]
pub unsafe fn queue_bind_sparse(
&self,
queue: vk::Queue,
bind_info: &[vk::BindSparseInfo],
fence: vk::Fence,
) -> VkResult<()> {
(self.device_fn_1_0.queue_bind_sparse)(
queue,
bind_info.len() as u32,
bind_info.as_ptr(),
fence,
)
.result()
}
/// Creates a buffer view; wraps `vkCreateBufferView`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateBufferView.html>
#[inline]
pub unsafe fn create_buffer_view(
&self,
create_info: &vk::BufferViewCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::BufferView> {
let mut buffer_view = mem::zeroed();
(self.device_fn_1_0.create_buffer_view)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut buffer_view,
)
.result_with_success(buffer_view)
}
/// Destroys a buffer view; wraps `vkDestroyBufferView`.
///
/// `allocation_callbacks` must match the ones used at creation (or `None`
/// for both), per the Vulkan allocation rules.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyBufferView.html>
#[inline]
pub unsafe fn destroy_buffer_view(
&self,
buffer_view: vk::BufferView,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
(self.device_fn_1_0.destroy_buffer_view)(
self.handle(),
buffer_view,
allocation_callbacks.as_raw_ptr(),
);
}
/// Creates an image view; wraps `vkCreateImageView`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateImageView.html>
#[inline]
pub unsafe fn create_image_view(
&self,
create_info: &vk::ImageViewCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::ImageView> {
let mut image_view = mem::zeroed();
(self.device_fn_1_0.create_image_view)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut image_view,
)
.result_with_success(image_view)
}
/// Allocates command buffers from a pool; wraps `vkAllocateCommandBuffers`.
///
/// Returns one `vk::CommandBuffer` per entry requested by
/// `allocate_info.command_buffer_count`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkAllocateCommandBuffers.html>
#[inline]
pub unsafe fn allocate_command_buffers(
&self,
allocate_info: &vk::CommandBufferAllocateInfo,
) -> VkResult<Vec<vk::CommandBuffer>> {
// Driver fills the Vec's spare capacity; length is set only on success.
let mut buffers = Vec::with_capacity(allocate_info.command_buffer_count as usize);
(self.device_fn_1_0.allocate_command_buffers)(
self.handle(),
allocate_info,
buffers.as_mut_ptr(),
)
.result()?;
buffers.set_len(allocate_info.command_buffer_count as usize);
Ok(buffers)
}
/// Creates a command pool; wraps `vkCreateCommandPool`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateCommandPool.html>
#[inline]
pub unsafe fn create_command_pool(
&self,
create_info: &vk::CommandPoolCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::CommandPool> {
let mut pool = mem::zeroed();
(self.device_fn_1_0.create_command_pool)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut pool,
)
.result_with_success(pool)
}
/// Creates a query pool; wraps `vkCreateQueryPool`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateQueryPool.html>
#[inline]
pub unsafe fn create_query_pool(
&self,
create_info: &vk::QueryPoolCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::QueryPool> {
let mut pool = mem::zeroed();
(self.device_fn_1_0.create_query_pool)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut pool,
)
.result_with_success(pool)
}
/// Creates an image; wraps `vkCreateImage`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateImage.html>
#[inline]
pub unsafe fn create_image(
&self,
create_info: &vk::ImageCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Image> {
let mut image = mem::zeroed();
(self.device_fn_1_0.create_image)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut image,
)
.result_with_success(image)
}
/// Queries the memory layout of an image subresource;
/// wraps `vkGetImageSubresourceLayout`.
///
/// Infallible in the C API (no VkResult), hence the plain return value.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageSubresourceLayout.html>
#[inline]
pub unsafe fn get_image_subresource_layout(
&self,
image: vk::Image,
subresource: vk::ImageSubresource,
) -> vk::SubresourceLayout {
// Zeroed out-struct; the driver fully overwrites it.
let mut layout = mem::zeroed();
(self.device_fn_1_0.get_image_subresource_layout)(
self.handle(),
image,
&subresource,
&mut layout,
);
layout
}
/// Queries the memory requirements of an image;
/// wraps `vkGetImageMemoryRequirements`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements.html>
#[inline]
pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements {
let mut mem_req = mem::zeroed();
(self.device_fn_1_0.get_image_memory_requirements)(self.handle(), image, &mut mem_req);
mem_req
}
/// Queries the memory requirements of a buffer;
/// wraps `vkGetBufferMemoryRequirements`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetBufferMemoryRequirements.html>
#[inline]
pub unsafe fn get_buffer_memory_requirements(
&self,
buffer: vk::Buffer,
) -> vk::MemoryRequirements {
let mut mem_req = mem::zeroed();
(self.device_fn_1_0.get_buffer_memory_requirements)(self.handle(), buffer, &mut mem_req);
mem_req
}
/// Allocates device memory; wraps `vkAllocateMemory`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkAllocateMemory.html>
#[inline]
pub unsafe fn allocate_memory(
&self,
allocate_info: &vk::MemoryAllocateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::DeviceMemory> {
let mut memory = mem::zeroed();
(self.device_fn_1_0.allocate_memory)(
self.handle(),
allocate_info,
allocation_callbacks.as_raw_ptr(),
&mut memory,
)
.result_with_success(memory)
}
/// Creates a shader module from SPIR-V code in `create_info`;
/// wraps `vkCreateShaderModule`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateShaderModule.html>
#[inline]
pub unsafe fn create_shader_module(
&self,
create_info: &vk::ShaderModuleCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::ShaderModule> {
let mut shader = mem::zeroed();
(self.device_fn_1_0.create_shader_module)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut shader,
)
.result_with_success(shader)
}
/// Creates a fence; wraps `vkCreateFence`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateFence.html>
#[inline]
pub unsafe fn create_fence(
&self,
create_info: &vk::FenceCreateInfo,
allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Fence> {
let mut fence = mem::zeroed();
(self.device_fn_1_0.create_fence)(
self.handle(),
create_info,
allocation_callbacks.as_raw_ptr(),
&mut fence,
)
.result_with_success(fence)
}
/// Binds device memory to a buffer at the given offset;
/// wraps `vkBindBufferMemory`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBindBufferMemory.html>
#[inline]
pub unsafe fn bind_buffer_memory(
&self,
buffer: vk::Buffer,
device_memory: vk::DeviceMemory,
offset: vk::DeviceSize,
) -> VkResult<()> {
(self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset)
.result()
}
/// Binds device memory to an image at the given offset;
/// wraps `vkBindImageMemory`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBindImageMemory.html>
#[inline]
pub unsafe fn bind_image_memory(
&self,
image: vk::Image,
device_memory: vk::DeviceMemory,
offset: vk::DeviceSize,
) -> VkResult<()> {
(self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset).result()
}
/// Queries the optimal render-area granularity for a render pass;
/// wraps `vkGetRenderAreaGranularity`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetRenderAreaGranularity.html>
#[inline]
pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D {
let mut granularity = mem::zeroed();
(self.device_fn_1_0.get_render_area_granularity)(
self.handle(),
render_pass,
&mut granularity,
);
granularity
}
/// Queries the committed size (in bytes) of a lazily-allocated memory
/// object; wraps `vkGetDeviceMemoryCommitment`.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceMemoryCommitment.html>
#[inline]
pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize {
let mut committed_memory_in_bytes = 0;
(self.device_fn_1_0.get_device_memory_commitment)(
self.handle(),
memory,
&mut committed_memory_in_bytes,
);
committed_memory_in_bytes
}
/// Queries the sparse memory requirements of an image;
/// wraps `vkGetImageSparseMemoryRequirements`.
///
/// Uses the two-call (count, then fill) pattern via
/// `read_into_uninitialized_vector`. The underlying C function cannot
/// fail, so the closure always reports `SUCCESS` and the helper's
/// `Result` is unwrapped — the unwrap cannot panic here.
///
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements.html>
#[inline]
pub unsafe fn get_image_sparse_memory_requirements(
&self,
image: vk::Image,
) -> Vec<vk::SparseImageMemoryRequirements> {
read_into_uninitialized_vector(|count, data| {
(self.device_fn_1_0.get_image_sparse_memory_requirements)(
self.handle(),
image,
count,
data,
);
vk::Result::SUCCESS
})
.unwrap()
}
}