Buffer(handle)
}
+ fn create_persistent_buffer<'device>(
+ &'device self,
+ desc: &BufferDesc,
+ ) -> PersistentBuffer<'device> {
+ assert!(desc.host_mapped);
+
+ let buffer = self.create_buffer(desc);
+ unsafe {
+ let ptr = std::ptr::NonNull::new(self.map_buffer(buffer))
+ .expect("failed to map buffer memory");
+
+ PersistentBuffer {
+ ptr,
+ len: desc.size,
+ buffer,
+ phantom: PhantomData,
+ }
+ }
+ }
+
fn create_image(&self, image_desc: &ImageDesc) -> Image {
debug_assert_ne!(image_desc.layer_count, 0, "layers must be at least one");
debug_assert_ne!(image_desc.width, 0, "width must be at least one");
Pipeline(handle)
}
+ fn debug_name_buffer(&self, buffer: BufferArg, name: &str) {
+ #[cfg(feature = "debug_markers")]
+ if let Some(debug_utils_fn) = &self.debug_utils_fn {
+ let buffer = match buffer {
+ BufferArg::Unmanaged(buffer) => buffer,
+ BufferArg::Persistent(buffer) => buffer.buffer,
+ BufferArg::Transient(_) => return,
+ };
+
+ let buffer_handle;
+ {
+ let buffer_pool = self.buffer_pool.lock();
+ let Some(buffer) = buffer_pool.get(buffer.0) else {
+ return;
+ };
+
+ buffer_handle = buffer.buffer.as_raw();
+ }
+
+ let arena = HybridArena::<512>::new();
+ let object_name = arena.alloc_cstr_from_str(name);
+
+ let name_info = vk::DebugUtilsObjectNameInfoExt {
+ object_type: vk::ObjectType::Buffer,
+ object_handle: buffer_handle,
+ object_name: object_name.as_ptr(),
+ ..default()
+ };
+
+ unsafe { debug_utils_fn.set_debug_utils_object_name_ext(self.device, &name_info) }
+ }
+ }
+
+ fn debug_name_image(&self, image: Image, name: &str) {
+ #[cfg(feature = "debug_markers")]
+ if let Some(debug_utils_fn) = &self.debug_utils_fn {
+ let image_handle;
+ let image_view_handle;
+ {
+ let image_pool = self.image_pool.lock();
+ let Some(image_holder) = image_pool.get(image.0) else {
+ return;
+ };
+
+ match image_holder {
+ VulkanImageHolder::Unique(unique) => {
+ image_handle = unique.image.image.as_raw();
+ image_view_handle = unique.view.as_raw();
+ }
+ VulkanImageHolder::Shared(shared) => {
+ image_handle = 0;
+ image_view_handle = shared.view.as_raw();
+ }
+ VulkanImageHolder::Swapchain(_) => return,
+ }
+ }
+ let arena = HybridArena::<512>::new();
+ let object_name = arena.alloc_cstr_from_str(name);
+
+ if image_handle != 0 {
+ let image_name_info = vk::DebugUtilsObjectNameInfoExt {
+ object_type: vk::ObjectType::Image,
+ object_handle: image_handle,
+ object_name: object_name.as_ptr(),
+ ..default()
+ };
+ unsafe {
+ debug_utils_fn.set_debug_utils_object_name_ext(self.device, &image_name_info)
+ }
+ }
+
+ let image_view_name_info = vk::DebugUtilsObjectNameInfoExt {
+ object_type: vk::ObjectType::ImageView,
+ object_handle: image_view_handle,
+ object_name: object_name.as_ptr(),
+ ..default()
+ };
+ unsafe {
+ debug_utils_fn.set_debug_utils_object_name_ext(self.device, &image_view_name_info)
+ }
+ }
+ }
+
fn destroy_buffer(&self, frame: &Frame, buffer: Buffer) {
if let Some(buffer) = self.buffer_pool.lock().remove(buffer.0) {
assert_eq!(
}
}
+ fn destroy_persistent_buffer(&self, frame: &Frame, buffer: PersistentBuffer) {
+ unsafe { self.unmap_buffer(buffer.buffer) }
+ self.destroy_buffer(frame, buffer.buffer)
+ }
+
fn destroy_image(&self, frame: &Frame, image: Image) {
if let Some(image_holder) = self.image_pool.lock().remove(image.0) {
let frame = self.frame(frame);
}
}
+ fn acquire_swapchain(
+ &self,
+ frame: &Frame,
+ window: &dyn AsRawWindow,
+ width: u32,
+ height: u32,
+ configurator: &mut dyn SwapchainConfigurator,
+ ) -> Result<SwapchainImage, SwapchainOutOfDateError> {
+ self.acquire_swapchain(frame, window, width, height, configurator)
+ }
+
+ fn destroy_swapchain(&self, window: &dyn AsRawWindow) {
+ self.destroy_swapchain(window)
+ }
+
+ unsafe fn map_buffer(&self, buffer: Buffer) -> *mut u8 {
+ let mut buffer_pool = self.buffer_pool.lock();
+ let buffer = buffer_pool.get_mut(buffer.0).unwrap();
+ buffer.map_count += 1;
+ buffer.memory.mapped_ptr()
+ }
+
+ unsafe fn unmap_buffer(&self, buffer: Buffer) {
+ let mut buffer_pool = self.buffer_pool.lock();
+ let buffer = buffer_pool.get_mut(buffer.0).unwrap();
+ assert!(buffer.map_count > 0);
+ buffer.map_count -= 1;
+ }
+
fn request_transient_buffer<'a>(
&self,
frame: &'a Frame,
}
}
- fn cmd_insert_marker(&self, cmd_encoder: &mut CmdEncoder, label_name: &str, color: [f32; 4]) {
+ fn cmd_insert_debug_marker(
+ &self,
+ cmd_encoder: &mut CmdEncoder,
+ label_name: &str,
+ color: [f32; 4],
+ ) {
#[cfg(feature = "debug_markers")]
if let Some(debug_utils_fn) = &self.debug_utils_fn {
let arena = HybridArena::<256>::new();
}
}
- fn cmd_begin_marker(&self, cmd_encoder: &mut CmdEncoder, label_name: &str, color: [f32; 4]) {
+ fn cmd_begin_debug_marker(
+ &self,
+ cmd_encoder: &mut CmdEncoder,
+ label_name: &str,
+ color: [f32; 4],
+ ) {
#[cfg(feature = "debug_markers")]
if let Some(debug_utils_fn) = &self.debug_utils_fn {
let arena = HybridArena::<256>::new();
}
}
- fn cmd_end_marker(&self, cmd_encoder: &mut CmdEncoder) {
+ fn cmd_end_debug_marker(&self, cmd_encoder: &mut CmdEncoder) {
#[cfg(feature = "debug_markers")]
if let Some(debug_utils_fn) = &self.debug_utils_fn {
let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
}
}
- fn cmd_compute_touch_swapchain(&self, cmd_encoder: &mut CmdEncoder, image: Image) {
- let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
-
- match self.image_pool.lock().get(image.0) {
- Some(VulkanImageHolder::Swapchain(image)) => {
- assert!(
- !cmd_encoder.swapchains_touched.contains_key(&image.surface),
- "swapchain attached multiple times in a command buffer"
- );
- cmd_encoder.swapchains_touched.insert(
- image.surface,
- VulkanTouchedSwapchain {
- image: image.image,
- layout: vk::ImageLayout::General,
- access_mask: vk::AccessFlags2::SHADER_STORAGE_WRITE,
- stage_mask: vk::PipelineStageFlags2::COMPUTE_SHADER,
- },
- );
-
- // Transition swapchain image to shader storage write
- let image_memory_barriers = &[vk::ImageMemoryBarrier2 {
- src_stage_mask: vk::PipelineStageFlags2::COMPUTE_SHADER,
- src_access_mask: vk::AccessFlags2::NONE,
- dst_stage_mask: vk::PipelineStageFlags2::COMPUTE_SHADER,
- dst_access_mask: vk::AccessFlags2::SHADER_STORAGE_WRITE,
- src_queue_family_index: self.universal_queue_family_index,
- dst_queue_family_index: self.universal_queue_family_index,
- old_layout: vk::ImageLayout::Undefined,
- new_layout: vk::ImageLayout::General,
- image: image.image,
- subresource_range: vk::ImageSubresourceRange {
- aspect_mask: vk::ImageAspectFlags::COLOR,
- base_mip_level: 0,
- level_count: !0,
- base_array_layer: 0,
- layer_count: !0,
- },
- ..default()
- }];
-
- let dependency_info = vk::DependencyInfo {
- image_memory_barriers: image_memory_barriers.into(),
- ..default()
- };
-
- unsafe {
- self.device_fn
- .cmd_pipeline_barrier2(cmd_encoder.command_buffer, &dependency_info)
- };
- }
- _ => panic!(),
- }
- }
-
- fn cmd_barrier(
+ fn cmd_set_bind_group(
&self,
+ frame: &Frame,
cmd_encoder: &mut CmdEncoder,
- global_barrier: Option<&GlobalBarrier>,
- image_barriers: &[ImageBarrier],
+ layout: BindGroupLayout,
+ bind_group_index: u32,
+ bindings: &[Bind],
) {
let arena = HybridArena::<4096>::new();
- let memory_barriers = arena.alloc_slice_fill_iter(
- global_barrier
- .iter()
- .map(|global_barrier| vulkan_memory_barrier(global_barrier)),
- );
-
- let image_memory_barriers =
- arena.alloc_slice_fill_iter(image_barriers.iter().map(|image_barrier| {
- let image = self
- .image_pool
- .lock()
- .get(image_barrier.image.0)
- .expect("invalid image handle")
- .image();
- let subresource_range = vulkan_subresource_range(&image_barrier.subresource_range);
- vulkan_image_memory_barrier(image_barrier, image, subresource_range)
- }));
-
- let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+ let descriptor_set_layout = self
+ .bind_group_layout_pool
+ .lock()
+ .get(layout.0)
+ .unwrap()
+ .descriptor_set_layout;
- #[cfg(debug_assertions)]
- debug_assert!(!cmd_encoder.in_render_pass);
+ let frame = self.frame(frame);
+ let per_thread = frame.per_thread.get(cmd_encoder.thread_token);
- unsafe {
- self.device_fn.cmd_pipeline_barrier2(
- cmd_encoder.command_buffer,
- &vk::DependencyInfo {
- memory_barriers: memory_barriers.into(),
+ let mut descriptor_pool = per_thread.descriptor_pool.get();
+ let mut allocated_pool = false;
+ let descriptor_set = loop {
+ if descriptor_pool.is_null() {
+ // Need to fetch a new descriptor pool
+ descriptor_pool = self.request_descriptor_pool();
+ per_thread.descriptor_pool.set(descriptor_pool);
+ frame.recycle_descriptor_pool(descriptor_pool);
+ allocated_pool = true;
+ }
+ let allocate_info = vk::DescriptorSetAllocateInfo {
+ descriptor_pool,
+ set_layouts: std::slice::from_ref(&descriptor_set_layout).into(),
+ ..default()
+ };
+ let mut descriptor_set = vk::DescriptorSet::null();
+ match unsafe {
+ self.device_fn.allocate_descriptor_sets(
+ self.device,
+ &allocate_info,
+ &mut descriptor_set,
+ )
+ } {
+ vk::Result::Success => break descriptor_set,
+ _ => {
+ // If we fail to allocate after just creating a new descriptor set, then we'll
+ // never be able to allocate one. :'(
+ if allocated_pool {
+ panic!("failed to allocate descriptor set")
+ }
+ }
+ }
+ };
- image_memory_barriers: image_memory_barriers.into(),
- ..default()
- },
- )
- }
- }
-
- unsafe fn cmd_push_constants_unchecked(
- &self,
- cmd_encoder: &mut CmdEncoder,
- stage_flags: ShaderStageFlags,
- offset: u32,
- size: u32,
- src: *const u8,
- ) {
- let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
- let command_buffer = cmd_encoder.command_buffer;
-
- let VulkanBoundPipeline {
- pipeline_layout,
- pipeline_bind_point: _,
- } = cmd_encoder
- .bound_pipeline
- .as_ref()
- .expect("cannot push constants without a pipeline bound")
- .clone();
-
- let stage_flags = vulkan_shader_stage_flags(stage_flags);
- self.device_fn.cmd_push_constants(
- command_buffer,
- pipeline_layout,
- stage_flags,
- offset,
- size,
- src as *const std::ffi::c_void,
- )
- }
-
- fn cmd_copy_buffer_to_image(
- &self,
- cmd_encoder: &mut CmdEncoder,
- src_buffer: BufferArg,
- dst_image: Image,
- dst_image_layout: ImageLayout,
- copies: &[BufferImageCopy],
- ) {
- let arena = HybridArena::<4096>::new();
-
- let (src_buffer, base_offset, _range) = self.unwrap_buffer_arg(&src_buffer);
-
- let regions = arena.alloc_slice_fill_iter(copies.iter().map(|copy| vk::BufferImageCopy {
- buffer_offset: copy.buffer_offset + base_offset,
- buffer_row_length: copy.buffer_row_length,
- buffer_image_height: copy.buffer_image_height,
- image_subresource: vulkan_subresource_layers(&copy.image_subresource),
- image_offset: copy.image_offset.into(),
- image_extent: copy.image_extent.into(),
- }));
-
- let dst_image = self
- .image_pool
- .lock()
- .get(dst_image.0)
- .expect("invalid image handle")
- .image();
-
- let dst_image_layout = match dst_image_layout {
- ImageLayout::Optimal => vk::ImageLayout::TransferDstOptimal,
- ImageLayout::General => vk::ImageLayout::General,
- };
-
- let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
- unsafe {
- self.device_fn.cmd_copy_buffer_to_image(
- command_buffer,
- src_buffer,
- dst_image,
- dst_image_layout,
- regions,
- )
- }
- }
-
- fn cmd_blit_image(
- &self,
- cmd_encoder: &mut CmdEncoder,
- src_image: Image,
- src_image_layout: ImageLayout,
- dst_image: Image,
- dst_image_layout: ImageLayout,
- regions: &[ImageBlit],
- ) {
- let arena = HybridArena::<4096>::new();
-
- let regions = arena.alloc_slice_fill_iter(regions.iter().map(|blit| vk::ImageBlit {
- src_subresource: vulkan_subresource_layers(&blit.src_subresource),
- src_offsets: [blit.src_offset_min.into(), blit.src_offset_max.into()],
- dst_subresource: vulkan_subresource_layers(&blit.dst_subresource),
- dst_offsets: [blit.dst_offset_min.into(), blit.dst_offset_max.into()],
- }));
-
- let src_image = self
- .image_pool
- .lock()
- .get(src_image.0)
- .expect("invalid src image handle")
- .image();
-
- let src_image_layout = match src_image_layout {
- ImageLayout::Optimal => vk::ImageLayout::TransferSrcOptimal,
- ImageLayout::General => vk::ImageLayout::General,
- };
-
- let dst_image = self
- .image_pool
- .lock()
- .get(dst_image.0)
- .expect("invalid dst image handle")
- .image();
-
- let dst_image_layout = match dst_image_layout {
- ImageLayout::Optimal => vk::ImageLayout::TransferDstOptimal,
- ImageLayout::General => vk::ImageLayout::General,
- };
-
- let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
- unsafe {
- self.device_fn.cmd_blit_image(
- command_buffer,
- src_image,
- src_image_layout,
- dst_image,
- dst_image_layout,
- regions,
- vk::Filter::Linear,
- );
- }
- }
-
- fn cmd_set_bind_group(
- &self,
- frame: &Frame,
- cmd_encoder: &mut CmdEncoder,
- layout: BindGroupLayout,
- bind_group_index: u32,
- bindings: &[Bind],
- ) {
- let arena = HybridArena::<4096>::new();
-
- let descriptor_set_layout = self
- .bind_group_layout_pool
- .lock()
- .get(layout.0)
- .unwrap()
- .descriptor_set_layout;
-
- let frame = self.frame(frame);
- let per_thread = frame.per_thread.get(cmd_encoder.thread_token);
-
- let mut descriptor_pool = per_thread.descriptor_pool.get();
- let mut allocated_pool = false;
- let descriptor_set = loop {
- if descriptor_pool.is_null() {
- // Need to fetch a new descriptor pool
- descriptor_pool = self.request_descriptor_pool();
- per_thread.descriptor_pool.set(descriptor_pool);
- frame.recycle_descriptor_pool(descriptor_pool);
- allocated_pool = true;
- }
- let allocate_info = vk::DescriptorSetAllocateInfo {
- descriptor_pool,
- set_layouts: std::slice::from_ref(&descriptor_set_layout).into(),
- ..default()
- };
- let mut descriptor_set = vk::DescriptorSet::null();
- match unsafe {
- self.device_fn.allocate_descriptor_sets(
- self.device,
- &allocate_info,
- &mut descriptor_set,
- )
- } {
- vk::Result::Success => break descriptor_set,
- _ => {
- // If we fail to allocate after just creating a new descriptor set, then we'll
- // never be able to allocate one. :'(
- if allocated_pool {
- panic!("failed to allocate descriptor set")
- }
- }
- }
- };
-
- let write_descriptors_iter = bindings.iter().map(|bind| match bind.typed {
- TypedBind::Sampler(samplers) => {
- let sampler_infos_iter = samplers.iter().map(|sampler| {
- let sampler = self.sampler_pool.lock().get(sampler.0).unwrap().0;
- vk::DescriptorImageInfo {
- image_layout: vk::ImageLayout::Undefined,
- image_view: vk::ImageView::null(),
- sampler,
- }
- });
- let image_infos = arena.alloc_slice_fill_iter(sampler_infos_iter);
- vk::WriteDescriptorSet {
- dst_set: descriptor_set,
- dst_binding: bind.binding,
- dst_array_element: bind.array_element,
- descriptor_count: image_infos.len() as u32,
- descriptor_type: vk::DescriptorType::Sampler,
- image_info: image_infos.as_ptr(),
+ let write_descriptors_iter = bindings.iter().map(|bind| match bind.typed {
+ TypedBind::Sampler(samplers) => {
+ let sampler_infos_iter = samplers.iter().map(|sampler| {
+ let sampler = self.sampler_pool.lock().get(sampler.0).unwrap().0;
+ vk::DescriptorImageInfo {
+ image_layout: vk::ImageLayout::Undefined,
+ image_view: vk::ImageView::null(),
+ sampler,
+ }
+ });
+ let image_infos = arena.alloc_slice_fill_iter(sampler_infos_iter);
+ vk::WriteDescriptorSet {
+ dst_set: descriptor_set,
+ dst_binding: bind.binding,
+ dst_array_element: bind.array_element,
+ descriptor_count: image_infos.len() as u32,
+ descriptor_type: vk::DescriptorType::Sampler,
+ image_info: image_infos.as_ptr(),
..default()
}
}
let write_descriptors = arena.alloc_slice_fill_iter(write_descriptors_iter);
unsafe {
- self.device_fn
- .update_descriptor_sets(self.device, write_descriptors, &[])
- };
+ self.device_fn
+ .update_descriptor_sets(self.device, write_descriptors, &[])
+ };
+
+ let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+ let VulkanBoundPipeline {
+ pipeline_layout,
+ pipeline_bind_point,
+ } = cmd_encoder
+ .bound_pipeline
+ .as_ref()
+ .expect("cannot set bind groups without a pipeline bound")
+ .clone();
+
+ let command_buffer = cmd_encoder.command_buffer;
+
+ unsafe {
+ self.device_fn.cmd_bind_descriptor_sets(
+ command_buffer,
+ pipeline_bind_point,
+ pipeline_layout,
+ bind_group_index,
+ &[descriptor_set],
+ &[],
+ )
+ }
+ }
+
+ fn cmd_set_index_buffer(
+ &self,
+ cmd_encoder: &mut CmdEncoder,
+ buffer: BufferArg,
+ offset: u64,
+ index_type: IndexType,
+ ) {
+ let (buffer, base_offset, _range) = self.unwrap_buffer_arg(&buffer);
+
+ let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
+ let index_type = vulkan_index_type(index_type);
+ unsafe {
+ self.device_fn.cmd_bind_index_buffer(
+ command_buffer,
+ buffer,
+ offset + base_offset,
+ index_type,
+ )
+ }
+ }
+
+ fn cmd_compute_touch_swapchain(&self, cmd_encoder: &mut CmdEncoder, image: Image) {
+ let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+
+ match self.image_pool.lock().get(image.0) {
+ Some(VulkanImageHolder::Swapchain(image)) => {
+ assert!(
+ !cmd_encoder.swapchains_touched.contains_key(&image.surface),
+ "swapchain attached multiple times in a command buffer"
+ );
+ cmd_encoder.swapchains_touched.insert(
+ image.surface,
+ VulkanTouchedSwapchain {
+ image: image.image,
+ layout: vk::ImageLayout::General,
+ access_mask: vk::AccessFlags2::SHADER_STORAGE_WRITE,
+ stage_mask: vk::PipelineStageFlags2::COMPUTE_SHADER,
+ },
+ );
+
+ // Transition swapchain image to shader storage write
+ let image_memory_barriers = &[vk::ImageMemoryBarrier2 {
+ src_stage_mask: vk::PipelineStageFlags2::COMPUTE_SHADER,
+ src_access_mask: vk::AccessFlags2::NONE,
+ dst_stage_mask: vk::PipelineStageFlags2::COMPUTE_SHADER,
+ dst_access_mask: vk::AccessFlags2::SHADER_STORAGE_WRITE,
+ src_queue_family_index: self.universal_queue_family_index,
+ dst_queue_family_index: self.universal_queue_family_index,
+ old_layout: vk::ImageLayout::Undefined,
+ new_layout: vk::ImageLayout::General,
+ image: image.image,
+ subresource_range: vk::ImageSubresourceRange {
+ aspect_mask: vk::ImageAspectFlags::COLOR,
+ base_mip_level: 0,
+ level_count: !0,
+ base_array_layer: 0,
+ layer_count: !0,
+ },
+ ..default()
+ }];
+
+ let dependency_info = vk::DependencyInfo {
+ image_memory_barriers: image_memory_barriers.into(),
+ ..default()
+ };
+
+ unsafe {
+ self.device_fn
+ .cmd_pipeline_barrier2(cmd_encoder.command_buffer, &dependency_info)
+ };
+ }
+ _ => panic!(),
+ }
+ }
+
+ fn cmd_set_pipeline(&self, cmd_encoder: &mut CmdEncoder, pipeline: Pipeline) {
+ let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+
+ let vk_pipeline;
+ let pipeline_layout;
+ let pipeline_bind_point;
+ {
+ let pipeline_pool = self.pipeline_pool.lock();
+ let pipeline = pipeline_pool.get(pipeline.0).unwrap();
+ vk_pipeline = pipeline.pipeline;
+ pipeline_layout = pipeline.pipeline_layout.pipeline_layout;
+ pipeline_bind_point = pipeline.pipeline_bind_point;
+ }
+
+ cmd_encoder.bound_pipeline = Some(VulkanBoundPipeline {
+ pipeline_layout,
+ pipeline_bind_point,
+ });
+
+ let command_buffer = cmd_encoder.command_buffer;
+
+ unsafe {
+ self.device_fn
+ .cmd_bind_pipeline(command_buffer, pipeline_bind_point, vk_pipeline)
+ };
+ }
+
+ fn cmd_set_viewports(&self, cmd_encoder: &mut CmdEncoder, viewports: &[crate::Viewport]) {
+ let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
+ unsafe {
+ self.device_fn.cmd_set_viewport_with_count(
+ command_buffer,
+ std::mem::transmute::<_, &[vk::Viewport]>(viewports), // yolo
+ );
+ }
+ }
+
+ fn cmd_set_scissors(&self, cmd_encoder: &mut CmdEncoder, scissors: &[crate::Scissor]) {
+ let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
+ unsafe {
+ self.device_fn.cmd_set_scissor_with_count(
+ command_buffer,
+ std::mem::transmute::<_, &[vk::Rect2d]>(scissors), // yolo
+ );
+ }
+ }
+
+ fn cmd_barrier(
+ &self,
+ cmd_encoder: &mut CmdEncoder,
+ global_barrier: Option<&GlobalBarrier>,
+ image_barriers: &[ImageBarrier],
+ ) {
+ let arena = HybridArena::<4096>::new();
+
+ let memory_barriers = arena.alloc_slice_fill_iter(
+ global_barrier
+ .iter()
+ .map(|global_barrier| vulkan_memory_barrier(global_barrier)),
+ );
+
+ let image_memory_barriers =
+ arena.alloc_slice_fill_iter(image_barriers.iter().map(|image_barrier| {
+ let image = self
+ .image_pool
+ .lock()
+ .get(image_barrier.image.0)
+ .expect("invalid image handle")
+ .image();
+ let subresource_range = vulkan_subresource_range(&image_barrier.subresource_range);
+ vulkan_image_memory_barrier(image_barrier, image, subresource_range)
+ }));
+
+ let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+
+ #[cfg(debug_assertions)]
+ debug_assert!(!cmd_encoder.in_render_pass);
+
+ unsafe {
+ self.device_fn.cmd_pipeline_barrier2(
+ cmd_encoder.command_buffer,
+ &vk::DependencyInfo {
+ memory_barriers: memory_barriers.into(),
+
+ image_memory_barriers: image_memory_barriers.into(),
+ ..default()
+ },
+ )
+ }
+ }
+ unsafe fn cmd_push_constants_unchecked(
+ &self,
+ cmd_encoder: &mut CmdEncoder,
+ stage_flags: ShaderStageFlags,
+ offset: u32,
+ size: u32,
+ src: *const u8,
+ ) {
let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+ let command_buffer = cmd_encoder.command_buffer;
+
let VulkanBoundPipeline {
pipeline_layout,
- pipeline_bind_point,
+ pipeline_bind_point: _,
} = cmd_encoder
.bound_pipeline
.as_ref()
- .expect("cannot set bind groups without a pipeline bound")
+ .expect("cannot push constants without a pipeline bound")
.clone();
- let command_buffer = cmd_encoder.command_buffer;
-
- unsafe {
- self.device_fn.cmd_bind_descriptor_sets(
- command_buffer,
- pipeline_bind_point,
- pipeline_layout,
- bind_group_index,
- &[descriptor_set],
- &[],
- )
- }
+ let stage_flags = vulkan_shader_stage_flags(stage_flags);
+ self.device_fn.cmd_push_constants(
+ command_buffer,
+ pipeline_layout,
+ stage_flags,
+ offset,
+ size,
+ src as *const std::ffi::c_void,
+ )
}
- fn cmd_set_index_buffer(
+ fn cmd_copy_buffer_to_image(
&self,
cmd_encoder: &mut CmdEncoder,
- buffer: BufferArg,
- offset: u64,
- index_type: IndexType,
+ src_buffer: BufferArg,
+ dst_image: Image,
+ dst_image_layout: ImageLayout,
+ copies: &[BufferImageCopy],
) {
- let (buffer, base_offset, _range) = self.unwrap_buffer_arg(&buffer);
+ let arena = HybridArena::<4096>::new();
+
+ let (src_buffer, base_offset, _range) = self.unwrap_buffer_arg(&src_buffer);
+
+ let regions = arena.alloc_slice_fill_iter(copies.iter().map(|copy| vk::BufferImageCopy {
+ buffer_offset: copy.buffer_offset + base_offset,
+ buffer_row_length: copy.buffer_row_length,
+ buffer_image_height: copy.buffer_image_height,
+ image_subresource: vulkan_subresource_layers(&copy.image_subresource),
+ image_offset: copy.image_offset.into(),
+ image_extent: copy.image_extent.into(),
+ }));
+
+ let dst_image = self
+ .image_pool
+ .lock()
+ .get(dst_image.0)
+ .expect("invalid image handle")
+ .image();
+
+ let dst_image_layout = match dst_image_layout {
+ ImageLayout::Optimal => vk::ImageLayout::TransferDstOptimal,
+ ImageLayout::General => vk::ImageLayout::General,
+ };
let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
- let index_type = vulkan_index_type(index_type);
unsafe {
- self.device_fn.cmd_bind_index_buffer(
+ self.device_fn.cmd_copy_buffer_to_image(
command_buffer,
- buffer,
- offset + base_offset,
- index_type,
+ src_buffer,
+ dst_image,
+ dst_image_layout,
+ regions,
)
}
}
- fn cmd_set_pipeline(&self, cmd_encoder: &mut CmdEncoder, pipeline: Pipeline) {
- let cmd_encoder = self.cmd_encoder_mut(cmd_encoder);
+ fn cmd_blit_image(
+ &self,
+ cmd_encoder: &mut CmdEncoder,
+ src_image: Image,
+ src_image_layout: ImageLayout,
+ dst_image: Image,
+ dst_image_layout: ImageLayout,
+ regions: &[ImageBlit],
+ ) {
+ let arena = HybridArena::<4096>::new();
- let vk_pipeline;
- let pipeline_layout;
- let pipeline_bind_point;
- {
- let pipeline_pool = self.pipeline_pool.lock();
- let pipeline = pipeline_pool.get(pipeline.0).unwrap();
- vk_pipeline = pipeline.pipeline;
- pipeline_layout = pipeline.pipeline_layout.pipeline_layout;
- pipeline_bind_point = pipeline.pipeline_bind_point;
- }
+ let regions = arena.alloc_slice_fill_iter(regions.iter().map(|blit| vk::ImageBlit {
+ src_subresource: vulkan_subresource_layers(&blit.src_subresource),
+ src_offsets: [blit.src_offset_min.into(), blit.src_offset_max.into()],
+ dst_subresource: vulkan_subresource_layers(&blit.dst_subresource),
+ dst_offsets: [blit.dst_offset_min.into(), blit.dst_offset_max.into()],
+ }));
- cmd_encoder.bound_pipeline = Some(VulkanBoundPipeline {
- pipeline_layout,
- pipeline_bind_point,
- });
+ let src_image = self
+ .image_pool
+ .lock()
+ .get(src_image.0)
+ .expect("invalid src image handle")
+ .image();
- let command_buffer = cmd_encoder.command_buffer;
+ let src_image_layout = match src_image_layout {
+ ImageLayout::Optimal => vk::ImageLayout::TransferSrcOptimal,
+ ImageLayout::General => vk::ImageLayout::General,
+ };
- unsafe {
- self.device_fn
- .cmd_bind_pipeline(command_buffer, pipeline_bind_point, vk_pipeline)
+ let dst_image = self
+ .image_pool
+ .lock()
+ .get(dst_image.0)
+ .expect("invalid dst image handle")
+ .image();
+
+ let dst_image_layout = match dst_image_layout {
+ ImageLayout::Optimal => vk::ImageLayout::TransferDstOptimal,
+ ImageLayout::General => vk::ImageLayout::General,
};
+
+ let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
+ unsafe {
+ self.device_fn.cmd_blit_image(
+ command_buffer,
+ src_image,
+ src_image_layout,
+ dst_image,
+ dst_image_layout,
+ regions,
+ vk::Filter::Linear,
+ );
+ }
}
fn cmd_begin_rendering(&self, cmd_encoder: &mut CmdEncoder, desc: &crate::RenderingDesc) {
unsafe { self.device_fn.cmd_end_rendering(cmd_encoder.command_buffer) }
}
- fn cmd_set_viewports(&self, cmd_encoder: &mut CmdEncoder, viewports: &[crate::Viewport]) {
- let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
- unsafe {
- self.device_fn.cmd_set_viewport_with_count(
- command_buffer,
- std::mem::transmute::<_, &[vk::Viewport]>(viewports), // yolo
- );
- }
- }
-
- fn cmd_set_scissors(&self, cmd_encoder: &mut CmdEncoder, scissors: &[crate::Scissor]) {
- let command_buffer = self.cmd_encoder_mut(cmd_encoder).command_buffer;
- unsafe {
- self.device_fn.cmd_set_scissor_with_count(
- command_buffer,
- std::mem::transmute::<_, &[vk::Rect2d]>(scissors), // yolo
- );
- }
- }
-
fn cmd_draw(
&self,
cmd_encoder: &mut CmdEncoder,
self.wsi_end_frame(self.frame_mut(&mut frame));
self.frame_counter.release(frame);
}
-
- unsafe fn map_buffer(&self, buffer: Buffer) -> *mut u8 {
- let mut buffer_pool = self.buffer_pool.lock();
- let buffer = buffer_pool.get_mut(buffer.0).unwrap();
- buffer.map_count += 1;
- buffer.memory.mapped_ptr()
- }
-
- unsafe fn unmap_buffer(&self, buffer: Buffer) {
- let mut buffer_pool = self.buffer_pool.lock();
- let buffer = buffer_pool.get_mut(buffer.0).unwrap();
- assert!(buffer.map_count > 0);
- buffer.map_count -= 1;
- }
-
- fn acquire_swapchain(
- &self,
- frame: &Frame,
- window: &dyn AsRawWindow,
- width: u32,
- height: u32,
- configurator: &mut dyn SwapchainConfigurator,
- ) -> Result<SwapchainImage, SwapchainOutOfDateError> {
- self.acquire_swapchain(frame, window, width, height, configurator)
- }
-
- fn destroy_swapchain(&self, window: &dyn AsRawWindow) {
- self.destroy_swapchain(window)
- }
-
- fn create_persistent_buffer<'device>(
- &'device self,
- desc: &BufferDesc,
- ) -> PersistentBuffer<'device> {
- assert!(desc.host_mapped);
-
- let buffer = self.create_buffer(desc);
- unsafe {
- let ptr = std::ptr::NonNull::new(self.map_buffer(buffer))
- .expect("failed to map buffer memory");
-
- PersistentBuffer {
- ptr,
- len: desc.size,
- buffer,
- phantom: PhantomData,
- }
- }
- }
-
- fn destroy_persistent_buffer(&self, frame: &Frame, buffer: PersistentBuffer) {
- unsafe { self.unmap_buffer(buffer.buffer) }
- self.destroy_buffer(frame, buffer.buffer)
- }
}
impl VulkanDevice {