diff --git a/Cargo.toml b/Cargo.toml
index 27cfe35..f0053da 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,7 +14,7 @@ rust-version = "1.60.0"
 
 [features]
 default = ["x11", "wayland", "wayland-dlopen"]
-wayland = ["wayland-backend", "wayland-client", "nix", "fastrand"]
+wayland = ["wayland-backend", "wayland-client", "memmap2", "nix", "fastrand"]
 wayland-dlopen = ["wayland-sys/dlopen"]
 x11 = ["bytemuck", "nix", "x11rb", "x11-dl"]
 
@@ -25,6 +25,7 @@ thiserror = "1.0.30"
 
 [target.'cfg(all(unix, not(any(target_vendor = "apple", target_os = "android", target_os = "redox"))))'.dependencies]
 bytemuck = { version = "1.12.3", optional = true }
+memmap2 = { version = "0.5.8", optional = true }
 nix = { version = "0.26.1", optional = true }
 wayland-backend = { version = "0.1.0", features = ["client_system"], optional = true }
 wayland-client = { version = "0.30.0", optional = true }
diff --git a/src/wayland/buffer.rs b/src/wayland/buffer.rs
index c6535a1..5517312 100644
--- a/src/wayland/buffer.rs
+++ b/src/wayland/buffer.rs
@@ -1,7 +1,9 @@
+use memmap2::MmapMut;
 use std::{
     ffi::CStr,
     fs::File,
-    os::unix::prelude::{AsRawFd, FileExt, FromRawFd},
+    os::unix::prelude::{AsRawFd, FromRawFd},
+    slice,
     sync::{
         atomic::{AtomicBool, Ordering},
         Arc,
@@ -69,9 +71,19 @@ fn create_memfile() -> File {
     panic!("Failed to generate non-existant shm name")
 }
 
+// Round size to use for pool for given dimensions, rounding up to power of 2
+fn get_pool_size(width: i32, height: i32) -> i32 {
+    ((width * height * 4) as u32).next_power_of_two() as i32
+}
+
+unsafe fn map_file(file: &File) -> MmapMut {
+    unsafe { MmapMut::map_mut(file.as_raw_fd()).expect("Failed to map shared memory") }
+}
+
 pub(super) struct WaylandBuffer {
     qh: QueueHandle<State>,
     tempfile: File,
+    map: MmapMut,
     pool: wl_shm_pool::WlShmPool,
     pool_size: i32,
     buffer: wl_buffer::WlBuffer,
@@ -82,8 +94,15 @@ pub(super) struct WaylandBuffer {
 
 impl WaylandBuffer {
     pub fn new(shm: &wl_shm::WlShm, width: i32, height: i32, qh: &QueueHandle<State>) -> Self {
+        // Calculate size to use for shm pool
+        let pool_size = get_pool_size(width, height);
+
+        // Create an `mmap` shared memory
         let tempfile = create_memfile();
-        let pool_size = width * height * 4;
+        let _ = tempfile.set_len(pool_size as u64);
+        let map = unsafe { map_file(&tempfile) };
+
+        // Create wayland shm pool and buffer
         let pool = shm.create_pool(tempfile.as_raw_fd(), pool_size, qh, ());
         let released = Arc::new(AtomicBool::new(true));
         let buffer = pool.create_buffer(
@@ -95,8 +114,10 @@ impl WaylandBuffer {
             qh,
             released.clone(),
         );
+
         Self {
             qh: qh.clone(),
+            map,
             tempfile,
             pool,
             pool_size,
@@ -119,6 +140,7 @@ impl WaylandBuffer {
             let _ = self.tempfile.set_len(size as u64);
             self.pool.resize(size);
             self.pool_size = size;
+            self.map = unsafe { map_file(&self.tempfile) };
         }
 
         // Create buffer with correct size
@@ -131,15 +153,10 @@ impl WaylandBuffer {
                 &self.qh,
                 self.released.clone(),
             );
-        }
-    }
 
-    pub fn write(&self, buffer: &[u32]) {
-        let buffer =
-            unsafe { std::slice::from_raw_parts(buffer.as_ptr() as *const u8, buffer.len() * 4) };
-        self.tempfile
-            .write_all_at(buffer, 0)
-            .expect("Failed to write buffer to temporary file.");
+            self.width = width;
+            self.height = height;
+        }
     }
 
     pub fn attach(&self, surface: &wl_surface::WlSurface) {
@@ -150,6 +167,14 @@ impl WaylandBuffer {
     pub fn released(&self) -> bool {
         self.released.load(Ordering::SeqCst)
     }
+
+    fn len(&self) -> usize {
+        self.width as usize * self.height as usize
+    }
+
+    pub unsafe fn mapped_mut(&mut self) -> &mut [u32] {
+        unsafe { slice::from_raw_parts_mut(self.map.as_mut_ptr() as *mut u32, self.len()) }
+    }
 }
 
 impl Drop for WaylandBuffer {
diff --git a/src/wayland/mod.rs b/src/wayland/mod.rs
index 4f96c4a..bbe9231 100644
--- a/src/wayland/mod.rs
+++ b/src/wayland/mod.rs
@@ -47,6 +47,8 @@ pub struct WaylandImpl {
     display: Arc,
     surface: wl_surface::WlSurface,
     buffers: Option<(WaylandBuffer, WaylandBuffer)>,
+    width: i32,
+    height: i32,
 }
 
 impl WaylandImpl {
@@ -72,31 +74,44 @@ impl WaylandImpl {
             display,
             surface,
             buffers: Default::default(),
+            width: 0,
+            height: 0,
         })
     }
 
-    fn buffer(&mut self, width: i32, height: i32) -> &WaylandBuffer {
-        self.buffers = Some(if let Some((front, mut back)) = self.buffers.take() {
-            // Swap buffers; block if back buffer not released yet
+    // Allocate front and back buffer
+    fn alloc_buffers(&mut self) {
+        self.buffers = Some((
+            WaylandBuffer::new(&self.display.shm, self.width, self.height, &self.display.qh),
+            WaylandBuffer::new(&self.display.shm, self.width, self.height, &self.display.qh),
+        ));
+    }
+
+    fn resize(&mut self, width: u32, height: u32) {
+        self.width = width as i32;
+        self.height = height as i32;
+    }
+
+    fn buffer_mut(&mut self) -> &mut [u32] {
+        if let Some((_front, back)) = &mut self.buffers {
+            // Block if back buffer not released yet
             if !back.released() {
                 let mut event_queue = self.display.event_queue.lock().unwrap();
                 while !back.released() {
                     event_queue.blocking_dispatch(&mut State).unwrap();
                 }
             }
-            back.resize(width, height);
-            (back, front)
+
+            // Resize, if buffer isn't large enough
+            back.resize(self.width, self.height);
         } else {
-            // Allocate front and back buffer
-            (
-                WaylandBuffer::new(&self.display.shm, width, height, &self.display.qh),
-                WaylandBuffer::new(&self.display.shm, width, height, &self.display.qh),
-            )
-        });
-        &self.buffers.as_ref().unwrap().0
+            self.alloc_buffers();
+        };
+
+        unsafe { self.buffers.as_mut().unwrap().1.mapped_mut() }
     }
 
-    pub(super) unsafe fn set_buffer(&mut self, buffer: &[u32], width: u16, height: u16) {
+    fn present(&mut self) {
         let _ = self
             .display
             .event_queue
@@ -104,29 +119,37 @@ impl WaylandImpl {
             .unwrap()
             .dispatch_pending(&mut State);
 
-        let surface = self.surface.clone();
-        let wayland_buffer = self.buffer(width.into(), height.into());
-        wayland_buffer.write(buffer);
-        wayland_buffer.attach(&surface);
-
-        // FIXME: Proper damaging mechanism.
-        //
-        // In order to propagate changes on compositors which track damage, for now damage the entire surface.
-        if self.surface.version() < 4 {
-            // FIXME: Accommodate scale factor since wl_surface::damage is in terms of surface coordinates while
-            // wl_surface::damage_buffer is in buffer coordinates.
+        if let Some((front, back)) = &mut self.buffers {
+            // Swap front and back buffer
+            std::mem::swap(front, back);
+
+            front.attach(&self.surface);
+
+            // FIXME: Proper damaging mechanism.
             //
-            // i32::MAX is a valid damage box (most compositors interpret the damage box as "the entire surface")
-            self.surface.damage(0, 0, i32::MAX, i32::MAX);
-        } else {
-            // Introduced in version 4, it is an error to use this request in version 3 or lower.
-            self.surface
-                .damage_buffer(0, 0, width as i32, height as i32);
+            // In order to propagate changes on compositors which track damage, for now damage the entire surface.
+            if self.surface.version() < 4 {
+                // FIXME: Accommodate scale factor since wl_surface::damage is in terms of surface coordinates while
+                // wl_surface::damage_buffer is in buffer coordinates.
+                //
+                // i32::MAX is a valid damage box (most compositors interpret the damage box as "the entire surface")
+                self.surface.damage(0, 0, i32::MAX, i32::MAX);
+            } else {
+                // Introduced in version 4, it is an error to use this request in version 3 or lower.
+                self.surface.damage_buffer(0, 0, self.width, self.height);
+            }
+
+            self.surface.commit();
         }
-        self.surface.commit();
 
         let _ = self.display.event_queue.lock().unwrap().flush();
     }
+
+    pub unsafe fn set_buffer(&mut self, buffer: &[u32], width: u16, height: u16) {
+        self.resize(width.into(), height.into());
+        self.buffer_mut().copy_from_slice(buffer);
+        self.present();
+    }
 }
 
 impl Dispatch for State {