diff --git a/Cargo.toml b/Cargo.toml index f37e23ea..2f916c65 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -86,6 +86,10 @@ wgpu = { version = "0.20", optional = true, features = ["dx12", "hal"] } d3d12 = "0.20" winapi = { version = "0.3", optional = true } +[target.'cfg(target_os = "linux")'.dependencies] +ashpd = "0.9.1" +pipewire = "0.8.0" + [dev-dependencies] futures = "0.3" tokio = { version = "1.37", features = ["rt", "macros", "rt-multi-thread"] } diff --git a/src/feature/bitmap/mod.rs b/src/feature/bitmap/mod.rs index 77176479..8264310b 100644 --- a/src/feature/bitmap/mod.rs +++ b/src/feature/bitmap/mod.rs @@ -36,6 +36,9 @@ use windows::Win32::System::WinRT::Direct3D11::IDirect3DDxgiInterfaceAccess; #[cfg(target_os = "windows")] use windows::Win32::Graphics::Direct3D11::D3D11_USAGE_DYNAMIC; +#[cfg(target_os = "linux")] +use pipewire::spa::param::video::VideoFormat; + #[derive(Clone)] struct BitmapPool { free_bitmaps_and_count: Arc>, usize)>>, @@ -526,6 +529,22 @@ impl VideoFrameBitmapInternal for VideoFrame { Err(VideoFrameBitmapError::Other("Failed to lock iosurface".to_string())) } } + #[cfg(target_os = "linux")] + { + match self.impl_video_frame.format.format() { + VideoFormat::BGRA | VideoFormat::BGRx => { + let plane_ptr = VideoFramePlanePtr { + ptr: self.impl_video_frame.data, + width: self.impl_video_frame.size.width as usize, + height: self.impl_video_frame.size.height as usize, + bytes_per_row: self.impl_video_frame.size.width as usize * 4, + }; + + output_mapping(VideoFrameDataCopyPtrs::Bgra8888(plane_ptr)) + } + _ => Err(VideoFrameBitmapError::Other("Invalid pixel format".to_string())), + } + } } } diff --git a/src/platform/linux_wayland/capture_content.rs b/src/platform/linux_wayland/capture_content.rs new file mode 100644 index 00000000..117b22fb --- /dev/null +++ b/src/platform/linux_wayland/capture_content.rs @@ -0,0 +1,316 @@ +use std::rc::Rc; + +use ashpd::{ + desktop::{ + screencast::{CursorMode, Screencast, SourceType}, + Session, + }, + enumflags2::BitFlags, +}; + +use crate::{ + capturable_content::{CapturableContentError, CapturableContentFilter}, + prelude::Rect, +}; + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct WaylandCapturableWindow { + pub pw_node_id: u32, + pub position: Option<(i32, i32)>, + pub size: Option<(i32, i32)>, + pub id: Option, + pub mapping_id: Option, + pub virt: bool, + pub cursor_as_metadata: bool, +} + +impl WaylandCapturableWindow { + pub fn from_impl(window: Self) -> Self { + window + } + + pub fn title(&self) -> String { + String::from("n/a") + } + + pub fn rect(&self) -> Rect { + let origin = self.position.unwrap_or((0, 0)); + let size = self.size.unwrap_or((0, 0)); + Rect { + origin: crate::prelude::Point { + x: origin.0 as f64, + y: origin.1 as f64, + }, + size: crate::prelude::Size { + width: size.0 as f64, + height: size.1 as f64, + }, + } + } + + pub fn application(&self) -> WaylandCapturableApplication { + WaylandCapturableApplication(()) + } + + pub fn is_visible(&self) -> bool { + true + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Hash)] +pub struct WaylandCapturableDisplay { + pub pw_node_id: u32, + pub position: Option<(i32, i32)>, + pub size: Option<(i32, i32)>, + pub id: Option, + pub mapping_id: Option, + pub cursor_as_metadata: bool, +} + +impl WaylandCapturableDisplay { + pub fn from_impl(window: Self) -> Self { + window + } + + pub fn rect(&self) -> Rect { + let origin = self.position.unwrap_or((0, 0)); + let size = self.size.unwrap_or((0, 0)); + Rect { + origin: crate::prelude::Point { + x: origin.0 as 
f64, + y: origin.1 as f64, + }, + size: crate::prelude::Size { + width: size.0 as f64, + height: size.1 as f64, + }, + } + } +} + +pub struct WaylandCapturableApplication(()); + +impl WaylandCapturableApplication { + pub fn identifier(&self) -> String { + String::from("n/a") + } + + pub fn name(&self) -> String { + String::from("n/a") + } + + pub fn pid(&self) -> i32 { + -1 + } +} + +pub struct WaylandCapturableContent { + pub windows: Vec, + pub displays: Vec, + _sc: Rc>, + _sc_session: Rc>>, +} + +impl WaylandCapturableContent { + fn source_types_filter(filter: CapturableContentFilter) -> BitFlags { + let mut bitflags = BitFlags::empty(); + if filter.displays { + bitflags |= SourceType::Monitor | SourceType::Virtual; + } + if let Some(windows_filter) = filter.windows { + if windows_filter.desktop_windows || windows_filter.onscreen_only { + bitflags |= SourceType::Window; + } + } + bitflags + } + + pub async fn new(filter: CapturableContentFilter) -> Result { + let screencast = Screencast::new() + .await + .map_err(|e| CapturableContentError::Other(e.to_string()))?; + + // TODO: Fix dual cursor in kwin when capture monitor and cursor as metadata + // let cursor_as_metadata = screencast + // .available_cursor_modes() + // .await + // .map_err(|e| CapturableContentError::Other(e.to_string()))? + // .contains(CursorMode::Metadata); + let cursor_as_metadata = false; + + let source_types = Self::source_types_filter(filter) + // Some portal implementations freak out when we include supported an not supported source types + & screencast.available_source_types().await.map_err(|e| CapturableContentError::Other(e.to_string()))?; + + if source_types.is_empty() { + return Err(CapturableContentError::Other( + "Unsupported content filter".to_string(), + )); + } + + let session = screencast + .create_session() + .await + .map_err(|e| CapturableContentError::Other(e.to_string()))?; + + // INVESTIGATE: Show cursor as default when metadata-mode is not available? + let cursor_mode = if cursor_as_metadata { + CursorMode::Metadata + } else { + CursorMode::Embedded + }; + + screencast + .select_sources( + &session, + cursor_mode, + source_types, + false, + None, + ashpd::desktop::PersistMode::DoNot, + ) + .await + .map_err(|e| CapturableContentError::Other(e.to_string()))? + .response() + .map_err(|e| CapturableContentError::Other(e.to_string()))?; + let streams = screencast + .start(&session, &ashpd::WindowIdentifier::None) + .await + .map_err(|e| CapturableContentError::Other(e.to_string()))? 
+ .response() + .map_err(|e| CapturableContentError::Other(e.to_string()))?; + + let mut sources = Self { + windows: Vec::new(), + displays: Vec::new(), + _sc: Rc::new(screencast), + _sc_session: Rc::new(session), + }; + for stream in streams.streams() { + if let Some(source_type) = stream.source_type() { + match source_type { + SourceType::Window | SourceType::Virtual => { + sources.windows.push(WaylandCapturableWindow { + pw_node_id: stream.pipe_wire_node_id(), + position: stream.position(), + size: stream.size(), + id: stream.id().map(|id| id.to_string()), + mapping_id: stream.mapping_id().map(|id| id.to_string()), + virt: source_type == SourceType::Virtual, + cursor_as_metadata, + }); + continue; + } + SourceType::Monitor => {} + } + } + // If the stream source_type is `None`, then treat it as a display + sources.displays.push(WaylandCapturableDisplay { + pw_node_id: stream.pipe_wire_node_id(), + position: stream.position(), + size: stream.size(), + id: stream.id().map(|id| id.to_string()), + mapping_id: stream.mapping_id().map(|id| id.to_string()), + cursor_as_metadata, + }); + } + + Ok(sources) + } +} + +#[derive(Clone, Default)] +pub(crate) struct WaylandCapturableContentFilter(()); + +impl WaylandCapturableContentFilter { + pub(crate) const DEFAULT: Self = Self(()); + pub(crate) const NORMAL_WINDOWS: Self = Self(()); +} + +#[cfg(test)] +mod tests { + use ashpd::{desktop::screencast::SourceType, enumflags2::BitFlags}; + + use crate::{ + platform::platform_impl::{ + capture_content::WaylandCapturableContent, ImplCapturableContentFilter, + }, + prelude::{CapturableContentFilter, CapturableWindowFilter}, + }; + + #[test] + fn source_type_filter_conversion_displays() { + assert_eq!( + WaylandCapturableContent::source_types_filter(CapturableContentFilter { + windows: None, + displays: true, + impl_capturable_content_filter: ImplCapturableContentFilter::default(), + }), + SourceType::Monitor | SourceType::Virtual + ); + } + + #[test] + fn source_type_filter_conversion_windows() { + assert_eq!( + WaylandCapturableContent::source_types_filter(CapturableContentFilter { + windows: Some(CapturableWindowFilter { + desktop_windows: true, + onscreen_only: false + }), + displays: false, + impl_capturable_content_filter: ImplCapturableContentFilter::default(), + }), + SourceType::Window + ); + assert_eq!( + WaylandCapturableContent::source_types_filter(CapturableContentFilter { + windows: Some(CapturableWindowFilter { + desktop_windows: false, + onscreen_only: true + }), + displays: false, + impl_capturable_content_filter: ImplCapturableContentFilter::default(), + }), + SourceType::Window + ); + assert_eq!( + WaylandCapturableContent::source_types_filter(CapturableContentFilter { + windows: Some(CapturableWindowFilter { + desktop_windows: true, + onscreen_only: true + }), + displays: false, + impl_capturable_content_filter: ImplCapturableContentFilter::default(), + }), + SourceType::Window + ); + } + + #[test] + fn source_type_filter_conversion_none() { + assert_eq!( + WaylandCapturableContent::source_types_filter(CapturableContentFilter { + windows: None, + displays: false, + impl_capturable_content_filter: ImplCapturableContentFilter::default(), + }), + BitFlags::empty() + ); + } + + #[test] + fn source_type_filter_conversion_all() { + assert_eq!( + WaylandCapturableContent::source_types_filter(CapturableContentFilter { + windows: Some(CapturableWindowFilter { + desktop_windows: true, + onscreen_only: true + }), + displays: true, + impl_capturable_content_filter: 
ImplCapturableContentFilter::default(), + }), + SourceType::Monitor | SourceType::Virtual | SourceType::Window + ); + } +} diff --git a/src/platform/linux_wayland/capture_stream.rs b/src/platform/linux_wayland/capture_stream.rs new file mode 100644 index 00000000..bcb81a1b --- /dev/null +++ b/src/platform/linux_wayland/capture_stream.rs @@ -0,0 +1,673 @@ +use std::{ + cell::RefCell, + ffi::{c_void, CString}, + mem::size_of, + rc::Rc, + sync::{ + atomic::{AtomicBool, Ordering}, + mpsc::Sender, + Arc, + }, + thread::JoinHandle, + time::Duration, +}; + +use pipewire::{ + context::Context, + main_loop::MainLoop, + spa::{ + self, + param::{ + self, + format::{self, MediaSubtype, MediaType}, + video::{VideoFormat, VideoInfoRaw}, + ParamType, + }, + pod::{self, Object, Pod, Property}, + sys::{ + spa_buffer, spa_meta_bitmap, spa_meta_cursor, spa_meta_header, SPA_META_Cursor, + SPA_META_Header, SPA_PARAM_META_size, SPA_PARAM_META_type, SPA_LOG_LEVEL_TRACE, + }, + utils::{ChoiceFlags, Direction, Fraction, Rectangle, SpaTypes}, + }, + stream::{Stream, StreamFlags, StreamRef, StreamState}, +}; + +use crate::{ + frame::VideoFrame, + prelude::{ + CaptureConfig, CapturePixelFormat, StreamCreateError, StreamError, StreamEvent, + StreamStopError, + }, +}; + +use super::frame::WaylandVideoFrame; + +const INVALID_CURSOR_ID: u32 = 0; + +macro_rules! cursor_metadata_size { + ($w:expr, $h:expr) => { + (size_of::() + size_of::() + $w * $h * 4) as i32 + }; +} + +fn serialize_pod_object(obj: Object) -> Result, StreamCreateError> { + let vals: Vec = pod::serialize::PodSerializer::serialize( + std::io::Cursor::new(Vec::new()), + &pod::Value::Object(obj), + ) + .map_err(|e| StreamCreateError::Other(e.to_string()))? + .0 + .into_inner(); + + Ok(vals) +} + +#[derive(Debug, PartialEq)] +struct CursorBitmap { + pub format: VideoFormat, + pub data: Vec, + pub width: u32, + pub height: u32, + pub bytes_per_pixel: usize, +} + +#[derive(Default, Debug, PartialEq)] +struct PwMetas<'a> { + pub header: Option<&'a spa_meta_header>, + pub cursor: Option<&'a spa_meta_cursor>, + pub cursor_bitmap: Option, +} + +impl<'a> PwMetas<'a> { + pub unsafe fn from_raw(raw: &'a *mut spa_buffer) -> Self { + let mut metas = Self::default(); + for meta in std::slice::from_raw_parts((*(*raw)).metas, (*(*raw)).n_metas as usize) { + match meta.type_ { + #[allow(non_upper_case_globals)] + SPA_META_Header => { + metas.header = Some(&*(meta.data as *const spa_meta_header)); + } + #[allow(non_upper_case_globals)] + SPA_META_Cursor => { + let cursor = &*(meta.data as *const spa_meta_cursor); + // Cursor bitmap are only sent when the cursor sprite is different from the previous + if cursor.id != INVALID_CURSOR_ID && cursor.bitmap_offset > 0 { + let bitmap = (cursor as *const spa_meta_cursor) + .byte_offset(cursor.bitmap_offset as isize) + as *const spa_meta_bitmap; + let bitmap_data = std::slice::from_raw_parts( + (bitmap as *const u8).byte_offset((*bitmap).offset as isize), + (*bitmap).size.height as usize * (*bitmap).stride as usize, + ); + metas.cursor_bitmap = Some(CursorBitmap { + format: param::video::VideoFormat((*bitmap).format), + data: bitmap_data.to_vec(), + width: (*bitmap).size.width, + height: (*bitmap).size.height, + bytes_per_pixel: (*bitmap).stride as usize / (*bitmap).size.width as usize, + }); + } + + metas.cursor = Some(cursor); + } + _ => {} + } + } + + metas + } +} + +#[derive(Default)] +struct PwDatas<'a> { + pub data: &'a [u8], +} + +impl<'a> PwDatas<'a> { + pub unsafe fn from_raw(raw: &'a *mut spa_buffer) -> Vec> { + 
std::slice::from_raw_parts((*(*raw)).datas, (*(*raw)).n_datas as usize) + .iter() + .map(|data| PwDatas { + data: std::slice::from_raw_parts(data.data as *mut u8, data.maxsize as usize), + }) + .collect::>>() + } +} + +struct WaylandCapturerUD { + pub format: VideoInfoRaw, + pub format_negotiating: Rc>, + pub show_cursor_as_metadata: bool, + pub start_time: i64, + pub callback: Box) + Send + 'static>, + pub should_run: Arc, + pub cursor_bitmap: Option, +} + +pub struct WaylandCaptureStream { + handle: Option>, + should_run: Arc, +} + +impl WaylandCaptureStream { + pub fn supported_pixel_formats() -> &'static [CapturePixelFormat] { + &[CapturePixelFormat::Bgra8888] + } + + pub fn check_access(_borderless: bool) -> Option { + Some(WaylandCaptureAccessToken(())) + } + + pub async fn request_access(_borderless: bool) -> Option { + Some(WaylandCaptureAccessToken(())) + } + + fn pod_supported_pixel_formats() -> pod::Property { + pod::property!( + format::FormatProperties::VideoFormat, + Choice, + Enum, + Id, + VideoFormat::BGRx, // Big-endian + VideoFormat::BGRA, // Big-endian + VideoFormat::RGBA, // Big-endian + VideoFormat::RGBx, // Big-endian + ) + } + + fn pod_supported_resolutions() -> pod::Property { + pod::property!( + format::FormatProperties::VideoSize, + Choice, + Range, + Rectangle, + Rectangle { + width: 512, + height: 512 + }, + Rectangle { + width: 1, + height: 1 + }, + Rectangle { + width: 15360, + height: 8640 + } + ) + } + + fn pod_supported_framerates() -> pod::Property { + pod::property!( + format::FormatProperties::VideoFramerate, + Choice, + Range, + Fraction, + Fraction { num: 30, denom: 1 }, + Fraction { num: 0, denom: 1 }, + Fraction { num: 512, denom: 1 } + ) + } + + fn state_changed( + _stream: &StreamRef, + ud: &mut WaylandCapturerUD, + _old: StreamState, + new: StreamState, + ) { + match new { + StreamState::Error(e) => { + (*ud.callback)(Err(StreamError::Other(e))); + ud.should_run.store(false, Ordering::SeqCst); + } + StreamState::Unconnected => { + (*ud.callback)(Ok(StreamEvent::End)); + ud.should_run.store(false, Ordering::SeqCst); + } + _ => {} + } + } + + fn param_changed(stream: &StreamRef, ud: &mut WaylandCapturerUD, id: u32, param: Option<&Pod>) { + let Some(param) = param else { + return; + }; + + use pipewire::spa::param; + if id != ParamType::Format.as_raw() { + return; + } + + match param::format_utils::parse_format(param) { + Ok((media_type, media_subtype)) => { + if media_type != MediaType::Video || media_subtype != MediaSubtype::Raw { + return; + } + } + Err(e) => { + unsafe { + pipewire::sys::pw_stream_set_error( + stream.as_raw_ptr(), + -1, + CString::new(e.to_string()).unwrap().as_c_str().as_ptr(), + ); + } + return; + } + }; + + let mut params = Vec::new(); + + let mcursor_obj = pod::object!( + SpaTypes::ObjectParamMeta, + ParamType::Meta, + Property::new( + SPA_PARAM_META_type, + pod::Value::Id(spa::utils::Id(SPA_META_Cursor)) + ), + Property::new( + SPA_PARAM_META_size, + pod::Value::Choice(pod::ChoiceValue::Int(spa::utils::Choice::( + ChoiceFlags::empty(), + spa::utils::ChoiceEnum::Range { + default: cursor_metadata_size!(64, 64), + min: cursor_metadata_size!(1, 1), + max: cursor_metadata_size!(512, 512) + } + ))) + ) + ); + let mcursor_values = serialize_pod_object(mcursor_obj).unwrap(); + params.push(pod::Pod::from_bytes(&mcursor_values).unwrap()); + + let mheader_obj = pod::object!( + SpaTypes::ObjectParamMeta, + ParamType::Meta, + Property::new( + SPA_PARAM_META_type, + pod::Value::Id(spa::utils::Id(SPA_META_Header)) + ), + Property::new( + 
SPA_PARAM_META_size, + pod::Value::Int(size_of::() as i32) + ), + ); + let mheader_values = serialize_pod_object(mheader_obj).unwrap(); + params.push(pod::Pod::from_bytes(&mheader_values).unwrap()); + + if let Err(e) = stream.update_params(&mut params) { + unsafe { + pipewire::sys::pw_stream_set_error( + stream.as_raw_ptr(), + -1, + CString::new(e.to_string()).unwrap().as_c_str().as_ptr(), + ); + } + return; + } + + ud.format.parse(param).unwrap(); + println!( + // DEBUGGING + "Got pixel format: {} ({:?})", + ud.format.format().as_raw(), + ud.format.format() + ); + + ud.format_negotiating.replace(false); + } + + fn process(stream: &StreamRef, ud: &mut WaylandCapturerUD) { + let raw_buffer = unsafe { stream.dequeue_raw_buffer() }; + if raw_buffer.is_null() { + unsafe { stream.queue_raw_buffer(raw_buffer) }; + return; + } + + let buffer = unsafe { (*raw_buffer).buffer }; + if buffer.is_null() { + unsafe { stream.queue_raw_buffer(raw_buffer) }; + return; + } + + let (metas, datas) = unsafe { (PwMetas::from_raw(&buffer), PwDatas::from_raw(&buffer)) }; + if let (Some(header), Some(data)) = (metas.header, datas.first()) { + if ud.start_time == 0 { + ud.start_time = header.pts; + } + + if metas.cursor_bitmap.is_some() { + ud.cursor_bitmap = metas.cursor_bitmap; + } + + // Very expensive + // let mut pixel_data = data.data.to_vec(); + // 'out: { + // if ud.show_cursor_as_metadata { + // if let (Some(cursor), Some(bitmap)) = (metas.cursor, ud.cursor_bitmap.as_ref()) + // { + // if cursor.position.y < 0 + // || cursor.position.x < 0 + // || bitmap.format == ud.format.format() { + // break 'out; + // } + + // TODO: Use cursor.hotspot + // let mut bmap_iter = bitmap.data.iter(); + // let mut h = cursor.position.y as usize; + // let mut height_max = std::cmp::min( + // cursor.position.y as usize + bitmap.height as usize, + // ud.format.size().height as usize, + // ); + // let mut width_max = std::cmp::min( + // cursor.position.x as usize + bitmap.width as usize, + // ud.format.size().width as usize, + // ); + // let stride = 4 * ud.format.size().width as usize; + // while h < height_max { + // let mut w = cursor.position.x as usize; + // while w < width_max { + // if bitmap.bytes_per_pixel != 4 { + // continue; + // } + // let pix_index = h as usize * stride + w as usize * 4; + // let b = *bmap_iter.next().unwrap(); + // let g = *bmap_iter.next().unwrap(); + // let r = *bmap_iter.next().unwrap(); + // let a = *bmap_iter.next().unwrap(); + // if a > 0 { + // pixel_data[pix_index + 0] = b; + // pixel_data[pix_index + 1] = g; + // pixel_data[pix_index + 2] = r; + // pixel_data[pix_index + 3] = a; + // } + // w += 1; + // } + + // h += 1; + // } + // } + // } + // } + + let frame = WaylandVideoFrame { + size: crate::prelude::Size { + width: ud.format.size().width as f64, + height: ud.format.size().height as f64, + }, + id: header.seq, + captured: std::time::Instant::now(), + pts: std::time::Duration::from_nanos((header.pts - ud.start_time) as u64), + format: ud.format, + data: data.data.as_ptr() as *const c_void, + }; + + (*ud.callback)(Ok(StreamEvent::Video(VideoFrame { + impl_video_frame: frame, + }))); + } + + unsafe { stream.queue_raw_buffer(raw_buffer) }; + } + + fn capture_main( + capture_config: CaptureConfig, + callback: Box) + Send + 'static>, + should_run: Arc, + init_tx: &Sender>, + ) -> Result<(), StreamCreateError> { + pipewire::init(); + unsafe { + // DEBUGGING + pipewire::sys::pw_log_set_level(SPA_LOG_LEVEL_TRACE); + } + + let mainloop = MainLoop::new(None).map_err(|e| 
StreamCreateError::Other(e.to_string()))?; + let context = + Context::new(&mainloop).map_err(|e| StreamCreateError::Other(e.to_string()))?; + let core = context + .connect(None) + .map_err(|e| StreamCreateError::Other(e.to_string()))?; + + use pipewire::keys; + let stream = Stream::new( + &core, + "crabgrab", + pipewire::properties::properties! { + *keys::MEDIA_TYPE => "Video", + *keys::MEDIA_CATEGORY => "Capture", + *keys::MEDIA_ROLE => "Screen", + }, + ) + .map_err(|e| StreamCreateError::Other(e.to_string()))?; + + let format_negotiating = Rc::new(RefCell::new(true)); + let user_data = WaylandCapturerUD { + format: Default::default(), + format_negotiating: format_negotiating.clone(), + show_cursor_as_metadata: capture_config.show_cursor + && match &capture_config.target { + crate::prelude::Capturable::Window(w) => { + w.impl_capturable_window.cursor_as_metadata + } + crate::prelude::Capturable::Display(d) => { + d.impl_capturable_display.cursor_as_metadata + } + }, + start_time: 0, + callback, + should_run: Arc::clone(&should_run), + cursor_bitmap: None, + }; + + let _listener = stream + .add_local_listener_with_user_data(user_data) + .state_changed(Self::state_changed) + .param_changed(Self::param_changed) + .process(Self::process) + .register() + .map_err(|e| StreamCreateError::Other(e.to_string()))?; + + // TODO: Accept DMA buffers + let stream_param_obj = pod::object!( + spa::utils::SpaTypes::ObjectParamFormat, + param::ParamType::EnumFormat, + pod::property!( + format::FormatProperties::MediaType, + Id, + format::MediaType::Video + ), + pod::property!( + format::FormatProperties::MediaSubtype, + Id, + format::MediaSubtype::Raw + ), + Self::pod_supported_pixel_formats(), + Self::pod_supported_resolutions(), + Self::pod_supported_framerates(), + ); + + let param_obj_values = serialize_pod_object(stream_param_obj)?; + let mut params = [pod::Pod::from_bytes(¶m_obj_values).unwrap()]; + + stream + .connect( + Direction::Input, + Some(match capture_config.target { + crate::prelude::Capturable::Window(w) => w.impl_capturable_window.pw_node_id, + crate::prelude::Capturable::Display(d) => d.impl_capturable_display.pw_node_id, + }), + StreamFlags::AUTOCONNECT | StreamFlags::MAP_BUFFERS, + &mut params, + ) + .map_err(|e| StreamCreateError::Other(e.to_string()))?; + + let loop_ = mainloop.loop_(); + + // Iterate the stream and check for errors while negotiating pixel format + while *(*format_negotiating).borrow() { + loop_.iterate(Duration::from_millis(100)); + } + + if !should_run.load(Ordering::Acquire) { + return Err(StreamCreateError::UnsupportedPixelFormat); + } + + init_tx.send(Ok(())).unwrap(); + + while should_run.load(Ordering::Acquire) { + loop_.iterate(Duration::from_millis(100)); + } + + let _ = stream.disconnect(); + + Ok(()) + } + + pub fn new( + _token: WaylandCaptureAccessToken, + capture_config: CaptureConfig, + callback: Box) + Send + 'static>, + ) -> Result { + let should_run = Arc::new(AtomicBool::new(true)); + let should_run_clone = Arc::clone(&should_run); + let (init_tx, init_rx) = std::sync::mpsc::channel::>(); + let handle = std::thread::spawn(move || { + if let Err(e) = Self::capture_main(capture_config, callback, should_run_clone, &init_tx) + { + init_tx.send(Err(e)).unwrap(); + } + }); + + init_rx.recv().unwrap()?; + + Ok(Self { + handle: Some(handle), + should_run, + }) + } + + pub(crate) fn stop(&mut self) -> Result<(), StreamStopError> { + if self.should_run.load(Ordering::Acquire) { + self.should_run.store(false, Ordering::SeqCst); + } + if let Some(handle) = 
self.handle.take() { + let _ = handle.join(); + } + Ok(()) + } +} + +impl Drop for WaylandCaptureStream { + fn drop(&mut self) { + let _ = self.stop(); + } +} + +#[derive(Clone, Debug)] +pub struct WaylandCaptureConfig {} + +impl WaylandCaptureConfig { + pub fn new() -> Self { + Self {} + } +} + +#[allow(dead_code)] +pub struct WaylandPixelFormat {} + +#[derive(Clone, Copy, Debug)] +pub struct WaylandCaptureAccessToken(()); + +impl WaylandCaptureAccessToken { + pub(crate) fn allows_borderless(&self) -> bool { + false + } +} + +#[cfg(test)] +mod tests { + use std::ffi::c_void; + + use pipewire::spa::sys::spa_meta; + + use super::*; + + #[test] + fn buffer_metas_extraction_header() { + let mut meta_header_data = spa_meta_header { + flags: 1, + offset: 2, + pts: 3, + dts_offset: 4, + seq: 5, + }; + let mut metas = [spa_meta { + type_: SPA_META_Header, + size: size_of_val(&meta_header_data) as u32, + data: std::ptr::addr_of_mut!(meta_header_data) as *mut c_void, + }]; + let mut buffer = spa_buffer { + n_metas: metas.len() as u32, + n_datas: 0, + metas: std::ptr::addr_of_mut!(metas) as *mut spa_meta, + datas: std::ptr::null_mut(), + }; + let buffer_addr = std::ptr::addr_of_mut!(buffer); + let extracted_metas = unsafe { PwMetas::from_raw(&buffer_addr) }; + assert_eq!( + extracted_metas, + PwMetas { + header: Some(&meta_header_data), + cursor: None, + cursor_bitmap: None + } + ); + } + + #[test] + fn buffer_metas_extraction_header_cursor() { + let mut meta_header_data = spa_meta_header { + flags: 1, + offset: 2, + pts: 3, + dts_offset: 4, + seq: 5, + }; + let mut meta_cursor_data = spa_meta_cursor { + id: 0, + flags: 123, + position: spa::sys::spa_point { x: 10, y: 12 }, + hotspot: spa::sys::spa_point { x: 20, y: 22 }, + bitmap_offset: 321, + }; + let mut metas = [ + spa_meta { + type_: SPA_META_Header, + size: size_of_val(&meta_header_data) as u32, + data: std::ptr::addr_of_mut!(meta_header_data) as *mut c_void, + }, + spa_meta { + type_: SPA_META_Cursor, + size: size_of_val(&meta_cursor_data) as u32, + data: std::ptr::addr_of_mut!(meta_cursor_data) as *mut c_void, + }, + ]; + let mut buffer = spa_buffer { + n_metas: metas.len() as u32, + n_datas: 0, + metas: std::ptr::addr_of_mut!(metas) as *mut spa_meta, + datas: std::ptr::null_mut(), + }; + let buffer_addr = std::ptr::addr_of_mut!(buffer); + let extracted_metas = unsafe { PwMetas::from_raw(&buffer_addr) }; + assert_eq!( + extracted_metas, + PwMetas { + header: Some(&meta_header_data), + cursor: Some(&meta_cursor_data), + cursor_bitmap: None + } + ); + } +} diff --git a/src/platform/linux_wayland/frame.rs b/src/platform/linux_wayland/frame.rs new file mode 100644 index 00000000..f59d79d5 --- /dev/null +++ b/src/platform/linux_wayland/frame.rs @@ -0,0 +1,44 @@ +use std::ffi::c_void; + +use pipewire::spa::param::video::VideoInfoRaw; + +use crate::prelude::VideoCaptureFrame; + +pub(crate) struct WaylandVideoFrame { + pub(crate) size: crate::prelude::Size, + pub(crate) id: u64, + pub(crate) captured: std::time::Instant, + pub(crate) pts: std::time::Duration, + pub(crate) format: VideoInfoRaw, + pub(crate) data: *const c_void, +} + +impl VideoCaptureFrame for WaylandVideoFrame { + fn size(&self) -> crate::prelude::Size { + self.size + } + + fn dpi(&self) -> f64 { + todo!() + } + + fn duration(&self) -> std::time::Duration { + std::time::Duration::from_secs(0) + } + + fn origin_time(&self) -> std::time::Duration { + self.pts + } + + fn capture_time(&self) -> std::time::Instant { + self.captured + } + + fn frame_id(&self) -> u64 { + self.id + } + + 
fn content_rect(&self) -> crate::prelude::Rect { + todo!() + } +} diff --git a/src/platform/linux_wayland/mod.rs b/src/platform/linux_wayland/mod.rs new file mode 100644 index 00000000..403084d3 --- /dev/null +++ b/src/platform/linux_wayland/mod.rs @@ -0,0 +1,59 @@ +mod capture_content; +mod capture_stream; +mod frame; + +pub(crate) use capture_stream::WaylandCaptureAccessToken as ImplCaptureAccessToken; +pub(crate) use capture_stream::WaylandCaptureConfig as ImplCaptureConfig; +pub(crate) use capture_stream::WaylandCaptureStream as ImplCaptureStream; +#[allow(unused_imports)] +pub(crate) use capture_stream::WaylandPixelFormat as ImplPixelFormat; + +pub(crate) use frame::WaylandVideoFrame as ImplVideoFrame; + +pub(crate) use capture_content::WaylandCapturableApplication as ImplCapturableApplication; +pub(crate) use capture_content::WaylandCapturableContent as ImplCapturableContent; +pub(crate) use capture_content::WaylandCapturableContentFilter as ImplCapturableContentFilter; +pub(crate) use capture_content::WaylandCapturableDisplay as ImplCapturableDisplay; +pub(crate) use capture_content::WaylandCapturableWindow as ImplCapturableWindow; + +#[derive(Clone, Debug)] +pub(crate) struct ImplAudioCaptureConfig {} + +impl ImplAudioCaptureConfig { + pub fn new() -> Self { + Self {} + } +} + +use crate::prelude::AudioCaptureFrame; + +pub(crate) struct ImplAudioFrame; + +impl AudioCaptureFrame for ImplAudioFrame { + fn sample_rate(&self) -> crate::prelude::AudioSampleRate { + todo!() + } + + fn channel_count(&self) -> crate::prelude::AudioChannelCount { + todo!() + } + + fn audio_channel_buffer( + &mut self, + _channel: usize, + ) -> Result, crate::prelude::AudioBufferError> { + todo!() + } + + fn duration(&self) -> std::time::Duration { + todo!() + } + + fn origin_time(&self) -> std::time::Duration { + todo!() + } + + fn frame_id(&self) -> u64 { + todo!() + } +} diff --git a/src/platform/mod.rs b/src/platform/mod.rs index b2f0d4d3..17c54a1a 100644 --- a/src/platform/mod.rs +++ b/src/platform/mod.rs @@ -10,6 +10,10 @@ pub(crate) use macos as platform_impl; pub mod windows; #[cfg(target_os = "windows")] -pub(crate) use windows as platform_impl; +pub(crate) use windows as platform_impl; +#[cfg(target_os = "linux")] +pub mod linux_wayland; +#[cfg(target_os = "linux")] +pub(crate) use linux_wayland as platform_impl;
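
For reviewers who want to exercise the new backend end to end, below is a minimal sketch of driving the Wayland path through the crate's cross-platform surface. The flow itself (portal-backed content enumeration, then a PipeWire-fed stream delivering BGRA frames, mapped via the new `bitmap` branch) is taken from this diff; the crate-level names used around it (`CaptureStream::test_access`, `CapturableContent::new`, `CaptureConfig::with_display`, `CapturableContentFilter::DISPLAYS`, `get_bitmap`) are assumptions based on the existing macOS/Windows API and may need adjusting.

```rust
// Sketch only: identifiers not introduced by this diff (test_access, CapturableContent::new,
// CaptureConfig::with_display, CapturableContentFilter::DISPLAYS, get_bitmap) are assumed
// from the crate's existing cross-platform surface, not defined by this change.
use crabgrab::prelude::*;

#[tokio::main]
async fn main() {
    // On Wayland there is no separate pre-flight permission check; the portal dialog is shown
    // when the ScreenCast session is created, so both access calls hand back a token
    // unconditionally (see WaylandCaptureStream::check_access / request_access in this diff).
    let token = match CaptureStream::test_access(false) {
        Some(token) => token,
        None => CaptureStream::request_access(false)
            .await
            .expect("screen capture access denied"),
    };

    // Content enumeration goes through the xdg-desktop-portal ScreenCast interface; every
    // monitor or window the user picks in the portal dialog comes back as a PipeWire node id.
    let filter = CapturableContentFilter::DISPLAYS;
    let content = CapturableContent::new(filter).await.expect("portal query failed");
    let display = content
        .displays()
        .next()
        .expect("no display selected in the portal dialog");

    // The Wayland backend currently negotiates BGRA-style raw formats only
    // (see supported_pixel_formats() in capture_stream.rs).
    let config = CaptureConfig::with_display(display, CapturePixelFormat::Bgra8888);

    let mut stream = CaptureStream::new(token, config, |event| match event {
        Ok(StreamEvent::Video(frame)) => {
            // get_bitmap() exercises the BGRA/BGRx mapping added to feature/bitmap/mod.rs
            // for this platform.
            if let Ok(_bitmap) = frame.get_bitmap() {
                println!(
                    "frame {} ({} x {})",
                    frame.frame_id(),
                    frame.size().width,
                    frame.size().height
                );
            }
        }
        Ok(StreamEvent::End) => println!("stream ended"),
        Err(e) => eprintln!("stream error: {:?}", e),
        _ => {}
    })
    .expect("failed to start capture stream");

    tokio::time::sleep(std::time::Duration::from_secs(5)).await;
    stream.stop().expect("failed to stop stream");
}
```

Note that on this backend the portal dialog is what actually scopes capturable content; the filter only narrows the source types offered in that dialog (see `source_types_filter` in `capture_content.rs`), so window titles, application info, and positions may come back as placeholders.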