waycap_rs/encoders/video.rs

use std::sync::Arc;
use std::time::Duration;

#[cfg(feature = "vaapi")]
use std::ffi::CString;
#[cfg(feature = "vaapi")]
use std::ptr::null_mut;

use crate::types::error::{Result, WaycapError};
use crate::types::video_frame::RawVideoFrame;
use crate::CaptureControls;
use crossbeam::channel::Receiver;
use crossbeam::select;
use ffmpeg_next::{self as ffmpeg};
use ffmpeg::ffi::{av_hwframe_ctx_alloc, AVBufferRef};
#[cfg(feature = "vaapi")]
use ffmpeg::ffi::av_hwdevice_ctx_create;
use pipewire::spa;
use std::sync::Mutex;

/// Group-of-pictures size (keyframe interval, in frames) used by the video encoders.
pub const GOP_SIZE: u32 = 30;

/// Base trait for video encoders. Defines the output type of an encoder.
///
/// To use this, implement either [`ProcessingThread::process`] for processing individual frames on
/// a separate worker thread, or [`StartVideoEncoder::start_processing`] for custom start logic.
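///
/// A minimal consumer-side sketch (`enc` is a hypothetical implementer; the actual
/// `Output` type is encoder-specific):
///
/// ```ignore
/// let rx = enc.output().expect("encoder output receiver");
/// enc.drain()?; // flush any frames still buffered inside the encoder
/// while let Ok(encoded) = rx.try_recv() {
///     // hand the encoded output to a muxer, network sink, etc.
/// }
/// ```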
pub trait VideoEncoder: Send + 'static {
    type Output;

    fn reset(&mut self) -> Result<()>;
    fn output(&mut self) -> Option<Receiver<Self::Output>>;
    fn drop_processor(&mut self);
    fn drain(&mut self) -> Result<()>;
    fn get_encoder(&self) -> &Option<ffmpeg::codec::encoder::Video>;
}

/// Specifies how processing is started for an encoder.
///
/// For the default processing-thread logic, implement [`ProcessingThread`] instead.
pub trait StartVideoEncoder: VideoEncoder + Sized {
    fn start_processing(
        capture: &mut crate::Capture<Self>,
        input: Receiver<RawVideoFrame>,
    ) -> Result<()>;
}

/// Implement this for [`VideoEncoder`]s that use the default processing thread.
///
/// [`ProcessingThread::process`] is called with each raw frame received on the worker thread.
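///
/// A minimal implementer sketch (the struct, its fields, and `EncodedFrame` are
/// hypothetical; only the trait shapes come from this module):
///
/// ```ignore
/// struct MyEncoder {
///     encoder: Option<ffmpeg::codec::encoder::Video>,
///     // output channel, filter graph, ...
/// }
///
/// impl VideoEncoder for MyEncoder {
///     type Output = EncodedFrame;
///     // ...reset, output, drop_processor, drain, get_encoder...
/// }
///
/// impl ProcessingThread for MyEncoder {
///     fn process(&mut self, frame: RawVideoFrame) -> Result<()> {
///         // convert/upload `frame` and feed it to the ffmpeg encoder
///         Ok(())
///     }
/// }
/// ```
///
/// The blanket [`StartVideoEncoder`] impl below then spawns the worker thread and drives
/// [`ProcessingThread::process`] through [`default_processing_loop`].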
pub trait ProcessingThread: StartVideoEncoder {
    /// Process a single raw frame.
    ///
    /// Called from inside the worker thread spawned by [`StartVideoEncoder::start_processing`].
    fn process(&mut self, frame: RawVideoFrame) -> Result<()>;

    /// Runs on the worker thread before the processing loop starts.
    fn thread_setup(&mut self) -> Result<()> {
        Ok(())
    }

    /// Runs on the worker thread after the processing loop exits.
    fn thread_teardown(&mut self) -> Result<()> {
        Ok(())
    }
}

/// Blanket [`StartVideoEncoder`] impl for every [`ProcessingThread`] implementer.
impl<T> StartVideoEncoder for T
where
    T: ProcessingThread,
{
    fn start_processing(
        capture: &mut crate::Capture<Self>,
        input: Receiver<RawVideoFrame>,
    ) -> Result<()> {
        let encoder = Arc::clone(
            capture
                .video_encoder
                .as_mut()
                .expect("start_processing should be called after Capture.video_encoder is set"),
        );
        let controls = Arc::clone(&capture.controls);

        let handle = std::thread::spawn(move || -> Result<()> {
            encoder.as_ref().lock().unwrap().thread_setup()?;

            let ret = default_processing_loop(input, controls, Arc::clone(&encoder));

            encoder.as_ref().lock().unwrap().thread_teardown()?;
            ret
        });
        capture.worker_handles.push(handle);
        Ok(())
    }
}

/// Default processing loop function. Handles stop/pause and frame interval changes.
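///
/// Frames that arrive before the current frame interval has elapsed since the last processed
/// frame are skipped. A custom [`StartVideoEncoder::start_processing`] can reuse this loop; a
/// rough sketch, assuming the encoder is already stored on the capture behind an `Arc<Mutex<_>>`:
///
/// ```ignore
/// let encoder = Arc::clone(capture.video_encoder.as_ref().unwrap());
/// let controls = Arc::clone(&capture.controls);
/// let handle = std::thread::spawn(move || default_processing_loop(input, controls, encoder));
/// capture.worker_handles.push(handle);
/// ```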
pub fn default_processing_loop<V: ProcessingThread>(
    input: Receiver<RawVideoFrame>,
    controls: Arc<CaptureControls>,
    thread_self: Arc<Mutex<V>>,
) -> Result<()> {
    let mut last_timestamp: u64 = 0;
    let mut frame_interval = controls.frame_interval_ns();

    while !controls.is_stopped() {
        if controls.is_paused() {
            std::thread::sleep(Duration::from_millis(100));
            continue;
        }
        select! {
            recv(input) -> raw_frame => {
                match raw_frame {
                    Ok(raw_frame) => {
                        let current_time = raw_frame.timestamp as u64;
                        if current_time >= last_timestamp + frame_interval {
                            thread_self.lock().unwrap().process(raw_frame)?;
                            last_timestamp = current_time;
                        }
                    }
                    Err(_) => {
                        log::info!("Video channel disconnected");
                        break;
                    }
                }
            }
            default(Duration::from_millis(100)) => {
                // Timeout to change fps if needed and check stop/pause flags periodically
                frame_interval = controls.frame_interval_ns();
            }
        }
    }
    Ok(())
}

/// Encoders that can provide a PipeWire SPA pod definition for their stream format.
pub trait PipewireSPA {
    fn get_spa_definition() -> Result<spa::pod::Object>;
}

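/// Allocates an FFmpeg hardware frame context tied to the given hardware device.
///
/// The returned context still has to be configured and initialized by the caller; a rough
/// sketch with assumed formats and dimensions (the real encoder picks its own values):
///
/// ```ignore
/// let frames_ref = create_hw_frame_ctx(device)?;
/// unsafe {
///     let frames_ctx = (*frames_ref).data as *mut ffmpeg::ffi::AVHWFramesContext;
///     (*frames_ctx).format = ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_VAAPI;
///     (*frames_ctx).sw_format = ffmpeg::ffi::AVPixelFormat::AV_PIX_FMT_NV12;
///     (*frames_ctx).width = 1920;
///     (*frames_ctx).height = 1080;
///     if ffmpeg::ffi::av_hwframe_ctx_init(frames_ref) < 0 {
///         // handle the error
///     }
/// }
/// ```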
pub fn create_hw_frame_ctx(device: *mut AVBufferRef) -> Result<*mut AVBufferRef> {
    unsafe {
        let frame = av_hwframe_ctx_alloc(device);

        if frame.is_null() {
            return Err(WaycapError::Init(
                "Could not create hw frame context".to_string(),
            ));
        }

        Ok(frame)
    }
}

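/// Creates an FFmpeg hardware device context on the fixed render node `/dev/dri/renderD128`.
///
/// A usage sketch, assuming a VAAPI device is wanted:
///
/// ```ignore
/// let device = create_hw_device(ffmpeg::ffi::AVHWDeviceType::AV_HWDEVICE_TYPE_VAAPI)?;
/// let frames = create_hw_frame_ctx(device)?;
/// ```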
#[cfg(feature = "vaapi")]
pub fn create_hw_device(device_type: ffmpeg_next::ffi::AVHWDeviceType) -> Result<*mut AVBufferRef> {
    unsafe {
        let mut device: *mut AVBufferRef = null_mut();
        let device_path = CString::new("/dev/dri/renderD128").unwrap();
        let ret = av_hwdevice_ctx_create(
            &mut device,
            device_type,
            device_path.as_ptr(),
            null_mut(),
            0,
        );
        if ret < 0 {
            return Err(WaycapError::Init(format!(
                "Failed to create hardware device: Error code {ret:?}",
            )));
        }

        Ok(device)
    }
}