waycap_rs/encoders/video.rs

use std::ffi::CString;
use std::ptr::null_mut;
use std::sync::{Arc, Mutex};
use std::time::Duration;

use crate::types::error::{Result, WaycapError};
use crate::types::video_frame::RawVideoFrame;
use crate::CaptureControls;
use crossbeam::channel::Receiver;
use crossbeam::select;
use ffmpeg::ffi::{av_hwdevice_ctx_create, av_hwframe_ctx_alloc, AVBufferRef};
use ffmpeg_next::{self as ffmpeg};
use pipewire::spa;

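/// Group-of-pictures size applied to the video encoders in this module;
/// at 30 fps this works out to roughly one keyframe per second.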
pub const GOP_SIZE: u32 = 30;

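/// Common interface shared by the video encoders: they can be reset,
/// drained, and torn down, expose the underlying ffmpeg encoder, and hand
/// out a channel on which encoded output is delivered.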
pub trait VideoEncoder: Send + 'static {
    type Output;

    fn reset(&mut self) -> Result<()>;
    fn output(&mut self) -> Option<Receiver<Self::Output>>;
    fn drop_processor(&mut self);
    fn drain(&mut self) -> Result<()>;
    fn get_encoder(&self) -> &Option<ffmpeg::codec::encoder::Video>;
}

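/// Entry point used by `Capture` to spawn whatever machinery an encoder
/// needs for consuming raw frames.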
pub trait StartVideoEncoder: VideoEncoder + Sized {
    fn start_processing(
        capture: &mut crate::Capture<Self>,
        input: Receiver<RawVideoFrame>,
    ) -> Result<()>;
}

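/// Per-frame processing hook consumed by the default worker thread.
///
/// `thread_setup` and `thread_teardown` run once on the worker thread,
/// immediately before and after the processing loop, and default to
/// no-ops.
///
/// A minimal sketch of an implementor (the `MyEncoder` name is
/// hypothetical; the concrete encoders live elsewhere in this crate):
///
/// ```ignore
/// impl ProcessingThread for MyEncoder {
///     fn process(&mut self, frame: RawVideoFrame) -> Result<()> {
///         // Encode `frame` and push the resulting packets onto the
///         // channel returned by `output()`.
///         Ok(())
///     }
/// }
/// ```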
pub trait ProcessingThread: StartVideoEncoder {
    fn process(&mut self, frame: RawVideoFrame) -> Result<()>;

    fn thread_setup(&mut self) -> Result<()> {
        Ok(())
    }

    fn thread_teardown(&mut self) -> Result<()> {
        Ok(())
    }
}

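// Blanket implementation: any `ProcessingThread` gets `start_processing`
// for free by spawning a worker thread that runs `default_processing_loop`.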
impl<T> StartVideoEncoder for T
where
    T: ProcessingThread,
{
    fn start_processing(
        capture: &mut crate::Capture<Self>,
        input: Receiver<RawVideoFrame>,
    ) -> Result<()> {
        let encoder = Arc::clone(
            capture
                .video_encoder
                .as_mut()
                .expect("start_processing should be called after Capture.video_encoder is set"),
        );
        let controls = Arc::clone(&capture.controls);

        let handle = std::thread::spawn(move || -> Result<()> {
            encoder.lock().unwrap().thread_setup()?;

            let ret = default_processing_loop(input, controls, Arc::clone(&encoder));

            // Tear down on the worker thread even if the loop returned an
            // error, then surface the loop's result.
            encoder.lock().unwrap().thread_teardown()?;
            ret
        });
        capture.worker_handles.push(handle);
        Ok(())
    }
}

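/// Shared frame-pumping loop: pulls raw frames off `input`, skips frames
/// that arrive faster than the configured frame interval, and hands the
/// rest to the encoder's `process`. While paused it sleeps; after every
/// 100 ms of idle time it re-reads the frame interval so runtime FPS
/// changes take effect.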
pub fn default_processing_loop<V: ProcessingThread>(
    input: Receiver<RawVideoFrame>,
    controls: Arc<CaptureControls>,
    thread_self: Arc<Mutex<V>>,
) -> Result<()> {
    let mut last_timestamp: u64 = 0;
    let mut frame_interval = controls.frame_interval_ns();

    while !controls.is_stopped() {
        if controls.is_paused() {
            std::thread::sleep(Duration::from_millis(100));
            continue;
        }
        select! {
            recv(input) -> raw_frame => {
                match raw_frame {
                    Ok(raw_frame) => {
                        let current_time = raw_frame.timestamp as u64;
                        // Throttle to the target frame rate: only process a
                        // frame once a full interval has elapsed.
                        if current_time >= last_timestamp + frame_interval {
                            thread_self.lock().unwrap().process(raw_frame)?;
                            last_timestamp = current_time;
                        }
                    }
                    Err(_) => {
                        log::info!("Video channel disconnected");
                        break;
                    }
                }
            }
            default(Duration::from_millis(100)) => {
                // No frame within 100 ms; refresh the interval in case the
                // target FPS changed at runtime.
                frame_interval = controls.frame_interval_ns();
            }
        }
    }
    Ok(())
}

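/// Implemented by encoders that negotiate a PipeWire stream format,
/// supplying the SPA pod object describing the frames they accept.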
pub trait PipewireSPA {
    fn get_spa_definition() -> Result<spa::pod::Object>;
}

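/// Allocates an ffmpeg hardware frame context tied to `device`.
///
/// The returned context is unconfigured: the caller is expected to fill in
/// the frame parameters and call `av_hwframe_ctx_init` before using it.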
pub fn create_hw_frame_ctx(device: *mut AVBufferRef) -> Result<*mut AVBufferRef> {
    unsafe {
        let frame = av_hwframe_ctx_alloc(device);

        if frame.is_null() {
            return Err(WaycapError::Init(
                "Could not create hw frame context".to_string(),
            ));
        }

        Ok(frame)
    }
}

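/// Creates an ffmpeg hardware device context of the given type. The render
/// node is currently hardcoded to `/dev/dri/renderD128`, i.e. the first
/// render-capable GPU exposed by the kernel.
///
/// A sketch of expected usage (assuming a VAAPI-capable GPU):
///
/// ```ignore
/// let device = create_hw_device(ffmpeg_next::ffi::AVHWDeviceType::AV_HWDEVICE_TYPE_VAAPI)?;
/// let frames = create_hw_frame_ctx(device)?;
/// ```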
pub fn create_hw_device(device_type: ffmpeg_next::ffi::AVHWDeviceType) -> Result<*mut AVBufferRef> {
    unsafe {
        let mut device: *mut AVBufferRef = null_mut();
        let device_path = CString::new("/dev/dri/renderD128").unwrap();
        let ret = av_hwdevice_ctx_create(
            &mut device,
            device_type,
            device_path.as_ptr(),
            null_mut(),
            0,
        );
        if ret < 0 {
            return Err(WaycapError::Init(format!(
                "Failed to create hardware device: error code {ret}",
            )));
        }

        Ok(device)
    }
}