waycap_rs/encoders/vaapi_encoder.rs

use std::{any::Any, ptr::null_mut};

use drm_fourcc::DrmFourcc;
use ffmpeg_next::{
    self as ffmpeg,
    ffi::{
        av_buffer_create, av_buffer_default_free, av_buffer_ref, av_buffer_unref,
        av_hwframe_ctx_init, AVDRMFrameDescriptor, AVHWDeviceContext, AVHWFramesContext,
        AVPixelFormat,
    },
    Rational,
};
use ringbuf::{
    traits::{Producer, Split},
    HeapCons, HeapProd, HeapRb,
};

use crate::types::{
    config::QualityPreset,
    error::{Result, WaycapError},
    video_frame::{EncodedVideoFrame, RawVideoFrame},
};

use super::video::{create_hw_device, create_hw_frame_ctx, VideoEncoder, GOP_SIZE};

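/// H.264 encoder backed by VAAPI. Captured frames arrive as DMA-BUF file
/// descriptors, get wrapped in `DRM_PRIME` frames, are mapped onto the VAAPI
/// device by an `hwmap` -> `scale_vaapi` filter graph (BGRA -> NV12), and are
/// encoded with `h264_vaapi`. Encoded packets land in a ring buffer whose
/// consumer half can be taken once via `take_encoded_recv`.
///
/// A minimal usage sketch (hypothetical call site: assumes the `VideoEncoder`
/// trait is in scope and that a capture loop supplies `raw_frame` values
/// carrying DMA-BUF fds; everything else comes from this crate):
///
/// ```ignore
/// use ringbuf::traits::Consumer;
///
/// let mut encoder = VaapiEncoder::new(1920, 1080, QualityPreset::Medium)?;
/// let mut encoded_rx = encoder.take_encoded_recv().expect("receiver already taken");
///
/// // Feed every captured frame to the encoder.
/// encoder.process(&raw_frame)?;
///
/// // Elsewhere, pop finished packets off the ring buffer.
/// while let Some(packet) = encoded_rx.try_pop() {
///     // mux or store packet.data, packet.pts, packet.dts, packet.is_keyframe
/// }
///
/// // Flush anything still buffered before shutting down.
/// encoder.drain()?;
/// ```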
pub struct VaapiEncoder {
    encoder: Option<ffmpeg::codec::encoder::Video>,
    width: u32,
    height: u32,
    encoder_name: String,
    quality: QualityPreset,
    encoded_frame_recv: Option<HeapCons<EncodedVideoFrame>>,
    encoded_frame_sender: Option<HeapProd<EncodedVideoFrame>>,
    filter_graph: Option<ffmpeg::filter::Graph>,
}

impl VideoEncoder for VaapiEncoder {
    fn new(width: u32, height: u32, quality: QualityPreset) -> Result<Self>
    where
        Self: Sized,
    {
        let encoder_name = "h264_vaapi";
        let encoder = Self::create_encoder(width, height, encoder_name, &quality)?;
        let video_ring_buffer = HeapRb::<EncodedVideoFrame>::new(120);
        let (video_ring_sender, video_ring_receiver) = video_ring_buffer.split();
        let filter_graph = Some(Self::create_filter_graph(&encoder, width, height)?);

        Ok(Self {
            encoder: Some(encoder),
            width,
            height,
            encoder_name: encoder_name.to_string(),
            quality,
            encoded_frame_recv: Some(video_ring_receiver),
            encoded_frame_sender: Some(video_ring_sender),
            filter_graph,
        })
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

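    /// Wrap the frame's DMA-BUF fd in a `DRM_PRIME` frame, run it through the
    /// hwmap/scale_vaapi filter graph, and send the filtered VAAPI frame to the
    /// encoder; any packet the encoder already has ready is pushed onto the
    /// encoded-frame ring buffer.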
    fn process(&mut self, frame: &RawVideoFrame) -> Result<()> {
        if let Some(ref mut encoder) = self.encoder {
            if let Some(fd) = frame.dmabuf_fd {
                let mut drm_frame = ffmpeg::util::frame::Video::new(
                    ffmpeg_next::format::Pixel::DRM_PRIME,
                    encoder.width(),
                    encoder.height(),
                );
                unsafe {
                    // Create a DRM frame descriptor that points to the DMA buffer
                    let drm_desc =
                        Box::into_raw(Box::new(std::mem::zeroed::<AVDRMFrameDescriptor>()));

                    (*drm_desc).nb_objects = 1;
                    (*drm_desc).objects[0].fd = fd;
                    (*drm_desc).objects[0].size = 0;
                    (*drm_desc).objects[0].format_modifier = 0;

                    (*drm_desc).nb_layers = 1;
                    (*drm_desc).layers[0].format = DrmFourcc::Argb8888 as u32;
                    (*drm_desc).layers[0].nb_planes = 1;
                    (*drm_desc).layers[0].planes[0].object_index = 0;
                    (*drm_desc).layers[0].planes[0].offset = frame.offset as isize;
                    (*drm_desc).layers[0].planes[0].pitch = frame.stride as isize;

                    // Attach the descriptor to the frame
                    (*drm_frame.as_mut_ptr()).data[0] = drm_desc as *mut u8;
                    (*drm_frame.as_mut_ptr()).buf[0] = av_buffer_create(
                        drm_desc as *mut u8,
                        std::mem::size_of::<AVDRMFrameDescriptor>(),
                        Some(av_buffer_default_free),
                        null_mut(),
                        0,
                    );

                    (*drm_frame.as_mut_ptr()).hw_frames_ctx =
                        av_buffer_ref((*encoder.as_ptr()).hw_frames_ctx);
                }

                // Push the DRM frame through the filter graph (hwmap -> scale_vaapi)
                drm_frame.set_pts(Some(frame.timestamp));
                self.filter_graph
                    .as_mut()
                    .unwrap()
                    .get("in")
                    .unwrap()
                    .source()
                    .add(&drm_frame)
                    .unwrap();

                let mut filtered = ffmpeg::util::frame::Video::empty();
                if self
                    .filter_graph
                    .as_mut()
                    .unwrap()
                    .get("out")
                    .unwrap()
                    .sink()
                    .frame(&mut filtered)
                    .is_ok()
                {
                    encoder.send_frame(&filtered)?;
                }
            }

            // Forward any finished packet to the encoded-frame ring buffer
            let mut packet = ffmpeg::codec::packet::Packet::empty();
            if encoder.receive_packet(&mut packet).is_ok() {
                if let Some(data) = packet.data() {
                    if let Some(ref mut sender) = self.encoded_frame_sender {
                        if sender
                            .try_push(EncodedVideoFrame {
                                data: data.to_vec(),
                                is_keyframe: packet.is_key(),
                                pts: packet.pts().unwrap_or(0),
                                dts: packet.dts().unwrap_or(0),
                            })
                            .is_err()
                        {
                            log::error!("Could not send encoded packet to the ringbuf");
                        }
                    }
                };
            }
        }
        Ok(())
    }

    /// Drain the filter graph and the encoder of any frames they are still processing
    fn drain(&mut self) -> Result<()> {
        if let Some(ref mut encoder) = self.encoder {
            // Drain the filter graph
            let mut filtered = ffmpeg::util::frame::Video::empty();
            while self
                .filter_graph
                .as_mut()
                .unwrap()
                .get("out")
                .unwrap()
                .sink()
                .frame(&mut filtered)
                .is_ok()
            {
                encoder.send_frame(&filtered)?;
            }

            // Drain the encoder
            encoder.send_eof()?;
            let mut packet = ffmpeg::codec::packet::Packet::empty();
            while encoder.receive_packet(&mut packet).is_ok() {
                if let Some(data) = packet.data() {
                    if let Some(ref mut sender) = self.encoded_frame_sender {
                        if sender
                            .try_push(EncodedVideoFrame {
                                data: data.to_vec(),
                                is_keyframe: packet.is_key(),
                                pts: packet.pts().unwrap_or(0),
                                dts: packet.dts().unwrap_or(0),
                            })
                            .is_err()
                        {
                            log::error!("Could not send encoded packet to the ringbuf");
                        }
                    }
                };
                packet = ffmpeg::codec::packet::Packet::empty();
            }
        }
        Ok(())
    }

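    /// Tear down the current encoder and filter graph and rebuild them with the
    /// same dimensions and quality preset.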
    fn reset(&mut self) -> Result<()> {
        self.drop_encoder();
        let new_encoder =
            Self::create_encoder(self.width, self.height, &self.encoder_name, &self.quality)?;

        let new_filter_graph = Self::create_filter_graph(&new_encoder, self.width, self.height)?;

        self.encoder = Some(new_encoder);
        self.filter_graph = Some(new_filter_graph);
        Ok(())
    }

    fn get_encoder(&self) -> &Option<ffmpeg::codec::encoder::Video> {
        &self.encoder
    }

    fn drop_encoder(&mut self) {
        self.encoder.take();
        self.filter_graph.take();
    }

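    /// Hand out the consumer half of the encoded-frame ring buffer. It can only
    /// be taken once; subsequent calls return `None`.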
    fn take_encoded_recv(&mut self) -> Option<HeapCons<EncodedVideoFrame>> {
        self.encoded_frame_recv.take()
    }
}

impl VaapiEncoder {
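    /// Build an `h264_vaapi` encoder: create a VAAPI hardware device, attach an
    /// NV12 hardware frame pool to the encoder context, and open the encoder
    /// with options derived from the quality preset.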
    fn create_encoder(
        width: u32,
        height: u32,
        encoder: &str,
        quality: &QualityPreset,
    ) -> Result<ffmpeg::codec::encoder::Video> {
        let encoder_codec =
            ffmpeg::codec::encoder::find_by_name(encoder).ok_or(ffmpeg::Error::EncoderNotFound)?;

        let mut encoder_ctx = ffmpeg::codec::context::Context::new_with_codec(encoder_codec)
            .encoder()
            .video()?;

        encoder_ctx.set_width(width);
        encoder_ctx.set_height(height);
        encoder_ctx.set_format(ffmpeg::format::Pixel::VAAPI);
        // Configuration inspired by
        // https://git.dec05eba.com/gpu-screen-recorder/tree/src/capture/xcomposite_drm.c?id=8cbdb596ebf79587a432ed40583630b6cd39ed88
        let mut vaapi_device =
            create_hw_device(ffmpeg_next::ffi::AVHWDeviceType::AV_HWDEVICE_TYPE_VAAPI)?;
        let mut frame_ctx = create_hw_frame_ctx(vaapi_device)?;

        unsafe {
            let hw_frame_context = &mut *((*frame_ctx).data as *mut AVHWFramesContext);
            hw_frame_context.width = width as i32;
            hw_frame_context.height = height as i32;
            hw_frame_context.sw_format = AVPixelFormat::AV_PIX_FMT_NV12;
            hw_frame_context.format = encoder_ctx.format().into();
            hw_frame_context.device_ref = av_buffer_ref(vaapi_device);
            hw_frame_context.device_ctx = (*vaapi_device).data as *mut AVHWDeviceContext;
            // Decides the frame pool size: if we never pop frames from the encoder we
            // cannot enqueue more than this many. Maybe adjust later, but for now set
            // it to double the target FPS.
            hw_frame_context.initial_pool_size = 120;

            let err = av_hwframe_ctx_init(frame_ctx);
            if err < 0 {
                return Err(WaycapError::Init(format!(
                    "Error trying to initialize hw frame context: {:?}",
                    err
                )));
            }

            (*encoder_ctx.as_mut_ptr()).hw_device_ctx = av_buffer_ref(vaapi_device);
            (*encoder_ctx.as_mut_ptr()).hw_frames_ctx = av_buffer_ref(frame_ctx);

            av_buffer_unref(&mut vaapi_device);
            av_buffer_unref(&mut frame_ctx);
        }

        // These should be part of a config file
        encoder_ctx.set_time_base(Rational::new(1, 1_000_000));

        // Needed to insert I-frames more frequently so we don't lose full seconds
        // when popping frames from the front
        encoder_ctx.set_gop(GOP_SIZE);

        let encoder_params = ffmpeg::codec::Parameters::new();

        let opts = Self::get_encoder_params(quality);

        encoder_ctx.set_parameters(encoder_params)?;
        let encoder = encoder_ctx.open_with(opts)?;
        Ok(encoder)
    }

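    /// Map the quality preset to encoder options; a lower QP means higher
    /// quality at the cost of larger output.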
    fn get_encoder_params(quality: &QualityPreset) -> ffmpeg::Dictionary {
        let mut opts = ffmpeg::Dictionary::new();
        opts.set("vsync", "vfr");
        opts.set("rc", "VBR");
        match quality {
            QualityPreset::Low => {
                opts.set("qp", "30");
            }
            QualityPreset::Medium => {
                opts.set("qp", "25");
            }
            QualityPreset::High => {
                opts.set("qp", "20");
            }
            QualityPreset::Ultra => {
                opts.set("qp", "15");
            }
        }
        opts
    }

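    /// Build the filter graph `buffer (bgra) -> hwmap -> scale_vaapi (nv12) -> buffersink`,
    /// sharing the encoder's VAAPI device so mapping and scaling happen on the GPU.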
    fn create_filter_graph(
        encoder: &ffmpeg::codec::encoder::Video,
        width: u32,
        height: u32,
    ) -> Result<ffmpeg::filter::Graph> {
        let mut graph = ffmpeg::filter::Graph::new();

        let args = format!(
            "video_size={}x{}:pix_fmt=bgra:time_base=1/1000000",
            width, height
        );

        let mut input = graph.add(&ffmpeg::filter::find("buffer").unwrap(), "in", &args)?;

        let mut hwmap = graph.add(
            &ffmpeg::filter::find("hwmap").unwrap(),
            "hwmap",
            "mode=read+write:derive_device=vaapi",
        )?;

        let scale_args = format!("w={}:h={}:format=nv12:out_range=tv", width, height);
        let mut scale = graph.add(
            &ffmpeg::filter::find("scale_vaapi").unwrap(),
            "scale",
            &scale_args,
        )?;

        let mut out = graph.add(&ffmpeg::filter::find("buffersink").unwrap(), "out", "")?;
        unsafe {
            let dev = (*encoder.as_ptr()).hw_device_ctx;

            (*hwmap.as_mut_ptr()).hw_device_ctx = av_buffer_ref(dev);
        }

        input.link(0, &mut hwmap, 0);
        hwmap.link(0, &mut scale, 0);
        scale.link(0, &mut out, 0);

        graph.validate()?;
        log::trace!("VAAPI Graph\n{}", graph.dump());

        Ok(graph)
    }
}

impl Drop for VaapiEncoder {
    fn drop(&mut self) {
        if let Err(e) = self.drain() {
            log::error!("Error while draining vaapi encoder during drop: {:?}", e);
        }
        self.drop_encoder();
    }
}