// waycap_rs/encoders/vaapi_encoder.rs

1use std::ptr::null_mut;
2
3use crate::{
4    encoders::video::{PipewireSPA, ProcessingThread, VideoEncoder},
5    types::{
6        config::QualityPreset,
7        error::{Result, WaycapError},
8        video_frame::{EncodedVideoFrame, RawVideoFrame},
9    },
10    utils::TIME_UNIT_NS,
11};
12use crossbeam::channel::{bounded, Receiver, Sender};
13use drm_fourcc::DrmFourcc;
14use ffmpeg_next::{
15    self as ffmpeg,
16    ffi::{
17        av_buffer_create, av_buffer_default_free, av_buffer_ref, av_buffer_unref,
18        av_hwframe_ctx_init, AVDRMFrameDescriptor, AVHWDeviceContext, AVHWFramesContext,
19        AVPixelFormat,
20    },
21    Rational,
22};
23use pipewire as pw;
24
25use super::video::{create_hw_device, create_hw_frame_ctx, GOP_SIZE};
26
/// Encoder which encodes frames using Vaapi (hardware-accelerated H.264).
pub struct VaapiEncoder {
    /// Open ffmpeg VAAPI encoder; `None` between `drop_processor` and `reset`.
    encoder: Option<ffmpeg::codec::encoder::Video>,
    /// Output width in pixels, kept so the encoder can be rebuilt on `reset`.
    width: u32,
    /// Output height in pixels, kept so the encoder can be rebuilt on `reset`.
    height: u32,
    /// ffmpeg codec name used to (re)create the encoder (e.g. "h264_vaapi").
    encoder_name: String,
    /// Quality preset, mapped to QP values in `get_encoder_params`.
    quality: QualityPreset,
    /// Receiver handed out to consumers of encoded frames via `output()`.
    encoded_frame_recv: Option<Receiver<EncodedVideoFrame>>,
    /// Sender side of the bounded encoded-frame channel (filled by `process`).
    encoded_frame_sender: Sender<EncodedVideoFrame>,
    /// DRM -> VAAPI filter graph; `None` whenever `encoder` is `None`.
    filter_graph: Option<ffmpeg::filter::Graph>,
}
38
39impl ProcessingThread for VaapiEncoder {
40    fn process(&mut self, frame: RawVideoFrame) -> Result<()> {
41        if let Some(ref mut encoder) = self.encoder {
42            if let Some(fd) = frame.dmabuf_fd {
43                let mut drm_frame = ffmpeg::util::frame::Video::new(
44                    ffmpeg_next::format::Pixel::DRM_PRIME,
45                    encoder.width(),
46                    encoder.height(),
47                );
48                unsafe {
49                    // Create DRM descriptor that points to the DMA buffer
50                    let drm_desc =
51                        Box::into_raw(Box::new(std::mem::zeroed::<AVDRMFrameDescriptor>()));
52
53                    (*drm_desc).nb_objects = 1;
54                    (*drm_desc).objects[0].fd = fd;
55                    (*drm_desc).objects[0].size = 0;
56                    (*drm_desc).objects[0].format_modifier = 0;
57
58                    (*drm_desc).nb_layers = 1;
59                    (*drm_desc).layers[0].format = DrmFourcc::Argb8888 as u32;
60                    (*drm_desc).layers[0].nb_planes = 1;
61                    (*drm_desc).layers[0].planes[0].object_index = 0;
62                    (*drm_desc).layers[0].planes[0].offset = frame.offset as isize;
63                    (*drm_desc).layers[0].planes[0].pitch = frame.stride as isize;
64
65                    // Attach descriptor to frame
66                    (*drm_frame.as_mut_ptr()).data[0] = drm_desc as *mut u8;
67                    (*drm_frame.as_mut_ptr()).buf[0] = av_buffer_create(
68                        drm_desc as *mut u8,
69                        std::mem::size_of::<AVDRMFrameDescriptor>(),
70                        Some(av_buffer_default_free),
71                        null_mut(),
72                        0,
73                    );
74
75                    (*drm_frame.as_mut_ptr()).hw_frames_ctx =
76                        av_buffer_ref((*encoder.as_ptr()).hw_frames_ctx);
77                }
78
79                drm_frame.set_pts(Some(frame.timestamp));
80                self.filter_graph
81                    .as_mut()
82                    .unwrap()
83                    .get("in")
84                    .unwrap()
85                    .source()
86                    .add(&drm_frame)
87                    .unwrap();
88
89                let mut filtered = ffmpeg::util::frame::Video::empty();
90                if self
91                    .filter_graph
92                    .as_mut()
93                    .unwrap()
94                    .get("out")
95                    .unwrap()
96                    .sink()
97                    .frame(&mut filtered)
98                    .is_ok()
99                {
100                    encoder.send_frame(&filtered)?;
101                }
102            }
103
104            let mut packet = ffmpeg::codec::packet::Packet::empty();
105            if encoder.receive_packet(&mut packet).is_ok() {
106                if let Some(data) = packet.data() {
107                    match self.encoded_frame_sender.try_send(EncodedVideoFrame {
108                        data: data.to_vec(),
109                        is_keyframe: packet.is_key(),
110                        pts: packet.pts().unwrap_or(0),
111                        dts: packet.dts().unwrap_or(0),
112                    }) {
113                        Ok(_) => {}
114                        Err(crossbeam::channel::TrySendError::Full(_)) => {
115                            log::error!("Could not send encoded video frame. Receiver is full");
116                        }
117                        Err(crossbeam::channel::TrySendError::Disconnected(_)) => {
118                            log::error!(
119                                "Could not send encoded video frame. Receiver disconnected"
120                            );
121                        }
122                    }
123                };
124            }
125        }
126        Ok(())
127    }
128}
129
130impl VideoEncoder for VaapiEncoder {
131    type Output = EncodedVideoFrame;
132    fn reset(&mut self) -> Result<()> {
133        self.drop_processor();
134        let new_encoder =
135            Self::create_encoder(self.width, self.height, &self.encoder_name, &self.quality)?;
136
137        let new_filter_graph = Self::create_filter_graph(&new_encoder, self.width, self.height)?;
138
139        self.encoder = Some(new_encoder);
140        self.filter_graph = Some(new_filter_graph);
141        Ok(())
142    }
143
144    fn drop_processor(&mut self) {
145        self.encoder.take();
146        self.filter_graph.take();
147    }
148
149    fn output(&mut self) -> Option<Receiver<EncodedVideoFrame>> {
150        self.encoded_frame_recv.clone()
151    }
152
153    /// Drain the filter graph and encoder of any remaining frames it is processing
154    fn drain(&mut self) -> Result<()> {
155        if let Some(ref mut encoder) = self.encoder {
156            // Drain the filter graph
157            let mut filtered = ffmpeg::util::frame::Video::empty();
158            while self
159                .filter_graph
160                .as_mut()
161                .unwrap()
162                .get("out")
163                .unwrap()
164                .sink()
165                .frame(&mut filtered)
166                .is_ok()
167            {
168                encoder.send_frame(&filtered)?;
169            }
170
171            // Drain encoder
172            encoder.send_eof()?;
173            let mut packet = ffmpeg::codec::packet::Packet::empty();
174            while encoder.receive_packet(&mut packet).is_ok() {} // Discard these frames
175        }
176        Ok(())
177    }
178    fn get_encoder(&self) -> &Option<ffmpeg::codec::encoder::Video> {
179        &self.encoder
180    }
181}
182
impl PipewireSPA for VaapiEncoder {
    /// Build the SPA pod advertising the raw-video formats this encoder can
    /// consume, used by PipeWire to negotiate a compatible stream.
    fn get_spa_definition() -> Result<pw::spa::pod::Object> {
        Ok(pw::spa::pod::object!(
            pw::spa::utils::SpaTypes::ObjectParamFormat,
            pw::spa::param::ParamType::EnumFormat,
            // Raw video media type/subtype
            pw::spa::pod::property!(
                pw::spa::param::format::FormatProperties::MediaType,
                Id,
                pw::spa::param::format::MediaType::Video
            ),
            pw::spa::pod::property!(
                pw::spa::param::format::FormatProperties::MediaSubtype,
                Id,
                pw::spa::param::format::MediaSubtype::Raw
            ),
            // Modifier 0 — NOTE(review): presumably linear-only DMA-BUFs;
            // confirm against what the compositor actually offers.
            pw::spa::pod::property!(
                pw::spa::param::format::FormatProperties::VideoModifier,
                Long,
                0
            ),
            // Accepted pixel formats (first entry is the preferred default)
            pw::spa::pod::property!(
                pw::spa::param::format::FormatProperties::VideoFormat,
                Choice,
                Enum,
                Id,
                pw::spa::param::video::VideoFormat::NV12,
                pw::spa::param::video::VideoFormat::I420,
                pw::spa::param::video::VideoFormat::BGRA,
            ),
            pw::spa::pod::property!(
                pw::spa::param::format::FormatProperties::VideoSize,
                Choice,
                Range,
                Rectangle,
                pw::spa::utils::Rectangle {
                    width: 2560,
                    height: 1440
                }, // Default
                pw::spa::utils::Rectangle {
                    width: 1,
                    height: 1
                }, // Min
                pw::spa::utils::Rectangle {
                    width: 4096,
                    height: 4096
                } // Max
            ),
            pw::spa::pod::property!(
                pw::spa::param::format::FormatProperties::VideoFramerate,
                Choice,
                Range,
                Fraction,
                pw::spa::utils::Fraction { num: 240, denom: 1 }, // Default
                pw::spa::utils::Fraction { num: 0, denom: 1 },   // Min
                pw::spa::utils::Fraction { num: 244, denom: 1 }  // Max
            ),
        ))
    }
}
242
243impl VaapiEncoder {
244    pub(crate) fn new(width: u32, height: u32, quality: QualityPreset) -> Result<Self> {
245        let encoder_name = "h264_vaapi";
246        let encoder = Self::create_encoder(width, height, encoder_name, &quality)?;
247
248        let (frame_tx, frame_rx): (Sender<EncodedVideoFrame>, Receiver<EncodedVideoFrame>) =
249            bounded(10);
250        let filter_graph = Some(Self::create_filter_graph(&encoder, width, height)?);
251
252        Ok(Self {
253            encoder: Some(encoder),
254            width,
255            height,
256            encoder_name: encoder_name.to_string(),
257            quality,
258            encoded_frame_recv: Some(frame_rx),
259            encoded_frame_sender: frame_tx,
260            filter_graph,
261        })
262    }
263
264    fn create_encoder(
265        width: u32,
266        height: u32,
267        encoder: &str,
268        quality: &QualityPreset,
269    ) -> Result<ffmpeg::codec::encoder::Video> {
270        let encoder_codec =
271            ffmpeg::codec::encoder::find_by_name(encoder).ok_or(ffmpeg::Error::EncoderNotFound)?;
272
273        let mut encoder_ctx = ffmpeg::codec::context::Context::new_with_codec(encoder_codec)
274            .encoder()
275            .video()?;
276
277        encoder_ctx.set_width(width);
278        encoder_ctx.set_height(height);
279        encoder_ctx.set_format(ffmpeg::format::Pixel::VAAPI);
280        // Configuration inspiration from
281        // https://git.dec05eba.com/gpu-screen-recorder/tree/src/capture/xcomposite_drm.c?id=8cbdb596ebf79587a432ed40583630b6cd39ed88
282        let mut vaapi_device =
283            create_hw_device(ffmpeg_next::ffi::AVHWDeviceType::AV_HWDEVICE_TYPE_VAAPI)?;
284        let mut frame_ctx = create_hw_frame_ctx(vaapi_device)?;
285
286        unsafe {
287            let hw_frame_context = &mut *((*frame_ctx).data as *mut AVHWFramesContext);
288            hw_frame_context.width = width as i32;
289            hw_frame_context.height = height as i32;
290            hw_frame_context.sw_format = AVPixelFormat::AV_PIX_FMT_NV12;
291            hw_frame_context.format = encoder_ctx.format().into();
292            hw_frame_context.device_ref = av_buffer_ref(vaapi_device);
293            hw_frame_context.device_ctx = (*vaapi_device).data as *mut AVHWDeviceContext;
294            // Decides buffer size if we do not pop frame from the encoder we cannot
295            // keep pushing. Smaller better as we reserve less GPU memory
296            hw_frame_context.initial_pool_size = 2;
297
298            let err = av_hwframe_ctx_init(frame_ctx);
299            if err < 0 {
300                return Err(WaycapError::Init(format!(
301                    "Error trying to initialize hw frame context: {err:?}",
302                )));
303            }
304
305            (*encoder_ctx.as_mut_ptr()).hw_device_ctx = av_buffer_ref(vaapi_device);
306            (*encoder_ctx.as_mut_ptr()).hw_frames_ctx = av_buffer_ref(frame_ctx);
307
308            av_buffer_unref(&mut vaapi_device);
309            av_buffer_unref(&mut frame_ctx);
310        }
311
312        // These should be part of a config file
313        encoder_ctx.set_time_base(Rational::new(1, TIME_UNIT_NS as i32));
314
315        // Needed to insert I-Frames more frequently so we don't lose full seconds
316        // when popping frames from the front
317        encoder_ctx.set_gop(GOP_SIZE);
318
319        let encoder_params = ffmpeg::codec::Parameters::new();
320
321        let opts = Self::get_encoder_params(quality);
322
323        encoder_ctx.set_parameters(encoder_params)?;
324        let encoder = encoder_ctx.open_with(opts)?;
325        Ok(encoder)
326    }
327
328    fn get_encoder_params(quality: &QualityPreset) -> ffmpeg::Dictionary<'_> {
329        let mut opts = ffmpeg::Dictionary::new();
330        opts.set("vsync", "vfr");
331        opts.set("rc", "VBR");
332        match quality {
333            QualityPreset::Low => {
334                opts.set("qp", "30");
335            }
336            QualityPreset::Medium => {
337                opts.set("qp", "25");
338            }
339            QualityPreset::High => {
340                opts.set("qp", "20");
341            }
342            QualityPreset::Ultra => {
343                opts.set("qp", "15");
344            }
345        }
346        opts
347    }
348
349    fn create_filter_graph(
350        encoder: &ffmpeg::codec::encoder::Video,
351        width: u32,
352        height: u32,
353    ) -> Result<ffmpeg::filter::Graph> {
354        let mut graph = ffmpeg::filter::Graph::new();
355
356        let args = format!("video_size={width}x{height}:pix_fmt=bgra:time_base=1/1000000",);
357
358        let mut input = graph.add(&ffmpeg::filter::find("buffer").unwrap(), "in", &args)?;
359
360        let mut hwmap = graph.add(
361            &ffmpeg::filter::find("hwmap").unwrap(),
362            "hwmap",
363            "mode=read+write:derive_device=vaapi",
364        )?;
365
366        let scale_args = format!("w={width}:h={height}:format=nv12:out_range=tv");
367        let mut scale = graph.add(
368            &ffmpeg::filter::find("scale_vaapi").unwrap(),
369            "scale",
370            &scale_args,
371        )?;
372
373        let mut out = graph.add(&ffmpeg::filter::find("buffersink").unwrap(), "out", "")?;
374        unsafe {
375            let dev = (*encoder.as_ptr()).hw_device_ctx;
376
377            (*hwmap.as_mut_ptr()).hw_device_ctx = av_buffer_ref(dev);
378        }
379
380        input.link(0, &mut hwmap, 0);
381        hwmap.link(0, &mut scale, 0);
382        scale.link(0, &mut out, 0);
383
384        graph.validate()?;
385        log::trace!("VAAPI Graph\n{}", graph.dump());
386
387        Ok(graph)
388    }
389}
390
391impl Drop for VaapiEncoder {
392    fn drop(&mut self) {
393        if let Err(e) = self.drain() {
394            log::error!("Error while draining vaapi encoder during drop: {e:?}");
395        }
396        self.drop_processor();
397    }
398}