begin adding provision for hardware encoding functionality

Lily Tsuru 2024-10-06 04:16:13 -04:00
parent 45a8d60e7a
commit a901365656
7 changed files with 283 additions and 10 deletions

server/Cargo.lock (generated)

@@ -273,6 +273,15 @@ dependencies = [
  "typenum",
 ]
 
+[[package]]
+name = "cudarc"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38cd60a9a42ec83a2ed7effb0b1f073270264ea99da7acfc44f7e8d74dee0384"
+dependencies = [
+ "libloading",
+]
+
 [[package]]
 name = "data-encoding"
 version = "2.6.0"
@@ -1317,6 +1326,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "axum",
+ "cudarc",
  "ffmpeg-next",
  "futures",
  "futures-util",

server/Cargo.toml

@@ -23,6 +23,7 @@ ffmpeg = { version = "7.0.0", package = "ffmpeg-next" }
 rand = "0.8.5"
 serde = "1.0.209"
 serde_json = "1.0.128"
+cudarc = "0.12.1"
 
 [patch.crates-io]

encoder_thread.rs

@@ -7,6 +7,8 @@ use tokio::sync::mpsc::{self, error::TryRecvError};
 use super::ffmpeg;
 use super::h264_encoder::H264Encoder;
+use super::hwframe::HwFrameContext;
 
 pub enum EncodeThreadInput {
 	Init { size: crate::types::Size },
 	ForceKeyframe,
@@ -33,6 +35,36 @@ fn set_frame_flags(frame: &mut ffmpeg::Frame, force_keyframe: bool) {
 	}
 }
 
+fn create_frame(
+	width: u32,
+	height: u32,
+	pixel_format: ffmpeg::format::Pixel,
+	context: &mut HwFrameContext,
+) -> anyhow::Result<ffmpeg::Frame> {
+	unsafe {
+		let mut frame = ffmpeg::Frame::empty();
+
+		(*frame.as_mut_ptr()).format = pixel_format as i32;
+		(*frame.as_mut_ptr()).width = width as i32;
+		(*frame.as_mut_ptr()).height = height as i32;
+		(*frame.as_mut_ptr()).hw_frames_ctx = context.as_raw_mut();
+
+		super::check_ret(ffmpeg::sys::av_hwframe_get_buffer(
+			context.as_raw_mut(),
+			frame.as_mut_ptr(),
+			0,
+		))?;
+
+		(*frame.as_mut_ptr()).linesize[0] = (*frame.as_ptr()).width * 4;
+
+		Ok(frame)
+	}
+}
+
 fn encoder_thread_main(
 	mut rx: mpsc::Receiver<EncodeThreadInput>,
 	tx: mpsc::Sender<EncodeThreadOutput>,
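Note: the new create_frame helper allocates frames out of an AVHWFramesContext pool on the GPU rather than in system memory. A minimal usage sketch follows; it is not part of this commit, and Pixel::CUDA, the 1280x720 size, and the way the HwFrameContext reaches the encoder thread are all illustrative assumptions.

fn alloc_gpu_frame(hw_context: &mut HwFrameContext) -> anyhow::Result<ffmpeg::Frame> {
	// Pixel::CUDA marks the frame data as living in CUDA device memory; the
	// linesize that create_frame() sets assumes a 4-bytes-per-pixel software
	// layout (e.g. BGRA) underneath.
	create_frame(1280, 720, ffmpeg::format::Pixel::CUDA, hw_context)
}
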

h264_encoder.rs

@@ -1,8 +1,9 @@
 use super::ffmpeg;
+use super::hwframe::HwFrameContext;
 use anyhow::Context;
 use ffmpeg::error::EAGAIN;
 
-use ffmpeg::codec as lavc; // lavc
+use ffmpeg::{codec as lavc, packet}; // lavc
 
 use crate::types::Size;
@@ -30,8 +31,6 @@ fn create_context_and_set_common_parameters(
 	let mut video_encoder_context = create_context_from_codec(encoder)?.encoder().video()?;
 
 	video_encoder_context.set_width(size.width);
 	video_encoder_context.set_height(size.height);
 	video_encoder_context.set_frame_rate(Some(ffmpeg::Rational(1, max_framerate as i32)));
@@ -63,8 +62,15 @@
 /// A simple H.264 encoder. Currently software only, however
 /// pieces are being put in place to eventually allow HW encoding.
-pub struct H264Encoder {
+pub enum H264Encoder {
+	Software {
 		encoder: ffmpeg::encoder::video::Encoder,
+	},
+
+	Nvenc {
+		encoder: ffmpeg::encoder::video::Encoder,
+		hw_context: HwFrameContext,
+	},
 }
 
 impl H264Encoder {
@@ -107,21 +113,68 @@ impl H264Encoder {
 			.open_as_with(encoder, dict)
 			.with_context(|| "While opening x264 video codec")?;
 
-		Ok(Self { encoder: encoder })
+		Ok(Self::Software { encoder: encoder })
+	}
+
+	// FIXME: It's a bit pointless to have this have a mut borrow,
+	// but you'll probably have a mutable borrow on this already..
+	pub fn is_hardware(&mut self) -> bool {
+		match self {
+			Self::Software { .. } => false,
+			Self::Nvenc { .. } => true,
+		}
 	}
 
 	pub fn send_frame(&mut self, frame: &ffmpeg::Frame) {
-		self.encoder.send_frame(frame).unwrap();
+		match self {
+			Self::Software { encoder } => {
+				encoder.send_frame(frame).unwrap();
+			}
+
+			Self::Nvenc {
+				encoder,
+				hw_context,
+			} => {
+				// Realistically this should be the same right?
+				todo!("Requires support.");
+			}
+		}
 	}
 
 	pub fn send_eof(&mut self) {
-		self.encoder.send_eof().unwrap();
+		match self {
+			Self::Software { encoder } => {
+				encoder.send_eof().unwrap();
+			}
+
+			Self::Nvenc {
+				encoder,
+				hw_context,
+			} => {
+				// Realistically this should be the same right?
+				todo!("Requires support.");
+			}
+		}
+	}
+
+	fn receive_packet_impl(&mut self, packet: &mut ffmpeg::Packet) -> Result<(), ffmpeg::Error> {
+		return match self {
+			Self::Software { encoder } => encoder.receive_packet(packet),
+
+			Self::Nvenc {
+				encoder,
+				hw_context,
+			} => {
+				// Realistically this should be the same right?
+				todo!("Requires support.");
+			}
+		};
 	}
 
 	// Should this return a Result<ControlFlow> so we can make it easier to know when to continue?
 	pub fn receive_packet(&mut self, packet: &mut ffmpeg::Packet) -> anyhow::Result<()> {
 		loop {
-			match self.encoder.receive_packet(packet) {
+			match self.receive_packet_impl(packet) {
 				Ok(_) => break,
 				Err(ffmpeg::Error::Other { errno }) => {
 					if errno != EAGAIN {
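Note: the Nvenc arms are stubbed with todo!() for now. Assuming the hardware frames context ends up attached to the encoder's AVCodecContext (as FFmpeg's NVENC path requires) and the input frames already live in CUDA memory, those arms will most likely collapse into the same libavcodec calls the Software arms make, which is also what the in-code comments hint at. A minimal sketch of that assumption, written as standalone free functions; none of this is in the commit.

fn nvenc_send_frame(encoder: &mut ffmpeg::encoder::video::Encoder, frame: &ffmpeg::Frame) {
	// Same call as the Software arm; `frame` is expected to carry
	// AV_PIX_FMT_CUDA data allocated from the hardware frame pool.
	encoder.send_frame(frame).unwrap();
}

fn nvenc_receive_packet(
	encoder: &mut ffmpeg::encoder::video::Encoder,
	packet: &mut ffmpeg::Packet,
) -> Result<(), ffmpeg::Error> {
	// Encoded packets come back in ordinary system memory, so draining them
	// is also identical to the software path.
	encoder.receive_packet(packet)
}
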

hwdevice.rs

@@ -0,0 +1,78 @@
+use std::ptr::null_mut;
+
+use super::check_ret;
+use super::ffmpeg;
+
+pub struct CudaDeviceContext {
+	buffer: *mut ffmpeg::sys::AVBufferRef,
+}
+
+impl CudaDeviceContext {
+	fn new(buffer: *mut ffmpeg::sys::AVBufferRef) -> Self {
+		Self { buffer }
+	}
+
+	// pub fn as_device_mut(&mut self) -> &mut ffmpeg::sys::AVHWDeviceContext {
+	// 	unsafe { &mut *((*self.buffer).data as *mut ffmpeg::sys::AVHWDeviceContext) }
+	// }
+
+	// pub fn as_device(&self) -> &ffmpeg::sys::AVHWDeviceContext {
+	// 	unsafe { &*((*self.buffer).data as *const ffmpeg::sys::AVHWDeviceContext) }
+	// }
+
+	pub fn as_raw_mut(&mut self) -> &mut ffmpeg::sys::AVBufferRef {
+		unsafe { &mut *self.buffer }
+	}
+
+	// pub fn as_raw(&self) -> &ffmpeg::sys::AVBufferRef {
+	// 	unsafe { &*self.buffer }
+	// }
+}
+
+pub struct CudaDeviceContextBuilder {
+	buffer: *mut ffmpeg::sys::AVBufferRef,
+}
+
+impl CudaDeviceContextBuilder {
+	pub fn new() -> Result<Self, String> {
+		let buffer = unsafe { ffmpeg::sys::av_hwdevice_ctx_alloc(ffmpeg::sys::AVHWDeviceType::AV_HWDEVICE_TYPE_CUDA) };
+		if buffer.is_null() {
+			return Err("could not allocate a hwdevice".to_string());
+		}
+
+		Ok(Self { buffer })
+	}
+
+	pub fn build(mut self) -> Result<CudaDeviceContext, ffmpeg::Error> {
+		check_ret(unsafe { ffmpeg::sys::av_hwdevice_ctx_init(self.buffer) })?;
+		let result = Ok(CudaDeviceContext::new(self.buffer));
+		self.buffer = null_mut();
+		result
+	}
+
+	pub fn set_cuda_context(mut self, context: ffmpeg::sys::CUcontext) -> Self {
+		unsafe {
+			(*(self.as_device_mut().hwctx as *mut ffmpeg::sys::AVCUDADeviceContext)).cuda_ctx = context;
+		}
+		self
+	}
+
+	pub fn as_device_mut(&mut self) -> &mut ffmpeg::sys::AVHWDeviceContext {
+		unsafe { &mut *((*self.buffer).data as *mut ffmpeg::sys::AVHWDeviceContext) }
+	}
+
+	// pub fn as_device(&self) -> &ffmpeg::sys::AVHWDeviceContext {
+	// 	unsafe { &*((*self.buffer).data as *const ffmpeg::sys::AVHWDeviceContext) }
+	// }
+
+	// pub fn as_raw_mut(&mut self) -> &mut ffmpeg::sys::AVBufferRef {
+	// 	unsafe { &mut *self.buffer }
+	// }
+
+	// pub fn as_raw(&self) -> &ffmpeg::sys::AVBufferRef {
+	// 	unsafe { &*self.buffer }
+	// }
+}
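Note: CudaDeviceContextBuilder wraps av_hwdevice_ctx_alloc/av_hwdevice_ctx_init and lets an externally created CUDA driver context be handed to FFmpeg. A hypothetical wiring sketch, not part of this commit: presumably the CUcontext will eventually come from the new cudarc dependency, which is left out of scope here, and the `super::hwdevice` path assumes the sketch sits in a sibling module.

use super::ffmpeg;
use super::hwdevice::{CudaDeviceContext, CudaDeviceContextBuilder};

fn make_device_context(cuda_ctx: ffmpeg::sys::CUcontext) -> anyhow::Result<CudaDeviceContext> {
	let device = CudaDeviceContextBuilder::new()
		.map_err(|e| anyhow::anyhow!(e))? // new() reports allocation failure as a String
		.set_cuda_context(cuda_ctx)
		.build()?; // runs av_hwdevice_ctx_init
	Ok(device)
}
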

hwframe.rs

@@ -0,0 +1,88 @@
+use std::ptr::null_mut;
+
+use super::ffmpeg;
+use ffmpeg::format::Pixel;
+
+use super::{check_ret, hwdevice::CudaDeviceContext};
+
+pub struct HwFrameContext {
+	_cuda_device_context: CudaDeviceContext,
+	buffer: *mut ffmpeg::sys::AVBufferRef,
+}
+
+impl HwFrameContext {
+	fn new(cuda_device_context: CudaDeviceContext, buffer: *mut ffmpeg::sys::AVBufferRef) -> Self {
+		Self { _cuda_device_context: cuda_device_context, buffer }
+	}
+
+	// pub fn as_context_mut(&mut self) -> &mut ffmpeg::sys::AVHWFramesContext {
+	// 	unsafe { &mut *((*self.buffer).data as *mut ffmpeg::sys::AVHWFramesContext) }
+	// }
+
+	// pub fn as_context(&self) -> &ffmpeg::sys::AVHWFramesContext {
+	// 	unsafe { &*((*self.buffer).data as *const ffmpeg::sys::AVHWFramesContext) }
+	// }
+
+	pub fn as_raw_mut(&mut self) -> &mut ffmpeg::sys::AVBufferRef {
+		unsafe { &mut *self.buffer }
+	}
+
+	// pub fn as_raw(&self) -> &ffmpeg::sys::AVBufferRef {
+	// 	unsafe { &*self.buffer }
+	// }
+}
+
+unsafe impl Send for HwFrameContext { }
+
+pub struct HwFrameContextBuilder {
+	cuda_device_context: CudaDeviceContext,
+	buffer: *mut ffmpeg::sys::AVBufferRef,
+}
+
+impl HwFrameContextBuilder {
+	pub fn new(mut cuda_device_context: CudaDeviceContext) -> Result<Self, String> {
+		let buffer = unsafe { ffmpeg::sys::av_hwframe_ctx_alloc(cuda_device_context.as_raw_mut()) };
+		if buffer.is_null() {
+			return Err("could not allocate a hwframe".to_string());
+		}
+
+		Ok(Self { cuda_device_context, buffer })
+	}
+
+	pub fn build(mut self) -> Result<HwFrameContext, ffmpeg::Error> {
+		check_ret(unsafe { ffmpeg::sys::av_hwframe_ctx_init(self.buffer) })?;
+		let result = Ok(HwFrameContext::new(self.cuda_device_context, self.buffer));
+		self.buffer = null_mut();
+		result
+	}
+
+	pub fn set_width(mut self, width: u32) -> Self {
+		self.as_frame_mut().width = width as i32;
+		self
+	}
+
+	pub fn set_height(mut self, height: u32) -> Self {
+		self.as_frame_mut().height = height as i32;
+		self
+	}
+
+	pub fn set_sw_format(mut self, sw_format: Pixel) -> Self {
+		self.as_frame_mut().sw_format = sw_format.into();
+		self
+	}
+
+	pub fn set_format(mut self, format: Pixel) -> Self {
+		self.as_frame_mut().format = format.into();
+		self
+	}
+
+	pub fn as_frame_mut(&mut self) -> &mut ffmpeg::sys::AVHWFramesContext {
+		unsafe { &mut *((*self.buffer).data as *mut ffmpeg::sys::AVHWFramesContext) }
+	}
+
+	// pub fn as_frame(&self) -> &ffmpeg::sys::AVHWFramesContext {
+	// 	unsafe { &*((*self.buffer).data as *const ffmpeg::sys::AVHWFramesContext) }
+	// }
+}
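Note: HwFrameContextBuilder builds the actual CUDA frame pool on top of the device context from hwdevice.rs. A hypothetical continuation of the sketch above, again not in this commit: the 1280x720 size and the BGRA software format are illustrative assumptions, chosen only to match the 4-bytes-per-pixel linesize that create_frame() in encoder_thread.rs assumes.

use super::ffmpeg;
use super::hwdevice::CudaDeviceContext;
use super::hwframe::{HwFrameContext, HwFrameContextBuilder};

fn make_frame_context(device: CudaDeviceContext) -> anyhow::Result<HwFrameContext> {
	let frames = HwFrameContextBuilder::new(device)
		.map_err(|e| anyhow::anyhow!(e))?
		.set_width(1280)
		.set_height(720)
		// The frames are CUDA device memory on the outside...
		.set_format(ffmpeg::format::Pixel::CUDA)
		// ...with a BGRA layout underneath.
		.set_sw_format(ffmpeg::format::Pixel::BGRA)
		.build()?; // runs av_hwframe_ctx_init
	Ok(frames)
}
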

mod.rs

@@ -5,5 +5,16 @@ pub mod h264_encoder;
 /// Re-export of `ffmpeg_the_third` crate in an infinitely less obtuse name.
 pub use ffmpeg as ffmpeg;
 
+pub mod hwdevice;
+pub mod hwframe;
+
 pub use encoder_thread::*;
+
+// from hgaiser/moonshine
+pub fn check_ret(error_code: i32) -> Result<(), ffmpeg::Error> {
+	if error_code != 0 {
+		return Err(ffmpeg::Error::from(error_code));
+	}
+
+	Ok(())
+}