diff --git a/server/src/video/encoder_thread.rs b/server/src/video/encoder_thread.rs
index 49f11d1..d656e24 100644
--- a/server/src/video/encoder_thread.rs
+++ b/server/src/video/encoder_thread.rs
@@ -35,7 +35,7 @@ fn set_frame_flags(frame: &mut ffmpeg::Frame, force_keyframe: bool) {
 	}
 }
 
-fn create_frame(
+fn create_hardware_frame(
 	width: u32,
 	height: u32,
 	pixel_format: ffmpeg::format::Pixel,
@@ -89,7 +89,6 @@ fn encoder_thread_main(
 					force_keyframe = false;
 				}
 
-
 
 				encoder = Some(H264Encoder::new_nvenc(
 					&dev,
@@ -98,13 +97,6 @@ fn encoder_thread_main(
 					5 * (1000 * 1000),
 				)?);
 
-
-				let mut producer_frame_locked = frame.lock().expect("Couldn't lock producer frame");
-				let mut producer_frame = producer_frame_locked.as_mut().expect("NOOOO");
-
-				unsafe {
-					(*producer_frame.as_mut_ptr()).hw_frames_ctx = encoder.as_mut().unwrap().get_hw_context().as_raw_mut();
-				}
 			}
 
 			EncodeThreadInput::ForceKeyframe => {
@@ -125,8 +117,7 @@ fn encoder_thread_main(
 			set_frame_flags(producer_frame, force_keyframe);
 
 			unsafe {
-				(*producer_frame.as_mut_ptr()).pts = frame_number as i64;
-
+				(*producer_frame.as_mut_ptr()).pts = frame_number as i64;
 			}
 
 			if enc.is_hardware() {
diff --git a/server/src/video/h264_encoder.rs b/server/src/video/h264_encoder.rs
index 7dfeb3e..c7693ff 100644
--- a/server/src/video/h264_encoder.rs
+++ b/server/src/video/h264_encoder.rs
@@ -6,7 +6,6 @@ use ffmpeg::error::EAGAIN;
 use ffmpeg::{codec as lavc, packet}; // lavc
 
 
-
 use crate::types::Size;
 
 /// this is required for libx264 to like. Work
@@ -37,12 +36,12 @@ fn create_context_and_set_common_parameters(
 	video_encoder_context.set_height(size.height);
 	video_encoder_context.set_frame_rate(Some(ffmpeg::Rational(1, max_framerate as i32)));
 
-	video_encoder_context.set_bit_rate(bitrate);
-	//video_encoder_context.set_max_bit_rate(bitrate);
+	video_encoder_context.set_bit_rate(bitrate / 4);
+	video_encoder_context.set_max_bit_rate(bitrate);
 
 	// qp TODO:
-	video_encoder_context.set_qmax(30);
-	video_encoder_context.set_qmin(35);
+	//video_encoder_context.set_qmax(30);
+	//video_encoder_context.set_qmin(35);
 
 	video_encoder_context.set_time_base(ffmpeg::Rational(1, max_framerate as i32).invert());
 	video_encoder_context.set_format(ffmpeg::format::Pixel::YUV420P);
@@ -71,7 +70,14 @@ pub enum H264Encoder {
 
 	Nvenc {
 		encoder: ffmpeg::encoder::video::Encoder,
-		hw_context: HwFrameContext,
+		// FIXME: This will be needed if the user wants to encode
+		// frames always stored in GPU memory. For now we let ffmpeg upload
+		// and download frames to the GPU, but at some point
+		// it would be a good idea to have, say,
+		// new_nvenc_hwframe(dev, ...)
+		// new_nvenc_cpuframe(...) (has the same behaviour as current new_nvenc)
+
+		//hw_context: HwFrameContext,
 	},
 }
 
@@ -111,7 +117,6 @@ impl H264Encoder {
 
 		dict.set("forced-idr", "1");
 
-
 		let encoder = video_encoder_context
 			.open_as_with(encoder, dict)
 			.with_context(|| "While opening x264 video codec")?;
@@ -124,33 +129,34 @@ impl H264Encoder {
 		cuda_device: &CudaDevice,
 		size: Size,
 		max_framerate: u32,
-		bitrate: usize
+		bitrate: usize,
	) -> anyhow::Result<Self> {
 		let (mut encoder, mut video_encoder_context) =
 			create_context_and_set_common_parameters("h264_nvenc", &size, max_framerate, bitrate)
-			.with_context(|| "while trying to create encoder")?;
-
+				.with_context(|| "while trying to create encoder")?;
+
+		/*
+		(See FIXMEs above)
+
 		let cuda_device_context = super::hwdevice::CudaDeviceContextBuilder::new()?
-			.set_cuda_context((*cuda_device.cu_primary_ctx()) as *mut _)
-			.build()
+			.set_cuda_context((*cuda_device.cu_primary_ctx()) as *mut _)
+			.build()
 			.with_context(|| "while trying to create CUDA device context")?;
 
-		let mut hw_frame_context = super::hwframe::HwFrameContextBuilder::new(cuda_device_context)?
-			.set_width(size.width)
-			.set_height(size.height)
-			.set_sw_format(ffmpeg::format::Pixel::ZRGB32)
-			.set_format(ffmpeg::format::Pixel::CUDA)
-			.build()
+		let mut hw_frame_context = super::hwframe::HwFrameContextBuilder::new(cuda_device_context)?
+			.set_width(size.width)
+			.set_height(size.height)
+			.set_sw_format(ffmpeg::format::Pixel::ZRGB32)
+			.set_format(ffmpeg::format::Pixel::CUDA)
+			.build()
 			.with_context(|| "while trying to create CUDA frame context")?;
 
-		// lol you do not need unsafe code to set this my guy.
+		*/
+
 		video_encoder_context.set_format(ffmpeg::format::Pixel::ZRGB32);
-
-		unsafe {
-			//(*video_encoder_context.as_mut_ptr()).hw_frames_ctx = hw_frame_context.as_raw_mut();
-			//(*video_encoder_context.as_mut_ptr()).delay = 0;
-			//(*video_encoder_context.as_mut_ptr()).refs = 0;
-		}
+
+		video_encoder_context.set_qmin(38);
+		video_encoder_context.set_qmax(35);
 
 		// set h264_nvenc options
 		let mut dict = ffmpeg::Dictionary::new();
@@ -161,19 +167,20 @@ impl H264Encoder {
 		dict.set("profile", "main");
 
 		// TODO:
-		dict.set("crf", "43");
-		dict.set("crf_max", "48");
+		dict.set("rc", "vbr");
+		//dict.set("qp", "45");
 
 		dict.set("forced-idr", "1");
-
+		// damn you
 		dict.set("delay", "0");
+		dict.set("zerolatency", "1");
 
 
 		let encoder = video_encoder_context
-			.open_as_with(encoder, dict)
-			.with_context(|| "While opening h264_nvenc video codec")?;
-
-		Ok(Self::Nvenc { encoder: encoder, hw_context: hw_frame_context })
+			.open_as_with(encoder, dict)
+			.with_context(|| "While opening h264_nvenc video codec")?;
+
+		Ok(Self::Nvenc { encoder: encoder })
 	}
 
 	// FIXME: It's a bit pointless to have this have a mut borrow,
@@ -185,12 +192,12 @@ impl H264Encoder {
 		}
 	}
 
-	pub fn get_hw_context(&mut self) -> &mut HwFrameContext {
-		match self {
-			Self::Nvenc { encoder: _, hw_context } => hw_context,
-			_ => panic!("should not use H264Encoder::get_hw_context() on a Software encoder")
-		}
-	}
+	//pub fn get_hw_context(&mut self) -> &mut HwFrameContext {
+	//	match self {
+	//		Self::Nvenc { encoder: _, hw_context } => hw_context,
+	//		_ => panic!("should not use H264Encoder::get_hw_context() on a Software encoder")
+	//	}
+	//}
 
 	pub fn send_frame(&mut self, frame: &ffmpeg::Frame) {
 		match self {
@@ -198,10 +205,7 @@
 				encoder.send_frame(frame).unwrap();
 			}
 
-			Self::Nvenc {
-				encoder,
-				hw_context,
-			} => {
+			Self::Nvenc { encoder } => {
 				// Realistically this should be the same right?
 				encoder.send_frame(frame).unwrap();
 				//todo!("Requires support.");
@@ -215,13 +219,10 @@
 				encoder.send_eof().unwrap();
 			}
 
-			Self::Nvenc {
-				encoder,
-				hw_context,
-			} => {
+			Self::Nvenc { encoder } => {
 				// Realistically this should be the same right?
 				encoder.send_eof().unwrap();
-				// todo!("Requires support.");
+				// todo!("Requires support.");
 			}
 		}
 	}
@@ -230,14 +231,7 @@ impl H264Encoder {
 
 		return match self {
 			Self::Software { encoder } => encoder.receive_packet(packet),
-			Self::Nvenc {
-				encoder,
-				hw_context,
-			} => {
-				// Realistically this should be the same right?
-				encoder.receive_packet(packet)
-				//todo!("Requires support.")
-			}
+			Self::Nvenc { encoder } => encoder.receive_packet(packet),
 		};
 	}
 
diff --git a/server/src/video/hwframe.rs b/server/src/video/hwframe.rs
index b642a87..e58f72a 100644
--- a/server/src/video/hwframe.rs
+++ b/server/src/video/hwframe.rs
@@ -44,7 +44,7 @@ impl HwFrameContextBuilder {
 	pub fn new(mut cuda_device_context: CudaDeviceContext) -> anyhow::Result<Self> {
 		let buffer = unsafe { ffmpeg::sys::av_hwframe_ctx_alloc(cuda_device_context.as_raw_mut()) };
 		if buffer.is_null() {
-			return Err(anyhow::anyhow!("could not allocate a hwframe"));
+			return Err(anyhow::anyhow!("could not allocate a hwframe context"));
 		}
 
 		Ok(Self { cuda_device_context, buffer })
diff --git a/server/src/video/mod.rs b/server/src/video/mod.rs
index e5b8689..518c00b 100644
--- a/server/src/video/mod.rs
+++ b/server/src/video/mod.rs
@@ -2,7 +2,7 @@ pub mod encoder_thread;
 pub mod h264_encoder;
 //pub mod lc_muxer;
 
-/// Re-export of `ffmpeg_the_third` crate in an infinitely less obtuse name.
+/// Re-export of `ffmpeg` crate.
 pub use ffmpeg as ffmpeg;
 
 pub mod hwdevice;