Compare commits

...

5 commits

- 9e2324fc4a move app code to new module (2024-08-06 22:08:37 -04:00)
  Mostly to keep it clean.
- b377686c4b remove some chatty debug logs that don't matter anymore (2024-08-06 22:03:03 -04:00)
- 3be054c390 Implement need_fullpath support (2024-08-06 21:57:47 -04:00)
  This makes it much less painful to specify disk images (as well as reducing memory consumption when loading!)
- 16e6875228 Make getting AV info lazy (2024-08-06 21:44:41 -04:00)
  Some cores (Dolphin) don't initialize AV info, or use resources that are not allocated yet, until after a game is loaded.
- 4e8ad7616f retrovnc: cleanup and safety annotations (2024-08-06 08:07:31 -04:00)
6 changed files with 500 additions and 412 deletions


@ -1,14 +1,15 @@
# retrovnc
a headless Libretro frontend that exports a VNC server.
A fully headless Libretro frontend that exports a VNC server for display and input.
This is mostly a "fun project", and probably isn't a very great solution for remote gaming.
In layman's terms, this lets you play games over VNC. Which isn't all that great, but hey.
This is mostly a "fun project" and consists mostly of code I already wrote.
# Dependencies
- A C++ toolchain
- A Rust toolchain.
- Maybe libvncserver (I'm not sure; it seems like the package can build it).
- A Rust toolchain.
# Building
@ -16,8 +17,9 @@ This is mostly a "fun project", and probably isn't a very great solution for rem
# Usage
`$ retrovnc --core <CORE> --rom <ROM>`
`$ retrovnc --core <CORE> --rom <ROM>` (see `retrovnc --help` for more options)
For disc-based titles it is probably a good idea to pass the cuesheet file. I will implement stuff later to make this less annoying.
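For example, with a hypothetical PlayStation core and a bin/cue disc dump (core and paths are illustrative): `$ retrovnc --core ./mednafen_psx_libretro.so --rom ./game.cue --rfb_port 5900`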


@ -68,6 +68,7 @@ pub struct Frontend {
pub(crate) game_loaded: bool,
pub(crate) av_info: Option<SystemAvInfo>,
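/// The core's system info, fetched lazily and cached by [`Frontend::get_system_info`].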
pub(crate) sys_info: Option<SystemInfo>,
/// The core's requested pixel format.
/// TODO: HW accel. (or just not care)
@ -114,6 +115,7 @@ impl Frontend {
game_loaded: false,
av_info: None,
sys_info: None,
pixel_format: PixelFormat::RGB565,
converted_pixel_buffer: Vec::new(),
@ -327,13 +329,6 @@ impl Frontend {
);
info!("Core {} loaded", path.as_ref().display());
// Get AV info
// Like core API, we have to MaybeUninit again.
let mut av_info: MaybeUninit<SystemAvInfo> = MaybeUninit::uninit();
(core_api_ref.retro_get_system_av_info)(av_info.as_mut_ptr());
self.av_info = Some(av_info.assume_init());
}
Ok(())
@ -381,32 +376,46 @@ impl Frontend {
return Err(Error::CoreNotLoaded);
}
// For now I'm only implementing the gameinfo garbage that
// makes you read the whole file in. Later on I'll look into VFS
// support; but for now, it seems more cores will probably
// play ball with this.. which sucks :(
// I'm aware this is nasty but bleh
let slice = path.as_ref().as_os_str().as_bytes();
let path_string = CString::new(slice).expect("shouldn't fail");
let contents = fs::read(path)?;
let gameinfo = GameInfo {
path: path_string.as_ptr(),
data: contents.as_ptr() as *const ffi::c_void,
size: contents.len(),
meta: std::ptr::null(),
};
let system_info = self.get_system_info()?;
let core_api = self.core_api.as_ref().unwrap();
unsafe {
if !(core_api.retro_load_game)(&gameinfo) {
return Err(Error::RomLoadFailed);
}
let mut gameinfo = GameInfo {
path: path_string.as_ptr(),
data: std::ptr::null(),
size: 0,
meta: std::ptr::null(),
};
self.game_loaded = true;
Ok(())
// If the core does not need fullpath, then
// read the file data into a buffer we give to the core.
// This is pretty wasteful but works.
if !system_info.need_fullpath {
let contents = fs::read(path)?;
gameinfo.data = contents.as_ptr() as *const ffi::c_void;
gameinfo.size = contents.len();
unsafe {
if !(core_api.retro_load_game)(&gameinfo) {
return Err(Error::RomLoadFailed);
}
self.game_loaded = true;
Ok(())
}
} else {
unsafe {
if !(core_api.retro_load_game)(&gameinfo) {
return Err(Error::RomLoadFailed);
}
self.game_loaded = true;
Ok(())
}
}
}
@ -436,7 +445,39 @@ impl Frontend {
if let Some(av) = self.av_info.as_ref() {
Ok(av.clone())
} else {
Err(Error::NoAvInfo)
// Get AV info
// Like core API, we have to MaybeUninit again.
let mut av_info: MaybeUninit<SystemAvInfo> = MaybeUninit::uninit();
unsafe {
let core_api = self.core_api.as_ref().unwrap();
(core_api.retro_get_system_av_info)(av_info.as_mut_ptr());
self.av_info = Some(av_info.assume_init());
}
Ok(self.av_info.as_ref().unwrap().clone())
}
}
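/// Returns the core's [`SystemInfo`], querying the core and caching it on first use.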
pub fn get_system_info(&mut self) -> Result<SystemInfo> {
if !self.core_loaded() {
return Err(Error::CoreNotLoaded);
}
if let Some(sys) = self.sys_info.as_ref() {
Ok(sys.clone())
} else {
let mut sys_info: MaybeUninit<SystemInfo> = MaybeUninit::uninit();
// Actually get the system info
unsafe {
let core_api = self.core_api.as_ref().unwrap();
(core_api.retro_get_system_info)(sys_info.as_mut_ptr());
self.sys_info = Some(sys_info.assume_init());
}
Ok(self.sys_info.as_ref().unwrap().clone())
}
}


@ -6,7 +6,7 @@ use rgb565::Rgb565;
use std::ffi;
use tracing::{debug, error, info};
use tracing::{debug, error};
/// This function is used with HW OpenGL cores to transfer the current FBO's ID.
unsafe extern "C" fn hw_gl_get_framebuffer() -> usize {
@ -110,7 +110,9 @@ pub(crate) unsafe extern "C" fn environment_callback(
let hw_render_context_type =
HwContextType::from_uint(hw_render.context_type).expect("Uh oh!");
if hw_render_context_type != HwContextType::OpenGL && hw_render_context_type != HwContextType::OpenGLCore {
if hw_render_context_type != HwContextType::OpenGL
&& hw_render_context_type != HwContextType::OpenGLCore
{
error!(
"Core is trying to request an context type we don't support ({:?}), failing",
hw_render_context_type
@ -118,8 +120,6 @@ pub(crate) unsafe extern "C" fn environment_callback(
return false;
}
info!("Core requesting context type {:?}", hw_render_context_type);
let init_data = (*(*FRONTEND).interface).hw_gl_init();
hw_render.get_current_framebuffer = hw_gl_get_framebuffer;
@ -239,9 +239,8 @@ pub(crate) unsafe extern "C" fn video_refresh_callback(
(pitch * height as usize) as usize,
);
// Resize the pixel buffer if we need to
// Resize the conversion buffer if we need to
if (pitch * height as usize) as usize != (*FRONTEND).converted_pixel_buffer.len() {
info!("Resizing RGB565 -> RGBA buffer");
(*FRONTEND)
.converted_pixel_buffer
.resize((pitch * height as usize) as usize, 0);

crates/retrovnc/src/app.rs (new file, 394 additions)

@ -0,0 +1,394 @@
use crate::egl;
use crate::rfb::*;
use std::{path::Path, time::Duration};
use retro_frontend::{
frontend::{Frontend, FrontendInterface, HwGlInitData},
input_devices::{InputDevice, RetroPad},
};
use anyhow::Result;
/// Called by OpenGL. We use this to dump errors.
extern "system" fn opengl_message_callback(
source: gl::types::GLenum,
_type: gl::types::GLenum,
id: gl::types::GLuint,
_severity: gl::types::GLenum,
_length: gl::types::GLsizei,
message: *const gl::types::GLchar,
_user: *mut std::ffi::c_void,
) {
unsafe {
let message = std::ffi::CStr::from_ptr(message);
if _type == gl::DEBUG_TYPE_ERROR {
tracing::error!(
"OpenGL error: {:?} (res {:08x}, id = {:08x}, source = {:08x})",
message,
_type,
id,
source
);
}
}
}
pub struct App {
/// The frontend.
frontend: Option<Box<Frontend>>,
/// VNC server
rfb_server: Box<RfbServer>,
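/// The single (hardcoded) RetroPad used for input.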
pad: RetroPad,
/// True if HW rendering is active.
hw_render: bool,
// EGL state
egl_display: egl::types::EGLDisplay,
egl_context: egl::types::EGLContext,
// OpenGL object IDs
texture_id: gl::types::GLuint,
renderbuffer_id: gl::types::GLuint,
fbo_id: gl::types::GLuint,
/// Cached readback buffer.
readback_buffer: Vec<u32>,
}
impl App {
pub fn new(rfb_config: RfbServerConfig) -> Result<Box<Self>> {
let mut boxed = Box::new(Self {
frontend: None,
rfb_server: RfbServer::new(rfb_config)?,
pad: RetroPad::new(),
hw_render: false,
egl_display: std::ptr::null(),
egl_context: std::ptr::null(),
texture_id: 0,
renderbuffer_id: 0,
fbo_id: 0,
readback_buffer: Vec::new(),
});
// SAFETY: The boxed allocation will never drop since the main loop always loops forever.
// Even if it did, the only way to touch the pointer involves the frontend library calling retro_run,
// and the core calling one of the given callbacks. Therefore this is gnarly, but "fine".
//
// I'm still not really sure how to tell the borrow checker that this is alright,
// short of Box::leak() (which I don't want to do, since ideally I'd like actual cleanup to occur).
let obj = &mut *boxed as &mut dyn FrontendInterface;
boxed.frontend = Some(Frontend::new(obj as *mut dyn FrontendInterface));
Ok(boxed)
}
fn get_frontend(&mut self) -> &mut Frontend {
self.frontend.as_mut().unwrap()
}
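/// One-time setup: plugs in the hardcoded RetroPad and initializes the display.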
pub fn init(&mut self) {
// Currently retrovnc just hardcodes the assumption of a single RetroPad.
// SAFETY: This too won't ever be use-after-free'd, because the only opportunity for that
// goes away on drop as well. That's a bit flaky reasoning-wise, but it is true.
//
// In all honesty, I'm not sure this even needs to be a *mut, so I could see if
// making it an immutable reference works.
let pad = &mut self.pad as *mut dyn InputDevice;
self.get_frontend().plug_input_device(0, pad);
// Initialize the display
self.init_display();
}
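/// Starts the VNC server, sized to the core's reported base geometry.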
fn init_display(&mut self) {
let av_info = self.get_frontend().get_av_info().expect("No AV info");
// Start VNC server.
{
let server = &mut self.rfb_server;
tracing::info!("Starting VNC server");
server.start();
server.resize(
av_info.geometry.base_width as u16,
av_info.geometry.base_height as u16,
);
}
}
pub fn load_core<P: AsRef<Path>>(&mut self, path: P) -> Result<()> {
// Unload an existing core.
if self.get_frontend().core_loaded() {
let _ = self.get_frontend().unload_core();
}
self.get_frontend().load_core(path)?;
Ok(())
}
pub fn load_game<P: AsRef<Path>>(&mut self, path: P) -> Result<()> {
self.get_frontend().load_game(path)?;
Ok(())
}
/// Initializes a headless EGL context for OpenGL rendering.
unsafe fn hw_gl_egl_init(&mut self) {
self.egl_display = egl::get_device_platform_display();
self.egl_context = {
const EGL_CONFIG_ATTRIBUTES: [egl::types::EGLenum; 13] = [
egl::SURFACE_TYPE,
egl::PBUFFER_BIT,
egl::BLUE_SIZE,
8,
egl::RED_SIZE,
8,
egl::GREEN_SIZE,
8,
egl::DEPTH_SIZE,
8,
egl::RENDERABLE_TYPE,
egl::OPENGL_BIT,
egl::NONE,
];
let mut egl_major: egl::EGLint = 0;
let mut egl_minor: egl::EGLint = 0;
let mut egl_config_count: egl::EGLint = 0;
let mut config: egl::types::EGLConfig = std::ptr::null();
egl::Initialize(
self.egl_display,
std::ptr::addr_of_mut!(egl_major),
std::ptr::addr_of_mut!(egl_minor),
);
egl::ChooseConfig(
self.egl_display,
EGL_CONFIG_ATTRIBUTES.as_ptr() as *const egl::EGLint,
std::ptr::addr_of_mut!(config),
1,
std::ptr::addr_of_mut!(egl_config_count),
);
egl::BindAPI(egl::OPENGL_API);
let context =
egl::CreateContext(self.egl_display, config, egl::NO_CONTEXT, std::ptr::null());
// Make the context current on the display so OpenGL routines "just work"
egl::MakeCurrent(self.egl_display, egl::NO_SURFACE, egl::NO_SURFACE, context);
context
};
}
/// Destroys EGL resources.
unsafe fn hw_gl_egl_exit(&mut self) {
if self.hw_render {
// Delete FBO
self.hw_gl_delete_fbo();
// Release the EGL context we created before destroying it
egl::MakeCurrent(
self.egl_display,
egl::NO_SURFACE,
egl::NO_SURFACE,
egl::NO_CONTEXT,
);
egl::DestroyContext(self.egl_display, self.egl_context);
egl::Terminate(self.egl_display);
self.egl_display = std::ptr::null();
self.egl_context = std::ptr::null();
}
}
/// Deletes all OpenGL FBO resources (the FBO itself, the render texture, and the renderbuffer used for depth)
fn hw_gl_delete_fbo(&mut self) {
unsafe {
gl::DeleteFramebuffers(1, std::ptr::addr_of_mut!(self.fbo_id));
self.fbo_id = 0;
gl::DeleteTextures(1, std::ptr::addr_of_mut!(self.texture_id));
self.texture_id = 0;
gl::DeleteRenderbuffers(1, std::ptr::addr_of_mut!(self.renderbuffer_id));
self.renderbuffer_id = 0;
}
}
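/// (Re)creates the offscreen FBO the core renders into, along with its color texture and depth renderbuffer.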
fn hw_gl_create_fbo(&mut self, width: u32, height: u32) {
unsafe {
if self.fbo_id != 0 {
self.hw_gl_delete_fbo();
}
gl::GenFramebuffers(1, std::ptr::addr_of_mut!(self.fbo_id));
gl::BindFramebuffer(gl::FRAMEBUFFER, self.fbo_id);
gl::GenTextures(1, std::ptr::addr_of_mut!(self.texture_id));
gl::BindTexture(gl::TEXTURE_2D, self.texture_id);
gl::TexImage2D(
gl::TEXTURE_2D,
0,
gl::RGBA8 as i32,
width as i32,
height as i32,
0,
gl::RGBA,
gl::UNSIGNED_BYTE,
std::ptr::null(),
);
gl::GenRenderbuffers(1, std::ptr::addr_of_mut!(self.renderbuffer_id));
gl::BindRenderbuffer(gl::RENDERBUFFER, self.renderbuffer_id);
gl::RenderbufferStorage(
gl::RENDERBUFFER,
gl::DEPTH_COMPONENT,
width as i32,
height as i32,
);
gl::FramebufferTexture2D(
gl::FRAMEBUFFER,
gl::COLOR_ATTACHMENT0,
gl::TEXTURE_2D,
self.texture_id,
0,
);
gl::BindTexture(gl::TEXTURE_2D, 0);
gl::FramebufferRenderbuffer(
gl::FRAMEBUFFER,
gl::DEPTH_ATTACHMENT,
gl::RENDERBUFFER,
self.renderbuffer_id,
);
gl::BindRenderbuffer(gl::RENDERBUFFER, 0);
gl::Viewport(0, 0, width as i32, height as i32);
gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
// Notify the frontend layer about the new FBO
let id = self.fbo_id;
self.get_frontend().set_gl_fbo(id);
// Resize the readback buffer
self.readback_buffer.resize((width * height) as usize, 0);
}
}
/// The main loop. Should probably be abstracted a bit better.
pub fn main_loop(&mut self) -> ! {
let frontend = self.get_frontend();
let av_info = frontend.get_av_info().expect("???");
let step_ms = (1.0 / av_info.timing.fps) * 1000.;
let step_duration = Duration::from_micros((step_ms * 1000.) as u64);
// Do the main loop
loop {
frontend.run_frame();
std::thread::sleep(step_duration);
}
}
}
impl FrontendInterface for App {
fn video_resize(&mut self, width: u32, height: u32) {
tracing::info!("Resized to {width}x{height}");
// Resize OpenGL resources if we need to.
if self.hw_render {
self.hw_gl_create_fbo(width, height);
}
self.rfb_server.resize(width as u16, height as u16);
}
fn video_update(&mut self, slice: &[u32], pitch: u32) {
//let framebuffer_size = self.get_frontend().get_size();
self.rfb_server.update_buffer(&slice, pitch, false);
}
fn video_update_gl(&mut self) {
let dimensions = self.get_frontend().get_size();
unsafe {
gl::BindFramebuffer(gl::FRAMEBUFFER, self.fbo_id);
// I know this sucks but it works for this case.
gl::ReadPixels(
0,
0,
dimensions.0 as i32,
dimensions.1 as i32,
gl::RGBA,
gl::UNSIGNED_BYTE,
self.readback_buffer.as_mut_ptr() as *mut std::ffi::c_void,
);
self.rfb_server
.update_buffer(&self.readback_buffer[..], dimensions.0, true);
gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
}
}
fn audio_sample(&mut self, _slice: &[i16], _size: usize) {}
fn input_poll(&mut self) {
self.pad.reset();
// Press all buttons the VNC server marked as pressed
let buttons = self.rfb_server.get_buttons();
for i in 0..buttons.len() {
if buttons[i] {
self.pad.press_button(i as u32, None);
}
}
}
fn hw_gl_init(&mut self) -> HwGlInitData {
if self.hw_render {
panic!("Cannot initalize HW rendering more than once");
}
unsafe {
// Initalize EGL
self.hw_gl_egl_init();
// load OpenGL functions (using EGL loader. We should probably check the one extension exists)
gl::load_with(|s| {
let str = std::ffi::CString::new(s).expect("Uhh huh.");
std::mem::transmute(egl::GetProcAddress(str.as_ptr()))
});
// set OpenGL debug message callback
gl::Enable(gl::DEBUG_OUTPUT);
gl::DebugMessageCallback(Some(opengl_message_callback), std::ptr::null());
// Create the initial FBO for the core to render to
let dimensions = self.get_frontend().get_size();
self.hw_gl_create_fbo(dimensions.0, dimensions.1);
}
self.hw_render = true;
return unsafe {
HwGlInitData {
get_proc_address: std::mem::transmute(egl::GetProcAddress as *mut std::ffi::c_void),
}
};
}
}


@ -1,3 +1,5 @@
//! EGL bindings and helpers.
#[allow(non_camel_case_types)]
#[allow(unused_imports)]
mod egl_impl {
@ -15,6 +17,8 @@ mod egl_impl {
include!(concat!(env!("OUT_DIR"), "/egl_bindings.rs"));
// TODO: Move these helpers to a new "helpers" module.
pub type GetPlatformDisplayExt = unsafe extern "C" fn(
platform: types::EGLenum,
native_display: *const std::ffi::c_void,


@ -1,382 +1,18 @@
use std::{net::Ipv4Addr, path::Path, time::Duration};
use anyhow::Result;
use std::net::Ipv4Addr;
use retro_frontend::{
frontend::{Frontend, FrontendInterface, HwGlInitData},
input_devices::{InputDevice, RetroPad},
};
use tracing::Level;
use tracing_subscriber::FmtSubscriber;
use clap::{arg, command, value_parser};
/// Called by OpenGL. We use this to dump errors.
extern "system" fn opengl_message_callback(
source: gl::types::GLenum,
_type: gl::types::GLenum,
id: gl::types::GLuint,
_severity: gl::types::GLenum,
_length: gl::types::GLsizei,
message: *const gl::types::GLchar,
_user: *mut std::ffi::c_void,
) {
unsafe {
let message = std::ffi::CStr::from_ptr(message);
if _type == gl::DEBUG_TYPE_ERROR {
tracing::error!(
"OpenGL error: {:?} (res {:08x}, id = {:08x}, source = {:08x})",
message,
_type,
id,
source
);
}
}
}
mod app;
mod egl;
mod rfb;
use rfb::*;
struct App {
frontend: Option<Box<Frontend>>,
rfb_server: Box<RfbServer>,
pad: RetroPad,
hw_render: bool,
// EGL state
egl_display: egl::types::EGLDisplay,
egl_context: egl::types::EGLContext,
// OpenGL object IDs
texture_id: gl::types::GLuint,
renderbuffer_id: gl::types::GLuint,
fbo_id: gl::types::GLuint,
/// Cached readback buffer.
readback_buffer: Vec<u32>,
}
impl App {
fn new(rfb_config: RfbServerConfig) -> Result<Box<Self>> {
let mut boxed = Box::new(Self {
frontend: None,
rfb_server: RfbServer::new(rfb_config)?,
pad: RetroPad::new(),
hw_render: false,
egl_display: std::ptr::null(),
egl_context: std::ptr::null(),
texture_id: 0,
renderbuffer_id: 0,
fbo_id: 0,
readback_buffer: Vec::new(),
});
// Very very nasty, but honestly it works.
// I'll look into cleaning it up later.
let obj = &mut *boxed as &mut dyn FrontendInterface;
boxed.frontend = Some(Frontend::new(obj as *mut dyn FrontendInterface));
Ok(boxed)
}
fn get_frontend(&mut self) -> &mut Frontend {
self.frontend.as_mut().unwrap()
}
fn init(&mut self) {
// Currently retrovnc just hardcodes the assumption of a single RetroPad.
let pad = &mut self.pad as *mut dyn InputDevice;
self.get_frontend().plug_input_device(0, pad);
// Initialize the display
self.init_display();
}
fn init_display(&mut self) {
let av_info = self.get_frontend().get_av_info().expect("No AV info");
// Start VNC server.
{
let server = &mut self.rfb_server;
tracing::info!("Starting VNC server");
server.start();
server.resize(
av_info.geometry.base_width as u16,
av_info.geometry.base_height as u16,
);
}
}
fn load_core<P: AsRef<Path>>(&mut self, path: P) -> Result<()> {
if self.get_frontend().core_loaded() {
println!("???");
let _ = self.get_frontend().unload_core();
}
self.get_frontend().load_core(path)?;
Ok(())
}
fn load_game<P: AsRef<Path>>(&mut self, path: P) -> Result<()> {
self.get_frontend().load_game(path)?;
Ok(())
}
/// Initializes a headless EGL context for OpenGL rendering.
unsafe fn hw_gl_egl_init(&mut self) {
self.egl_display = egl::get_device_platform_display();
self.egl_context = {
const EGL_CONFIG_ATTRIBUTES: [egl::types::EGLenum; 13] = [
egl::SURFACE_TYPE,
egl::PBUFFER_BIT,
egl::BLUE_SIZE,
8,
egl::RED_SIZE,
8,
egl::GREEN_SIZE,
8,
egl::DEPTH_SIZE,
8,
egl::RENDERABLE_TYPE,
egl::OPENGL_BIT,
egl::NONE,
];
let mut egl_major: egl::EGLint = 0;
let mut egl_minor: egl::EGLint = 0;
let mut egl_config_count: egl::EGLint = 0;
let mut config: egl::types::EGLConfig = std::ptr::null();
egl::Initialize(
self.egl_display,
std::ptr::addr_of_mut!(egl_major),
std::ptr::addr_of_mut!(egl_minor),
);
egl::ChooseConfig(
self.egl_display,
EGL_CONFIG_ATTRIBUTES.as_ptr() as *const egl::EGLint,
std::ptr::addr_of_mut!(config),
1,
std::ptr::addr_of_mut!(egl_config_count),
);
egl::BindAPI(egl::OPENGL_API);
let context =
egl::CreateContext(self.egl_display, config, egl::NO_CONTEXT, std::ptr::null());
// Make the context current on the display so OpenGL routines "just work"
egl::MakeCurrent(self.egl_display, egl::NO_SURFACE, egl::NO_SURFACE, context);
context
};
}
/// Destroys EGL resources.
unsafe fn hw_gl_egl_exit(&mut self) {
// Release the EGL context we created before destroying it
egl::MakeCurrent(
self.egl_display,
egl::NO_SURFACE,
egl::NO_SURFACE,
egl::NO_CONTEXT,
);
egl::DestroyContext(self.egl_display, self.egl_context);
egl::Terminate(self.egl_display);
self.egl_display = std::ptr::null();
self.egl_context = std::ptr::null();
}
fn hw_gl_delete_fbo(&mut self) {
unsafe {
gl::DeleteFramebuffers(1, std::ptr::addr_of_mut!(self.fbo_id));
self.fbo_id = 0;
gl::DeleteTextures(1, std::ptr::addr_of_mut!(self.texture_id));
self.texture_id = 0;
gl::DeleteRenderbuffers(1, std::ptr::addr_of_mut!(self.renderbuffer_id));
self.renderbuffer_id = 0;
}
}
fn hw_gl_create_fbo(&mut self, width: u32, height: u32) {
unsafe {
if self.fbo_id != 0 {
self.hw_gl_delete_fbo();
}
gl::GenFramebuffers(1, std::ptr::addr_of_mut!(self.fbo_id));
gl::BindFramebuffer(gl::FRAMEBUFFER, self.fbo_id);
gl::GenTextures(1, std::ptr::addr_of_mut!(self.texture_id));
gl::BindTexture(gl::TEXTURE_2D, self.texture_id);
gl::TexImage2D(
gl::TEXTURE_2D,
0,
gl::RGBA8 as i32,
width as i32,
height as i32,
0,
gl::RGBA,
gl::UNSIGNED_BYTE,
std::ptr::null(),
);
gl::GenRenderbuffers(1, std::ptr::addr_of_mut!(self.renderbuffer_id));
gl::BindRenderbuffer(gl::RENDERBUFFER, self.renderbuffer_id);
gl::RenderbufferStorage(
gl::RENDERBUFFER,
gl::DEPTH_COMPONENT,
width as i32,
height as i32,
);
gl::FramebufferTexture2D(
gl::FRAMEBUFFER,
gl::COLOR_ATTACHMENT0,
gl::TEXTURE_2D,
self.texture_id,
0,
);
gl::BindTexture(gl::TEXTURE_2D, 0);
gl::FramebufferRenderbuffer(
gl::FRAMEBUFFER,
gl::DEPTH_ATTACHMENT,
gl::RENDERBUFFER,
self.renderbuffer_id,
);
gl::BindRenderbuffer(gl::RENDERBUFFER, 0);
gl::Viewport(0, 0, width as i32, height as i32);
gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
// Notify the frontend layer about the new FBO
let id = self.fbo_id;
self.get_frontend().set_gl_fbo(id);
// Resize the readback buffer
self.readback_buffer.resize((width * height) as usize, 0);
}
}
// Main loop
fn main_loop(&mut self) -> ! {
let frontend = self.get_frontend();
let av_info = frontend.get_av_info().expect("???");
let step_ms = (1.0 / av_info.timing.fps) * 1000.;
let step_duration = Duration::from_micros((step_ms * 1000.) as u64);
// Do the main loop
loop {
frontend.run_frame();
std::thread::sleep(step_duration);
}
}
}
impl FrontendInterface for App {
fn video_resize(&mut self, width: u32, height: u32) {
tracing::info!("Resized to {width}x{height}");
// Resize OpenGL resources if we need to.
if self.hw_render {
self.hw_gl_create_fbo(width, height);
}
self.rfb_server.resize(width as u16, height as u16);
}
fn video_update(&mut self, slice: &[u32], pitch: u32) {
//let framebuffer_size = self.get_frontend().get_size();
self.rfb_server.update_buffer(&slice, pitch, false);
}
fn video_update_gl(&mut self) {
let dimensions = self.get_frontend().get_size();
unsafe {
gl::BindFramebuffer(gl::FRAMEBUFFER, self.fbo_id);
// I know this sucks but it works for this case.
gl::ReadPixels(
0,
0,
dimensions.0 as i32,
dimensions.1 as i32,
gl::RGBA,
gl::UNSIGNED_BYTE,
self.readback_buffer.as_mut_ptr() as *mut std::ffi::c_void,
);
self.rfb_server
.update_buffer(&self.readback_buffer[..], dimensions.0, true);
gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
}
}
fn audio_sample(&mut self, _slice: &[i16], _size: usize) {}
fn input_poll(&mut self) {
self.pad.reset();
// Press all buttons the VNC server marked as pressed
let buttons = self.rfb_server.get_buttons();
for i in 0..buttons.len() {
if buttons[i] {
self.pad.press_button(i as u32, None);
}
}
}
fn hw_gl_init(&mut self) -> HwGlInitData {
if self.hw_render {
panic!("Cannot initalize HW rendering more than once");
}
unsafe {
// Initalize EGL
self.hw_gl_egl_init();
// load OpenGL functions (using EGL loader. We should probably check the one extension exists)
gl::load_with(|s| {
let str = std::ffi::CString::new(s).expect("Uhh huh.");
std::mem::transmute(egl::GetProcAddress(str.as_ptr()))
});
// set OpenGL debug message callback
gl::Enable(gl::DEBUG_OUTPUT);
gl::DebugMessageCallback(Some(opengl_message_callback), std::ptr::null());
// Create the initial FBO for the core to render to
let dimensions = self.get_frontend().get_size();
self.hw_gl_create_fbo(dimensions.0, dimensions.1);
}
self.hw_render = true;
return unsafe {
HwGlInitData {
get_proc_address: std::mem::transmute(egl::GetProcAddress as *mut std::ffi::c_void),
}
};
}
}
use app::*;
use rfb::RfbServerConfig;
fn main() -> Result<()> {
// Setup a tracing subscriber
@ -387,20 +23,32 @@ fn main() -> Result<()> {
tracing::subscriber::set_global_default(subscriber).unwrap();
let matches = command!()
.arg(arg!(--core <VALUE>).required(true))
// Not that it matters, but this is only really required for cores that require
// content to be loaded; that's most cores, but libretro does support the difference.
// TODO: A core will tell us if it requires content, if it's not provided we can yell and exit.
.arg(arg!(--rom <VALUE>).required(false))
.about("Headless VNC libretro frontend (with GPU rendering support)")
.arg(
arg!(--core <VALUE>)
.required(true)
.help("libretro core to load")
.short('c'),
)
.arg(
arg!(--rom <VALUE>)
.required(false)
.help("ROM to load into core")
.short('r'),
)
.arg(
arg!(--rfb_listen <ADDRESS>)
.required(false)
.help("VNC listen address")
.short('l')
.default_value("127.0.0.1"),
)
.arg(
arg!(--rfb_port <PORT>)
.value_parser(value_parser!(u16))
.required(true),
.required(true)
.help("VNC listen port")
.short('p'),
)
.get_matches();
@ -419,11 +67,11 @@ fn main() -> Result<()> {
listen_port: *matches.get_one::<u16>("rfb_port").unwrap(),
};
// Initialize the app
let mut app = App::new(rfb_config)?;
app.load_core(core_path)?;
// TODO: Make sure to fail if a core requests content but it was never provided
if let Some(rom_path) = matches.get_one::<String>("rom") {
app.load_game(rom_path)?
}