Unsafe localization + sanity checking

By localization, I mean making functions have unsafe blocks where they are actually doing unsafe things, rather than a blanket "unsafe fn", and moving safe operations (or functions made safe by localizing their unsafety) out to safe Rust where possible. This also adds the opportunity to annotate unsafe blocks in a better fashion, which is a nice side benefit.
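To make the idea concrete, here is a minimal before/after sketch. The names (`Framebuffer`, `delete_framebuffers`) are made up for illustration, loosely modeled on the FBO teardown in the diff below; the real code uses the `gl` crate's generated bindings.

    struct Framebuffer {
    	fbo_id: u32,
    }

    // Stand-in for an FFI binding in the style of gl::DeleteFramebuffers.
    unsafe fn delete_framebuffers(_n: i32, _ids: *mut u32) {}

    // Before: blanket `unsafe fn`. The whole body is one big unsafe
    // context, so it's unclear which operations actually need scrutiny,
    // and every caller has to write `unsafe` too.
    unsafe fn destroy_before(fb: &mut Framebuffer) {
    	delete_framebuffers(1, std::ptr::addr_of_mut!(fb.fbo_id));
    	fb.fbo_id = 0; // perfectly safe, but lives in an unsafe context anyway
    }

    // After: a safe fn with the unsafety localized to one annotated block.
    fn destroy_after(fb: &mut Framebuffer) {
    	// SAFETY: fbo_id always holds a name we created (or 0, a GL no-op).
    	unsafe { delete_framebuffers(1, std::ptr::addr_of_mut!(fb.fbo_id)) };
    	fb.fbo_id = 0; // safe bookkeeping stays in safe Rust
    }

    fn main() {
    	let mut fb = Framebuffer { fbo_id: 3 };
    	destroy_after(&mut fb); // no unsafe at the call site anymore
    }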

Also, the helper to get an EGL Device platform display can now select any of the first 16 devices reported. For now this functionality is unused, since we hardcode index 0 and effectively do the same thing as before, but later on it might be a good idea to expose it? Not Sure™️
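For reference, the intended call shape (a sketch: index 0 reproduces the old hardcoded behavior; other indices are not wired up anywhere yet):

    // Select the first EGL device, exactly what the old helper did implicitly.
    let display = egl::get_device_platform_display(0);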
Lily Tsuru 2024-08-07 03:56:59 -04:00
parent 470aecf08e
commit b31feab847
2 changed files with 108 additions and 65 deletions

@@ -3,6 +3,8 @@ use crate::rfb::*;
 use std::{path::Path, time::Duration};
+use std::ptr::{addr_of_mut, null};
 use retro_frontend::{
 	frontend::{Frontend, FrontendInterface, HwGlInitData},
 	input_devices::{InputDevice, RetroPad},
@@ -44,7 +46,7 @@ pub struct App {
 	pad: RetroPad,
 	/// True if HW rendering is active.
-	hw_render: bool,
+	using_hardware_rendering: bool,
 	// EGL state
 	egl_display: egl::types::EGLDisplay,
@@ -65,9 +67,9 @@ impl App {
 			frontend: None,
 			rfb_server: RfbServer::new(rfb_config)?,
 			pad: RetroPad::new(),
-			hw_render: false,
-			egl_display: std::ptr::null(),
-			egl_context: std::ptr::null(),
+			using_hardware_rendering: false,
+			egl_display: null(),
+			egl_context: null(),
 			texture_id: 0,
 			renderbuffer_id: 0,
 			fbo_id: 0,
@@ -136,10 +138,12 @@ impl App {
 	}
 	/// Initializes a headless EGL context for OpenGL rendering.
-	unsafe fn hw_gl_egl_init(&mut self) {
-		self.egl_display = egl::get_device_platform_display();
+	fn hw_gl_egl_init(&mut self) {
+		// Currently we assume the first device on the Device platform.
+		// In most cases (at least on NVIDIA), this is usually a real GPU.
+		self.egl_display = egl::get_device_platform_display(0);
-		self.egl_context = {
+		self.egl_context = unsafe {
 			const EGL_CONFIG_ATTRIBUTES: [egl::types::EGLenum; 13] = [
 				egl::SURFACE_TYPE,
 				egl::PBUFFER_BIT,
@@ -160,26 +164,25 @@ impl App {
 			let mut egl_config_count: egl::EGLint = 0;
-			let mut config: egl::types::EGLConfig = std::ptr::null();
+			let mut config: egl::types::EGLConfig = null();
 			egl::Initialize(
 				self.egl_display,
-				std::ptr::addr_of_mut!(egl_major),
-				std::ptr::addr_of_mut!(egl_minor),
+				addr_of_mut!(egl_major),
+				addr_of_mut!(egl_minor),
 			);
 			egl::ChooseConfig(
 				self.egl_display,
 				EGL_CONFIG_ATTRIBUTES.as_ptr() as *const egl::EGLint,
-				std::ptr::addr_of_mut!(config),
+				addr_of_mut!(config),
 				1,
-				std::ptr::addr_of_mut!(egl_config_count),
+				addr_of_mut!(egl_config_count),
 			);
 			egl::BindAPI(egl::OPENGL_API);
-			let context =
-				egl::CreateContext(self.egl_display, config, egl::NO_CONTEXT, std::ptr::null());
+			let context = egl::CreateContext(self.egl_display, config, egl::NO_CONTEXT, null());
 			// Make the context current on the display so OpenGL routines "just work"
 			egl::MakeCurrent(self.egl_display, egl::NO_SURFACE, egl::NO_SURFACE, context);
@@ -189,12 +192,13 @@ impl App {
 	}
 	/// Destroys EGL resources.
-	unsafe fn hw_gl_egl_exit(&mut self) {
-		if self.hw_render {
+	fn hw_gl_egl_exit(&mut self) {
+		if self.using_hardware_rendering {
 			// Delete FBO
 			self.hw_gl_delete_fbo();
 			// Release the EGL context we created before destroying it
+			unsafe {
 				egl::MakeCurrent(
 					self.egl_display,
 					egl::NO_SURFACE,
@@ -203,21 +207,22 @@ impl App {
 				);
 				egl::DestroyContext(self.egl_display, self.egl_context);
 				egl::Terminate(self.egl_display);
-			self.egl_display = std::ptr::null();
-			self.egl_context = std::ptr::null();
+			}
+			self.egl_display = null();
+			self.egl_context = null();
 		}
 	}
 	/// Deletes all OpenGL FBO resources (the FBO itself, the render texture, and the renderbuffer used for depth)
 	fn hw_gl_delete_fbo(&mut self) {
 		unsafe {
-			gl::DeleteFramebuffers(1, std::ptr::addr_of_mut!(self.fbo_id));
+			gl::DeleteFramebuffers(1, addr_of_mut!(self.fbo_id));
 			self.fbo_id = 0;
-			gl::DeleteTextures(1, std::ptr::addr_of_mut!(self.texture_id));
+			gl::DeleteTextures(1, addr_of_mut!(self.texture_id));
 			self.texture_id = 0;
-			gl::DeleteRenderbuffers(1, std::ptr::addr_of_mut!(self.renderbuffer_id));
+			gl::DeleteRenderbuffers(1, addr_of_mut!(self.renderbuffer_id));
 			self.renderbuffer_id = 0;
 		}
 	}
@@ -228,10 +233,10 @@ impl App {
 			self.hw_gl_delete_fbo();
 		}
-		gl::GenFramebuffers(1, std::ptr::addr_of_mut!(self.fbo_id));
+		gl::GenFramebuffers(1, addr_of_mut!(self.fbo_id));
 		gl::BindFramebuffer(gl::FRAMEBUFFER, self.fbo_id);
-		gl::GenTextures(1, std::ptr::addr_of_mut!(self.texture_id));
+		gl::GenTextures(1, addr_of_mut!(self.texture_id));
 		gl::BindTexture(gl::TEXTURE_2D, self.texture_id);
 		gl::TexImage2D(
@@ -243,10 +248,10 @@ impl App {
 			0,
 			gl::RGBA,
 			gl::UNSIGNED_BYTE,
-			std::ptr::null(),
+			null(),
 		);
-		gl::GenRenderbuffers(1, std::ptr::addr_of_mut!(self.renderbuffer_id));
+		gl::GenRenderbuffers(1, addr_of_mut!(self.renderbuffer_id));
 		gl::BindRenderbuffer(gl::RENDERBUFFER, self.renderbuffer_id);
 		gl::RenderbufferStorage(
@@ -308,8 +313,8 @@ impl FrontendInterface for App {
 	fn video_resize(&mut self, width: u32, height: u32) {
 		tracing::info!("Resized to {width}x{height}");
-		// Resize OpenGL resources if we need to.
-		if self.hw_render {
+		// Recreate the OpenGL FBO on resize.
+		if self.using_hardware_rendering {
 			self.hw_gl_create_fbo(width, height);
 		}
@@ -324,10 +329,12 @@ impl FrontendInterface for App {
 	fn video_update_gl(&mut self) {
 		let dimensions = self.get_frontend().get_size();
 		// Read back the framebuffer with glReadPixels()
-		// I know it sucks but it works for this case.
+		// SAFETY: self.readback_buffer will always be allocated to the proper size before reaching here
 		unsafe {
 			gl::BindFramebuffer(gl::FRAMEBUFFER, self.fbo_id);
+			// I know this sucks but it works for this case.
 			gl::ReadPixels(
 				0,
 				0,
@@ -338,11 +345,11 @@ impl FrontendInterface for App {
 				self.readback_buffer.as_mut_ptr() as *mut std::ffi::c_void,
 			);
-			self.rfb_server
-				.update_buffer(&self.readback_buffer[..], dimensions.0, true);
 			gl::BindFramebuffer(gl::FRAMEBUFFER, 0);
 		}
+		self.rfb_server
+			.update_buffer(&self.readback_buffer[..], dimensions.0, true);
 	}
 	fn audio_sample(&mut self, _slice: &[i16], _size: usize) {}
@@ -360,15 +367,27 @@ impl FrontendInterface for App {
 	}
 	fn hw_gl_init(&mut self) -> HwGlInitData {
-		if self.hw_render {
-			panic!("Cannot initialize HW rendering more than once");
+		if self.using_hardware_rendering {
+			panic!("Cannot initialize HW rendering while already initialized");
 		}
-		unsafe {
 		// Initialize EGL
 		self.hw_gl_egl_init();
-		// load OpenGL functions (using EGL loader. We should probably check the one extension exists)
+		let extensions = egl::get_extensions(self.egl_display);
+		tracing::debug!("Supported EGL extensions: {:?}", extensions);
+		// Check for EGL_KHR_get_all_proc_addresses, so we can use eglGetProcAddress() to load OpenGL functions
+		// TODO: instead of panicking, we should probably make this return an Option<_>, and treat None on the frontend side
+		// as a failure.
+		if !extensions.contains(&"EGL_KHR_get_all_proc_addresses".into()) {
+			tracing::error!("Your graphics driver doesn't support the EGL_KHR_get_all_proc_addresses extension. Failing");
+			panic!("Cannot initialize OpenGL rendering");
+		}
+		unsafe {
+			// Load OpenGL functions using the EGL loader.
 			gl::load_with(|s| {
 				let str = std::ffi::CString::new(s).expect("Uhh huh.");
 				std::mem::transmute(egl::GetProcAddress(str.as_ptr()))
@@ -376,14 +395,14 @@ impl FrontendInterface for App {
 			// set OpenGL debug message callback
 			gl::Enable(gl::DEBUG_OUTPUT);
-			gl::DebugMessageCallback(Some(opengl_message_callback), std::ptr::null());
+			gl::DebugMessageCallback(Some(opengl_message_callback), null());
 		}
 		// Create the initial FBO for the core to render to
 		let dimensions = self.get_frontend().get_size();
 		self.hw_gl_create_fbo(dimensions.0, dimensions.1);
-		}
-		self.hw_render = true;
+		self.using_hardware_rendering = true;
 		return unsafe {
 			HwGlInitData {

@@ -28,30 +28,54 @@ mod egl_impl {
 	pub type QueryDevicesExt = unsafe extern "C" fn(
 		max_devices: self::types::EGLint,
 		devices: *mut self::types::EGLDeviceEXT,
-		devices: *mut EGLint,
+		devices_present: *mut EGLint,
 	) -> types::EGLBoolean;
+	pub fn get_extensions(display: types::EGLDisplay) -> Vec<String> {
+		// SAFETY: eglQueryString() should never return a null pointer.
+		// If it does your video drivers are more than likely broken beyond repair.
+		unsafe {
+			let extensions_ptr = QueryString(display, EXTENSIONS as i32);
+			assert!(!extensions_ptr.is_null());
+			let extensions_str = std::ffi::CStr::from_ptr(extensions_ptr)
+				.to_str()
+				.expect("Invalid EGL_EXTENSIONS");
+			extensions_str
+				.split(' ')
+				.map(|str| str.to_string())
+				.collect()
+		}
+	}
 	/// A helper to get a display on the EGL "Device" platform, which allows headless rendering,
 	/// without any window system interface.
-	pub unsafe fn get_device_platform_display() -> types::EGLDisplay {
+	pub fn get_device_platform_display(index: usize) -> types::EGLDisplay {
 		const NR_DEVICES_MAX: usize = 16;
 		let mut devices: [types::EGLDeviceEXT; NR_DEVICES_MAX] = [std::ptr::null(); NR_DEVICES_MAX];
-		let mut nr_devices_real: EGLint = 0;
+		// This is how many devices are actually present.
+		let mut devices_present: EGLint = 0;
+		assert!(index < NR_DEVICES_MAX, "More than {NR_DEVICES_MAX} devices are not supported right now");
 		unsafe {
+			// TODO: These should probably be using CStr like above.
 			let query_devices_ext: QueryDevicesExt =
 				std::mem::transmute(GetProcAddress(b"eglQueryDevicesEXT\0".as_ptr() as *const i8));
-			let get_platform_display_ext: GetPlatformDisplayExt = std::mem::transmute(GetProcAddress(
-				b"eglGetPlatformDisplayEXT\0".as_ptr() as *const i8,
-			));
+			let get_platform_display_ext: GetPlatformDisplayExt = std::mem::transmute(
+				GetProcAddress(b"eglGetPlatformDisplayEXT\0".as_ptr() as *const i8),
+			);
 			(query_devices_ext)(
 				NR_DEVICES_MAX as i32,
 				devices.as_mut_ptr(),
-				std::ptr::addr_of_mut!(nr_devices_real),
+				std::ptr::addr_of_mut!(devices_present),
			);
-			(get_platform_display_ext)(PLATFORM_DEVICE_EXT, devices[0], std::ptr::null())
+			(get_platform_display_ext)(PLATFORM_DEVICE_EXT, devices[index], std::ptr::null())
 		}
 	}
 	// link EGL as a library dependency