refactor(desktop): decompose monolithic main.rs into layered modules
Some checks failed
CI / rust (push) Has been cancelled

Split DesktopApp into input, audio, video, scheduling, and app modules.
Migrate DesktopApp from manual pause/resume logic to library ClientRuntime.
This commit is contained in:
2026-03-18 12:52:08 +03:00
parent 2878187180
commit 38a62b6f93
6 changed files with 483 additions and 456 deletions

View File

@@ -0,0 +1,100 @@
use std::path::Path;
use std::sync::Arc;
use std::sync::atomic::AtomicU32;
use std::time::Duration;
use nesemu::prelude::{ClientRuntime, EmulationState, HostConfig};
use nesemu::{FrameClock, NesRuntime, VideoMode};
use crate::audio::CpalAudioSink;
use crate::input::InputState;
use crate::video::BufferedVideo;
use crate::SAMPLE_RATE;
/// Top-level application state for the desktop frontend.
///
/// Owns the emulation session plus the host-side input/audio/video adapters
/// that are handed to `ClientRuntime::tick` every frame.
pub(crate) struct DesktopApp {
    // `None` until a ROM has been loaded via `load_rom_from_path`.
    session: Option<ClientRuntime<Box<dyn FrameClock>>>,
    input: InputState,
    audio: CpalAudioSink,
    video: BufferedVideo,
}

impl DesktopApp {
    /// Creates an app with no ROM loaded. `volume` is shared with the audio
    /// callback (an `f32` stored bit-for-bit in an `AtomicU32`).
    pub(crate) fn new(volume: Arc<AtomicU32>) -> Self {
        Self {
            session: None,
            input: InputState::default(),
            audio: CpalAudioSink::new(volume),
            video: BufferedVideo::new(),
        }
    }

    /// Reads a ROM file from disk and replaces any existing session.
    ///
    /// Errors propagate from file I/O and from ROM parsing in
    /// `NesRuntime::from_rom_bytes`. Buffered audio is flushed so the new
    /// game does not start by playing the previous game's samples.
    pub(crate) fn load_rom_from_path(
        &mut self,
        path: &Path,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let data = std::fs::read(path)?;
        let runtime = NesRuntime::from_rom_bytes(&data)?;
        let config = HostConfig::new(SAMPLE_RATE, false);
        // NOTE(review): no explicit resume() here — assumes ClientRuntime
        // starts out Running after with_config; confirm.
        let session = ClientRuntime::with_config(runtime, config);
        self.session = Some(session);
        self.audio.clear();
        Ok(())
    }

    /// Console reset: restarts the loaded game, drops stale audio, and
    /// resumes in case the session was paused. No-op when no ROM is loaded.
    pub(crate) fn reset(&mut self) {
        if let Some(session) = self.session.as_mut() {
            session.host_mut().runtime_mut().reset();
            self.audio.clear();
            session.resume();
        }
    }

    /// True once a ROM has been loaded successfully.
    pub(crate) fn is_loaded(&self) -> bool {
        self.session.is_some()
    }

    /// Current emulation state; reports `Paused` when no ROM is loaded.
    pub(crate) fn state(&self) -> EmulationState {
        self.session
            .as_ref()
            .map(|s| s.state())
            .unwrap_or(EmulationState::Paused)
    }

    /// Toggle pause: Running → pause; any other state → resume.
    pub(crate) fn toggle_pause(&mut self) {
        if let Some(session) = self.session.as_mut() {
            match session.state() {
                EmulationState::Running => session.pause(),
                _ => session.resume(),
            }
        }
    }

    /// Run one tick of the session, feeding it the input/video/audio adapters.
    /// On a frame execution error, logs it and pauses the session.
    // NOTE(review): unlike the pre-refactor code there is no Running check
    // here — assumes ClientRuntime::tick no-ops while paused; confirm.
    pub(crate) fn tick(&mut self) {
        let Some(session) = self.session.as_mut() else {
            return;
        };
        match session.tick(&mut self.input, &mut self.video, &mut self.audio) {
            Ok(_) => {}
            Err(err) => {
                eprintln!("Frame execution error: {err}");
                session.pause();
            }
        }
    }

    /// Borrow the most recently presented RGBA frame.
    pub(crate) fn frame_rgba(&self) -> &[u8] {
        self.video.frame_rgba()
    }

    /// Frame duration from the loaded ROM's video mode; NTSC when no ROM.
    pub(crate) fn frame_interval(&self) -> Duration {
        self.session
            .as_ref()
            .map(|s| s.host().runtime().video_mode().frame_duration())
            .unwrap_or_else(|| VideoMode::Ntsc.frame_duration())
    }

    /// Mutable access to the keyboard-driven joypad state.
    pub(crate) fn input_mut(&mut self) -> &mut InputState {
        &mut self.input
    }
}

View File

@@ -0,0 +1,146 @@
use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering as AtomicOrdering};
use nesemu::RingBuffer;
use crate::SAMPLE_RATE;
/// Capacity (in samples) of the ring between the emulator and the cpal
/// audio callback; validated by the latency/headroom tests below.
pub(crate) const AUDIO_RING_CAPACITY: usize = 4096;

/// Audio sink backed by cpal; samples flow through `ring` to the device.
pub(crate) struct CpalAudioSink {
    // Keeps the output stream alive; `None` until first use (lazy init).
    _stream: Option<cpal::Stream>,
    ring: Arc<RingBuffer>,
    // Volume as f32 bits in an AtomicU32, shared with the audio callback.
    _volume: Arc<AtomicU32>,
}
impl CpalAudioSink {
    pub(crate) fn new(volume: Arc<AtomicU32>) -> Self {
        let ring = Arc::new(RingBuffer::new(AUDIO_RING_CAPACITY));
        // Do NOT open the audio device here. Creating a cpal stream at startup
        // forces the system audio server (PipeWire/PulseAudio) to allocate
        // resources and may disrupt other running audio applications even when
        // the emulator is idle. The stream is opened lazily on the first
        // push_samples call, i.e. only when a ROM is actually playing.
        Self {
            _stream: None,
            ring,
            _volume: volume,
        }
    }

    // Lazily create the output stream on first use (see `new` for rationale).
    fn ensure_stream(&mut self) {
        if self._stream.is_none() {
            let ring_for_cb = Arc::clone(&self.ring);
            let vol_for_cb = Arc::clone(&self._volume);
            self._stream = Self::try_build_stream(ring_for_cb, vol_for_cb);
        }
    }

    /// Build and start an output stream. Returns `None` (after logging) on
    /// any failure so the emulator keeps running without sound.
    fn try_build_stream(ring: Arc<RingBuffer>, volume: Arc<AtomicU32>) -> Option<cpal::Stream> {
        use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
        let host = cpal::default_host();
        let device = match host.default_output_device() {
            Some(d) => d,
            None => {
                eprintln!("No audio output device found — running without sound");
                return None;
            }
        };
        let config = cpal_stream_config();
        let stream = match device.build_output_stream(
            &config,
            move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
                // Fill from the ring; zero-pad any underrun, then scale the
                // delivered samples by the shared volume.
                let read = ring.pop(data);
                for sample in &mut data[read..] {
                    *sample = 0.0;
                }
                let vol = f32::from_bits(volume.load(AtomicOrdering::Relaxed));
                for sample in &mut data[..read] {
                    *sample *= vol;
                }
            },
            move |err| {
                eprintln!("Audio stream error: {err}");
            },
            None,
        ) {
            Ok(s) => s,
            Err(err) => {
                eprintln!("Failed to build audio stream: {err} — running without sound");
                return None;
            }
        };
        if let Err(err) = stream.play() {
            eprintln!("Failed to start audio stream: {err} — running without sound");
            return None;
        }
        Some(stream)
    }

    /// Reset the ring buffer. Note: the cpal callback may still be calling
    /// `pop()` concurrently; in practice this is benign — at worst a few
    /// stale samples are played during the ROM load / reset transition.
    pub(crate) fn clear(&self) {
        self.ring.clear();
    }
}
impl nesemu::AudioOutput for CpalAudioSink {
    fn push_samples(&mut self, samples: &[f32]) {
        // First call opens the device (lazy init); then enqueue for playback.
        self.ensure_stream();
        self.ring.push(samples);
    }
}
/// Mono output stream configuration at the emulator's sample rate.
fn cpal_stream_config() -> cpal::StreamConfig {
    cpal::StreamConfig {
        channels: 1,
        sample_rate: cpal::SampleRate(SAMPLE_RATE),
        // Use the audio server's default buffer size to avoid forcing the
        // entire PipeWire/PulseAudio graph into low-latency mode, which would
        // disturb other audio applications (browsers, media players, etc.).
        buffer_size: cpal::BufferSize::Default,
    }
}
#[cfg(test)]
use nesemu::VideoMode;
/// Worst-case buffered-audio latency, in milliseconds, for a ring of
/// `capacity` samples drained at `sample_rate` Hz (test-only sizing helper).
#[cfg(test)]
fn audio_ring_latency_ms(capacity: usize, sample_rate: u32) -> f64 {
    // capacity - 1: presumably one ring slot is unusable — mirrors the
    // RingBuffer implementation; TODO confirm.
    let usable = capacity.saturating_sub(1) as f64;
    (usable / f64::from(sample_rate)) * 1000.0
}
// Assumed samples consumed per audio callback, used only to size the
// headroom check. NOTE(review): not queried from cpal — confirm it matches
// real callback sizes in practice.
#[cfg(test)]
const AUDIO_CALLBACK_FRAMES: u32 = 256;

/// Minimum ring capacity: one video frame's worth of samples plus one
/// callback of headroom plus one slot (test-only sizing helper).
#[cfg(test)]
fn required_audio_ring_capacity(sample_rate: u32, mode: VideoMode) -> usize {
    let samples_per_frame = (sample_rate as f64 / mode.frame_hz()).ceil() as usize;
    samples_per_frame + AUDIO_CALLBACK_FRAMES as usize + 1
}
#[cfg(test)]
mod tests {
    use super::*;

    // Worst-case buffered audio must stay under an interactive 100ms budget.
    #[test]
    fn desktop_audio_ring_budget_stays_below_100ms() {
        let latency_ms = audio_ring_latency_ms(AUDIO_RING_CAPACITY, SAMPLE_RATE);
        let max_budget_ms = 100.0;
        assert!(
            latency_ms <= max_budget_ms,
            "desktop audio ring latency budget too high: {latency_ms:.2}ms"
        );
    }

    // Default lets the audio server (PipeWire/PulseAudio) choose the buffer
    // size, preventing interference with other audio applications.
    #[test]
    fn desktop_audio_uses_default_buffer_size() {
        let config = cpal_stream_config();
        assert_eq!(config.buffer_size, cpal::BufferSize::Default);
    }

    // The ring must absorb one full frame's burst of samples plus a
    // callback's worth of headroom without overflowing.
    #[test]
    fn desktop_audio_ring_has_frame_burst_headroom() {
        let required = required_audio_ring_capacity(SAMPLE_RATE, VideoMode::Ntsc);
        assert!(
            AUDIO_RING_CAPACITY >= required,
            "audio ring too small for frame burst: capacity={}, required={required}",
            AUDIO_RING_CAPACITY,
        );
    }
}

View File

@@ -0,0 +1,30 @@
use gtk4::gdk;
use nesemu::{InputProvider, JoypadButton, JoypadButtons, set_button_pressed};
/// Keyboard-driven joypad state for player 1.
#[derive(Default)]
pub(crate) struct InputState {
    buttons: JoypadButtons,
}

impl InputState {
    /// Map a GDK key press/release onto a joypad button; unmapped keys are
    /// ignored. Layout: arrows → D-pad, X → A, Z → B, Return → Start,
    /// either Shift → Select.
    pub(crate) fn set_key_state(&mut self, key: gdk::Key, pressed: bool) {
        let button = match key {
            gdk::Key::Up => JoypadButton::Up,
            gdk::Key::Down => JoypadButton::Down,
            gdk::Key::Left => JoypadButton::Left,
            gdk::Key::Right => JoypadButton::Right,
            gdk::Key::x | gdk::Key::X => JoypadButton::A,
            gdk::Key::z | gdk::Key::Z => JoypadButton::B,
            gdk::Key::Return => JoypadButton::Start,
            gdk::Key::Shift_L | gdk::Key::Shift_R => JoypadButton::Select,
            _ => return,
        };
        set_button_pressed(&mut self.buttons, button, pressed);
    }
}

impl InputProvider for InputState {
    fn poll_buttons(&mut self) -> JoypadButtons {
        // Snapshot of the current button mask.
        self.buttons
    }
}

View File

@@ -1,26 +1,32 @@
mod app;
mod audio;
mod input;
mod scheduling;
mod video;
use std::cell::RefCell;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::atomic::{AtomicU32, Ordering as AtomicOrdering};
use std::time::{Duration, Instant};
use std::time::Instant;
use gtk::gdk;
use gtk::gio;
use gtk::glib;
use gtk::prelude::*;
use gtk4 as gtk;
use nesemu::prelude::{EmulationState, HostConfig, RuntimeHostLoop};
use nesemu::{
FRAME_HEIGHT, FRAME_RGBA_BYTES, FRAME_WIDTH, FrameClock, InputProvider, JoypadButton,
JoypadButtons, NesRuntime, RingBuffer, VideoMode, VideoOutput, set_button_pressed,
};
use nesemu::prelude::EmulationState;
use nesemu::{FRAME_HEIGHT, FRAME_RGBA_BYTES, FRAME_WIDTH};
use app::DesktopApp;
use scheduling::DesktopFrameScheduler;
/// GTK application id.
const APP_ID: &str = "org.nesemu.desktop";
/// Main window title.
const TITLE: &str = "NES Emulator";
// Integer scale factor — presumably sizes the default window; confirm in build_ui.
const SCALE: i32 = 3;
/// Audio sample rate shared by the host config and the cpal stream.
const SAMPLE_RATE: u32 = 48_000;
const AUDIO_RING_CAPACITY: usize = 4096;
fn main() {
if std::env::var_os("GSK_RENDERER").is_none() {
unsafe {
@@ -131,48 +137,7 @@ fn build_ui(app: &gtk::Application, initial_rom: Option<PathBuf>) {
let frame_for_draw = Rc::clone(&frame_for_draw);
drawing_area.set_draw_func(move |_da, cr, width, height| {
let frame = frame_for_draw.borrow();
let stride = cairo::Format::ARgb32
.stride_for_width(FRAME_WIDTH as u32)
.unwrap();
let mut argb = vec![0u8; stride as usize * FRAME_HEIGHT];
for y in 0..FRAME_HEIGHT {
for x in 0..FRAME_WIDTH {
let src = (y * FRAME_WIDTH + x) * 4;
let dst = y * stride as usize + x * 4;
let r = frame[src];
let g = frame[src + 1];
let b = frame[src + 2];
let a = frame[src + 3];
argb[dst] = b;
argb[dst + 1] = g;
argb[dst + 2] = r;
argb[dst + 3] = a;
}
}
let surface = cairo::ImageSurface::create_for_data(
argb,
cairo::Format::ARgb32,
FRAME_WIDTH as i32,
FRAME_HEIGHT as i32,
stride,
)
.expect("Failed to create Cairo surface");
// Fill background black
cr.set_source_rgb(0.0, 0.0, 0.0);
let _ = cr.paint();
let sx = width as f64 / FRAME_WIDTH as f64;
let sy = height as f64 / FRAME_HEIGHT as f64;
let scale = sx.min(sy);
let offset_x = (width as f64 - FRAME_WIDTH as f64 * scale) / 2.0;
let offset_y = (height as f64 - FRAME_HEIGHT as f64 * scale) / 2.0;
cr.translate(offset_x, offset_y);
cr.scale(scale, scale);
let _ = cr.set_source_surface(&surface, 0.0, 0.0);
cr.source().set_filter(cairo::Filter::Nearest);
let _ = cr.paint();
video::draw_frame(&frame, cr, width, height);
});
}
@@ -434,410 +399,3 @@ fn rom_filename(path: &Path) -> String {
.map(|n| n.to_string_lossy().into_owned())
.unwrap_or_else(|| "Unknown".into())
}
// ---------------------------------------------------------------------------
// Input
// ---------------------------------------------------------------------------

/// Keyboard-driven joypad state for player 1.
#[derive(Default)]
struct InputState {
    buttons: JoypadButtons,
}

impl InputState {
    /// Map a GDK key press/release onto a joypad button; unmapped keys are
    /// ignored. Layout: arrows → D-pad, X → A, Z → B, Return → Start,
    /// either Shift → Select.
    fn set_key_state(&mut self, key: gdk::Key, pressed: bool) {
        let button = match key {
            gdk::Key::Up => JoypadButton::Up,
            gdk::Key::Down => JoypadButton::Down,
            gdk::Key::Left => JoypadButton::Left,
            gdk::Key::Right => JoypadButton::Right,
            gdk::Key::x | gdk::Key::X => JoypadButton::A,
            gdk::Key::z | gdk::Key::Z => JoypadButton::B,
            gdk::Key::Return => JoypadButton::Start,
            gdk::Key::Shift_L | gdk::Key::Shift_R => JoypadButton::Select,
            _ => return,
        };
        set_button_pressed(&mut self.buttons, button, pressed);
    }
}

impl InputProvider for InputState {
    fn poll_buttons(&mut self) -> JoypadButtons {
        // Snapshot of the current button mask.
        self.buttons
    }
}
// ---------------------------------------------------------------------------
// Audio (cpal backend)
// ---------------------------------------------------------------------------

/// Audio sink backed by cpal; samples flow through `ring` to the device.
struct CpalAudioSink {
    // Keeps the output stream alive; `None` until first use (lazy init).
    _stream: Option<cpal::Stream>,
    ring: Arc<RingBuffer>,
    // Volume as f32 bits in an AtomicU32, shared with the audio callback.
    _volume: Arc<AtomicU32>,
}

impl CpalAudioSink {
    fn new(volume: Arc<AtomicU32>) -> Self {
        let ring = Arc::new(RingBuffer::new(AUDIO_RING_CAPACITY));
        // Do NOT open the audio device here. Creating a cpal stream at startup
        // forces the system audio server (PipeWire/PulseAudio) to allocate
        // resources and may disrupt other running audio applications even when
        // the emulator is idle. The stream is opened lazily on the first
        // push_samples call, i.e. only when a ROM is actually playing.
        Self {
            _stream: None,
            ring,
            _volume: volume,
        }
    }

    // Lazily create the output stream on first use (see `new` for rationale).
    fn ensure_stream(&mut self) {
        if self._stream.is_none() {
            let ring_for_cb = Arc::clone(&self.ring);
            let vol_for_cb = Arc::clone(&self._volume);
            self._stream = Self::try_build_stream(ring_for_cb, vol_for_cb);
        }
    }

    /// Build and start an output stream. Returns `None` (after logging) on
    /// any failure so the emulator keeps running without sound.
    fn try_build_stream(ring: Arc<RingBuffer>, volume: Arc<AtomicU32>) -> Option<cpal::Stream> {
        use cpal::traits::{DeviceTrait, HostTrait, StreamTrait};
        let host = cpal::default_host();
        let device = match host.default_output_device() {
            Some(d) => d,
            None => {
                eprintln!("No audio output device found — running without sound");
                return None;
            }
        };
        let config = cpal_stream_config();
        let stream = match device.build_output_stream(
            &config,
            move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
                // Fill from the ring; zero-pad any underrun, then scale the
                // delivered samples by the shared volume.
                let read = ring.pop(data);
                for sample in &mut data[read..] {
                    *sample = 0.0;
                }
                let vol = f32::from_bits(volume.load(AtomicOrdering::Relaxed));
                for sample in &mut data[..read] {
                    *sample *= vol;
                }
            },
            move |err| {
                eprintln!("Audio stream error: {err}");
            },
            None,
        ) {
            Ok(s) => s,
            Err(err) => {
                eprintln!("Failed to build audio stream: {err} — running without sound");
                return None;
            }
        };
        if let Err(err) = stream.play() {
            eprintln!("Failed to start audio stream: {err} — running without sound");
            return None;
        }
        Some(stream)
    }

    /// Reset the ring buffer. Note: the cpal callback may still be calling
    /// `pop()` concurrently; in practice this is benign — at worst a few stale
    /// samples are played during the ROM load / reset transition.
    fn clear(&self) {
        self.ring.clear();
    }
}

impl nesemu::AudioOutput for CpalAudioSink {
    fn push_samples(&mut self, samples: &[f32]) {
        // First call opens the device (lazy init); then enqueue for playback.
        self.ensure_stream();
        self.ring.push(samples);
    }
}

/// Worst-case buffered-audio latency in milliseconds (test-only helper).
#[cfg(test)]
fn audio_ring_latency_ms(capacity: usize, sample_rate: u32) -> f64 {
    ((capacity.saturating_sub(1)) as f64 / sample_rate as f64) * 1000.0
}

/// Minimum ring capacity: one video frame's worth of samples plus one
/// callback of headroom plus one slot (test-only helper).
#[cfg(test)]
fn required_audio_ring_capacity(sample_rate: u32, mode: VideoMode) -> usize {
    let samples_per_frame = (sample_rate as f64 / mode.frame_hz()).ceil() as usize;
    samples_per_frame + AUDIO_CALLBACK_FRAMES as usize + 1
}

/// Mono output stream configuration at the emulator's sample rate.
fn cpal_stream_config() -> cpal::StreamConfig {
    cpal::StreamConfig {
        channels: 1,
        sample_rate: cpal::SampleRate(SAMPLE_RATE),
        // Use the audio server's default buffer size to avoid forcing the entire
        // PipeWire/PulseAudio graph into low-latency mode, which would disturb
        // other audio applications (browsers, media players, etc.).
        buffer_size: cpal::BufferSize::Default,
    }
}
/// Fixed-cadence frame pacer driven by absolute deadlines so that a slow
/// frame does not permanently shift the schedule.
struct DesktopFrameScheduler {
    /// Target instant of the next frame; `None` until timing (re)starts.
    next_deadline: Option<Instant>,
}

impl DesktopFrameScheduler {
    fn new() -> Self {
        Self { next_deadline: None }
    }

    /// Forget the current cadence; the next query fires immediately.
    fn reset_timing(&mut self) {
        self.next_deadline = None;
    }

    /// How long to wait before running the next frame. The first call after
    /// construction or `reset_timing` anchors the cadence at `now` and
    /// returns zero; later calls return the remaining time (zero when late).
    fn delay_until_next_frame(&mut self, now: Instant, _interval: Duration) -> Duration {
        let deadline = *self.next_deadline.get_or_insert(now);
        deadline.saturating_duration_since(now)
    }

    /// Advance the deadline by one interval, skipping whole intervals when
    /// the frame finished late so the deadline never lands in the past.
    fn mark_frame_complete(&mut self, now: Instant, interval: Duration) {
        let mut deadline = self.next_deadline.take().unwrap_or(now) + interval;
        while deadline <= now {
            deadline += interval;
        }
        self.next_deadline = Some(deadline);
    }
}
/// Stores the most recently presented RGBA frame from the emulator.
struct BufferedVideo {
    frame_rgba: Vec<u8>,
}

impl BufferedVideo {
    /// Starts with a zero-initialized frame of the fixed NES size.
    fn new() -> Self {
        Self {
            frame_rgba: vec![0; FRAME_RGBA_BYTES],
        }
    }

    /// Borrow the last presented frame (RGBA, FRAME_WIDTH x FRAME_HEIGHT).
    fn frame_rgba(&self) -> &[u8] {
        &self.frame_rgba
    }
}

impl VideoOutput for BufferedVideo {
    fn present_rgba(&mut self, frame: &[u8], width: usize, height: usize) {
        // Silently drop frames with unexpected dimensions rather than panic.
        if width != FRAME_WIDTH || height != FRAME_HEIGHT || frame.len() != FRAME_RGBA_BYTES {
            return;
        }
        self.frame_rgba.copy_from_slice(frame);
    }
}
// ---------------------------------------------------------------------------
// Application state
// ---------------------------------------------------------------------------

/// Top-level emulator state: the host loop plus input/audio/video adapters.
struct DesktopApp {
    // `None` until a ROM is loaded.
    host: Option<RuntimeHostLoop<Box<dyn FrameClock>>>,
    input: InputState,
    audio: CpalAudioSink,
    video: BufferedVideo,
    // Pause/run state is tracked manually here, alongside the host loop.
    state: EmulationState,
}

impl DesktopApp {
    fn new(volume: Arc<AtomicU32>) -> Self {
        Self {
            host: None,
            input: InputState::default(),
            audio: CpalAudioSink::new(volume),
            video: BufferedVideo::new(),
            state: EmulationState::Paused,
        }
    }

    /// Load a ROM from disk, replacing any running game, and start running.
    /// Buffered audio is flushed so the old game's samples are not played.
    fn load_rom_from_path(&mut self, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
        let data = std::fs::read(path)?;
        let runtime = NesRuntime::from_rom_bytes(&data)?;
        let config = HostConfig::new(SAMPLE_RATE, false);
        self.host = Some(RuntimeHostLoop::with_config(runtime, config));
        self.audio.clear();
        self.state = EmulationState::Running;
        Ok(())
    }

    /// Console reset: restarts the loaded game and unpauses. No-op when no
    /// ROM is loaded.
    fn reset(&mut self) {
        if let Some(host) = self.host.as_mut() {
            host.runtime_mut().reset();
            self.audio.clear();
            self.state = EmulationState::Running;
        }
    }

    fn is_loaded(&self) -> bool {
        self.host.is_some()
    }

    fn state(&self) -> EmulationState {
        self.state
    }

    /// Flip between Running and Paused.
    // NOTE(review): toggles even when no ROM is loaded (no is_loaded guard),
    // so an empty app can report Running.
    fn toggle_pause(&mut self) {
        self.state = match self.state {
            EmulationState::Running => EmulationState::Paused,
            EmulationState::Paused => EmulationState::Running,
            _ => EmulationState::Paused,
        };
    }

    /// Run one emulated frame if a ROM is loaded and not paused; logs and
    /// pauses on frame execution errors.
    fn tick(&mut self) {
        if self.state != EmulationState::Running {
            return;
        }
        let Some(host) = self.host.as_mut() else {
            return;
        };
        match host.run_frame_unpaced(&mut self.input, &mut self.video, &mut self.audio) {
            Ok(_) => {}
            Err(err) => {
                eprintln!("Frame execution error: {err}");
                self.state = EmulationState::Paused;
            }
        }
    }

    /// Borrow the most recently presented RGBA frame.
    fn frame_rgba(&self) -> &[u8] {
        self.video.frame_rgba()
    }

    /// Frame duration from the loaded ROM's video mode; NTSC when no ROM.
    fn frame_interval(&self) -> Duration {
        self.host
            .as_ref()
            .map(|host| host.runtime().video_mode().frame_duration())
            .unwrap_or_else(|| VideoMode::Ntsc.frame_duration())
    }

    /// Mutable access to the keyboard-driven joypad state.
    fn input_mut(&mut self) -> &mut InputState {
        &mut self.input
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use nesemu::{FRAME_HEIGHT, FRAME_WIDTH, VideoOutput};
    use std::time::Instant;

    // First tick fires immediately; afterwards the scheduler waits for the
    // deadline and reports zero once it has passed.
    #[test]
    fn frame_scheduler_waits_until_frame_deadline() {
        let mut scheduler = DesktopFrameScheduler::new();
        let start = Instant::now();
        let interval = Duration::from_micros(16_639);
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
        scheduler.mark_frame_complete(start, interval);
        assert!(
            scheduler.delay_until_next_frame(start + Duration::from_millis(1), interval)
                > Duration::ZERO
        );
        assert_eq!(
            scheduler.delay_until_next_frame(start + interval, interval),
            Duration::ZERO
        );
    }

    // A presented frame of the expected size is stored verbatim.
    #[test]
    fn buffered_video_captures_presented_frame() {
        let mut video = BufferedVideo::new();
        let mut frame = vec![0u8; FRAME_RGBA_BYTES];
        frame[0] = 0x12;
        frame[1] = 0x34;
        frame[2] = 0x56;
        frame[3] = 0x78;
        video.present_rgba(&frame, FRAME_WIDTH, FRAME_HEIGHT);
        assert_eq!(video.frame_rgba(), frame.as_slice());
    }

    // reset_timing drops the cadence so the next tick is immediate again.
    #[test]
    fn frame_scheduler_reset_restarts_from_immediate_tick() {
        let mut scheduler = DesktopFrameScheduler::new();
        let start = Instant::now();
        let interval = Duration::from_micros(16_639);
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
        scheduler.mark_frame_complete(start, interval);
        assert!(scheduler.delay_until_next_frame(start, interval) > Duration::ZERO);
        scheduler.reset_timing();
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
    }

    // When a frame overruns its deadline the scheduler does not sleep.
    #[test]
    fn frame_scheduler_reports_zero_delay_when_late() {
        let mut scheduler = DesktopFrameScheduler::new();
        let start = Instant::now();
        let interval = Duration::from_micros(16_639);
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
        scheduler.mark_frame_complete(start, interval);
        assert_eq!(
            scheduler.delay_until_next_frame(start + interval + Duration::from_millis(2), interval),
            Duration::ZERO
        );
    }

    // Worst-case buffered audio must stay under an interactive 100ms budget.
    #[test]
    fn desktop_audio_ring_budget_stays_below_100ms() {
        let latency_ms = audio_ring_latency_ms(AUDIO_RING_CAPACITY, SAMPLE_RATE);
        let max_budget_ms = 100.0;
        assert!(
            latency_ms <= max_budget_ms,
            "desktop audio ring latency budget too high: {latency_ms:.2}ms"
        );
    }

    #[test]
    fn desktop_audio_uses_default_buffer_size() {
        let config = cpal_stream_config();
        // Default lets the audio server (PipeWire/PulseAudio) choose the
        // buffer size, preventing interference with other audio applications.
        assert_eq!(config.buffer_size, cpal::BufferSize::Default);
    }

    // The ring must absorb one full frame's burst plus callback headroom.
    #[test]
    fn desktop_audio_ring_has_frame_burst_headroom() {
        let required = required_audio_ring_capacity(SAMPLE_RATE, VideoMode::Ntsc);
        assert!(
            AUDIO_RING_CAPACITY >= required,
            "audio ring too small for frame burst: capacity={}, required={required}",
            AUDIO_RING_CAPACITY,
        );
    }
}

View File

@@ -0,0 +1,104 @@
use std::time::{Duration, Instant};
/// Fixed-cadence frame pacer driven by absolute deadlines so that one slow
/// frame does not permanently shift the schedule.
pub(crate) struct DesktopFrameScheduler {
    /// Target instant of the next frame; `None` until timing (re)starts.
    next_deadline: Option<Instant>,
}

impl DesktopFrameScheduler {
    pub(crate) fn new() -> Self {
        Self { next_deadline: None }
    }

    /// Forget the current cadence; the next query fires immediately.
    pub(crate) fn reset_timing(&mut self) {
        self.next_deadline = None;
    }

    /// How long to wait before running the next frame. The first call after
    /// construction or `reset_timing` anchors the cadence at `now` and
    /// returns zero; later calls return the remaining time (zero when late).
    pub(crate) fn delay_until_next_frame(
        &mut self,
        now: Instant,
        _interval: Duration,
    ) -> Duration {
        let deadline = *self.next_deadline.get_or_insert(now);
        deadline.saturating_duration_since(now)
    }

    /// Advance the deadline by one interval, skipping whole intervals when
    /// the frame finished late so the deadline never lands in the past.
    pub(crate) fn mark_frame_complete(&mut self, now: Instant, interval: Duration) {
        let mut deadline = self.next_deadline.take().unwrap_or(now) + interval;
        while deadline <= now {
            deadline += interval;
        }
        self.next_deadline = Some(deadline);
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // First tick fires immediately; afterwards the scheduler waits for the
    // deadline and reports zero once it has passed.
    #[test]
    fn frame_scheduler_waits_until_frame_deadline() {
        let mut scheduler = DesktopFrameScheduler::new();
        let start = Instant::now();
        let interval = Duration::from_micros(16_639);
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
        scheduler.mark_frame_complete(start, interval);
        assert!(
            scheduler.delay_until_next_frame(start + Duration::from_millis(1), interval)
                > Duration::ZERO
        );
        assert_eq!(
            scheduler.delay_until_next_frame(start + interval, interval),
            Duration::ZERO
        );
    }

    // reset_timing drops the cadence so the next tick is immediate again.
    #[test]
    fn frame_scheduler_reset_restarts_from_immediate_tick() {
        let mut scheduler = DesktopFrameScheduler::new();
        let start = Instant::now();
        let interval = Duration::from_micros(16_639);
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
        scheduler.mark_frame_complete(start, interval);
        assert!(scheduler.delay_until_next_frame(start, interval) > Duration::ZERO);
        scheduler.reset_timing();
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
    }

    // When a frame overruns its deadline the scheduler does not sleep.
    #[test]
    fn frame_scheduler_reports_zero_delay_when_late() {
        let mut scheduler = DesktopFrameScheduler::new();
        let start = Instant::now();
        let interval = Duration::from_micros(16_639);
        assert_eq!(
            scheduler.delay_until_next_frame(start, interval),
            Duration::ZERO
        );
        scheduler.mark_frame_complete(start, interval);
        assert_eq!(
            scheduler.delay_until_next_frame(start + interval + Duration::from_millis(2), interval),
            Duration::ZERO
        );
    }
}

View File

@@ -0,0 +1,89 @@
use nesemu::{FRAME_HEIGHT, FRAME_RGBA_BYTES, FRAME_WIDTH, VideoOutput};
/// Stores the most recently presented RGBA frame from the emulator.
pub(crate) struct BufferedVideo {
    frame_rgba: Vec<u8>,
}

impl BufferedVideo {
    /// Starts with a zero-initialized frame of the fixed NES size.
    pub(crate) fn new() -> Self {
        Self {
            frame_rgba: vec![0; FRAME_RGBA_BYTES],
        }
    }

    /// Borrow the last presented frame (RGBA, FRAME_WIDTH x FRAME_HEIGHT).
    pub(crate) fn frame_rgba(&self) -> &[u8] {
        &self.frame_rgba
    }
}

impl VideoOutput for BufferedVideo {
    fn present_rgba(&mut self, frame: &[u8], width: usize, height: usize) {
        // Silently drop frames with unexpected dimensions rather than panic.
        if width != FRAME_WIDTH || height != FRAME_HEIGHT || frame.len() != FRAME_RGBA_BYTES {
            return;
        }
        self.frame_rgba.copy_from_slice(frame);
    }
}
/// Draw an RGBA frame onto the Cairo context, letterboxed and uniformly
/// scaled to fit `width` x `height`, using nearest-neighbour filtering for
/// crisp pixels.
pub(crate) fn draw_frame(frame: &[u8], cr: &cairo::Context, width: i32, height: i32) {
    // Cairo may require row padding; ask it for the stride of this format.
    let stride = cairo::Format::ARgb32
        .stride_for_width(FRAME_WIDTH as u32)
        .unwrap();
    // Convert RGBA bytes into Cairo's ARgb32 pixel layout.
    // NOTE(review): writing B,G,R,A byte order assumes a little-endian host
    // (ARgb32 is packed in native endianness) — confirm if big-endian
    // targets matter.
    let mut argb = vec![0u8; stride as usize * FRAME_HEIGHT];
    for y in 0..FRAME_HEIGHT {
        for x in 0..FRAME_WIDTH {
            let src = (y * FRAME_WIDTH + x) * 4;
            let dst = y * stride as usize + x * 4;
            let r = frame[src];
            let g = frame[src + 1];
            let b = frame[src + 2];
            let a = frame[src + 3];
            argb[dst] = b;
            argb[dst + 1] = g;
            argb[dst + 2] = r;
            argb[dst + 3] = a;
        }
    }
    let surface = cairo::ImageSurface::create_for_data(
        argb,
        cairo::Format::ARgb32,
        FRAME_WIDTH as i32,
        FRAME_HEIGHT as i32,
        stride,
    )
    .expect("Failed to create Cairo surface");
    // Fill the letterbox background black.
    cr.set_source_rgb(0.0, 0.0, 0.0);
    let _ = cr.paint();
    // Uniform scale preserving aspect ratio, centered in the widget.
    let sx = width as f64 / FRAME_WIDTH as f64;
    let sy = height as f64 / FRAME_HEIGHT as f64;
    let scale = sx.min(sy);
    let offset_x = (width as f64 - FRAME_WIDTH as f64 * scale) / 2.0;
    let offset_y = (height as f64 - FRAME_HEIGHT as f64 * scale) / 2.0;
    cr.translate(offset_x, offset_y);
    cr.scale(scale, scale);
    let _ = cr.set_source_surface(&surface, 0.0, 0.0);
    // Nearest keeps the pixel-art look when upscaling.
    cr.source().set_filter(cairo::Filter::Nearest);
    let _ = cr.paint();
}
#[cfg(test)]
mod tests {
    use super::*;

    // A presented frame of the expected size is stored verbatim.
    #[test]
    fn buffered_video_captures_presented_frame() {
        let mut video = BufferedVideo::new();
        let mut frame = vec![0u8; FRAME_RGBA_BYTES];
        frame[0] = 0x12;
        frame[1] = 0x34;
        frame[2] = 0x56;
        frame[3] = 0x78;
        video.present_rgba(&frame, FRAME_WIDTH, FRAME_HEIGHT);
        assert_eq!(video.frame_rgba(), frame.as_slice());
    }
}