diff --git a/src/context/mod.rs b/src/context/mod.rs index 48f960b1..e9f31b98 100644 --- a/src/context/mod.rs +++ b/src/context/mod.rs @@ -36,9 +36,15 @@ pub(crate) const LISTENER_AUDIO_PARAM_IDS: [AudioParamId; 9] = [ /// Unique identifier for audio nodes. /// /// Used for internal bookkeeping. -#[derive(Debug, Hash, PartialEq, Eq, Clone, Copy)] +#[derive(Hash, PartialEq, Eq, Clone, Copy)] pub(crate) struct AudioNodeId(pub u64); +impl std::fmt::Debug for AudioNodeId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "AudioNodeId({})", self.0) + } +} + /// Unique identifier for audio params. /// /// Store these in your `AudioProcessor` to get access to `AudioParam` values. diff --git a/src/context/online.rs b/src/context/online.rs index aefb5d9d..fd81a045 100644 --- a/src/context/online.rs +++ b/src/context/online.rs @@ -3,7 +3,7 @@ use std::error::Error; use std::sync::Mutex; use crate::context::{AudioContextState, BaseAudioContext, ConcreteBaseAudioContext}; -use crate::events::{EventDispatch, EventHandler, EventType}; +use crate::events::{EventDispatch, EventHandler, EventPayload, EventType}; use crate::io::{self, AudioBackendManager, ControlThreadInit, RenderThreadInit}; use crate::media_devices::{enumerate_devices_sync, MediaDeviceInfoKind}; use crate::media_streams::{MediaStream, MediaStreamTrack}; @@ -344,6 +344,39 @@ impl AudioContext { self.base().clear_event_handler(EventType::SinkChange); } + #[allow(clippy::missing_panics_doc)] + #[doc(hidden)] // Method signature might change in the future + pub fn run_diagnostics<F: Fn(String) + Send + 'static>(&self, callback: F) { + let mut buffer = Vec::with_capacity(32 * 1024); + { + let backend = self.backend_manager.lock().unwrap(); + use std::io::Write; + writeln!(&mut buffer, "backend: {}", backend.name()).ok(); + writeln!(&mut buffer, "sink id: {}", backend.sink_id()).ok(); + writeln!( + &mut buffer, + "output latency: {:.6}", + backend.output_latency() + ) + .ok(); + } + let callback = move |v| match v { 
EventPayload::Diagnostics(v) => { + let s = String::from_utf8(v).unwrap(); + callback(s); + } + _ => unreachable!(), + }; + + self.base().set_event_handler( + EventType::Diagnostics, + EventHandler::Once(Box::new(callback)), + ); + + self.base() + .send_control_msg(ControlMessage::RunDiagnostics { buffer }); + } + /// Suspends the progression of time in the audio context. /// /// This will temporarily halt audio hardware access and reducing CPU/battery usage in the diff --git a/src/events.rs b/src/events.rs index 179c5978..bc797eb3 100644 --- a/src/events.rs +++ b/src/events.rs @@ -21,6 +21,7 @@ pub(crate) enum EventType { SinkChange, RenderCapacity, ProcessorError(AudioNodeId), + Diagnostics, } /// The Error Event interface @@ -39,6 +40,7 @@ pub(crate) enum EventPayload { None, RenderCapacity(AudioRenderCapacityEvent), ProcessorError(ErrorEvent), + Diagnostics(Vec<u8>), } pub(crate) struct EventDispatch { @@ -74,6 +76,13 @@ impl EventDispatch { payload: EventPayload::ProcessorError(value), } } + + pub fn diagnostics(value: Vec<u8>) -> Self { + EventDispatch { + type_: EventType::Diagnostics, + payload: EventPayload::Diagnostics(value), + } + } } pub(crate) enum EventHandler { diff --git a/src/io/mod.rs b/src/io/mod.rs index 15d0feba..a6cc32f6 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -133,6 +133,11 @@ pub(crate) fn build_input(options: AudioContextOptions) -> MediaStream { /// Interface for audio backends pub(crate) trait AudioBackendManager: Send + Sync + 'static { + /// Name of the concrete implementation - for debug purposes + fn name(&self) -> &'static str { + std::any::type_name::<Self>() + } + /// Setup a new output stream (speakers) fn build_output(options: AudioContextOptions, render_thread_init: RenderThreadInit) -> Self where diff --git a/src/message.rs b/src/message.rs index f1bf7e59..6dec766d 100644 --- a/src/message.rs +++ b/src/message.rs @@ -52,4 +52,7 @@ pub(crate) enum ControlMessage { id: AudioNodeId, msg: llq::Node<Box<dyn Any + Send>>, }, + + /// Request a diagnostic 
report of the audio graph + RunDiagnostics { buffer: Vec<u8> }, } diff --git a/src/render/graph.rs b/src/render/graph.rs index 49db3b24..b2b5374c 100644 --- a/src/render/graph.rs +++ b/src/render/graph.rs @@ -20,6 +20,21 @@ struct OutgoingEdge { other_index: usize, } +impl std::fmt::Debug for OutgoingEdge { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut format = f.debug_struct("OutgoingEdge"); + format + .field("self_index", &self.self_index) + .field("other_id", &self.other_id); + if self.other_index == usize::MAX { + format.field("other_index", &"HIDDEN"); + } else { + format.field("other_index", &self.other_index); + } + format.finish() + } +} + /// Renderer Node in the Audio Graph pub struct Node { /// AudioNodeId, to be sent back to the control thread when this node is dropped @@ -42,6 +57,19 @@ pub struct Node { cycle_breaker: bool, } +impl std::fmt::Debug for Node { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Node") + .field("id", &self.reclaim_id.as_deref()) + .field("processor", &self.processor) + .field("channel_config", &self.channel_config) + .field("outgoing_edges", &self.outgoing_edges) + .field("free_when_finished", &self.free_when_finished) + .field("cycle_breaker", &self.cycle_breaker) + .finish_non_exhaustive() + } +} + impl Node { /// Render an audio quantum fn process(&mut self, params: AudioParamValues<'_>, scope: &RenderScope) -> bool { @@ -99,6 +127,15 @@ pub(crate) struct Graph { cycle_breakers: Vec<AudioNodeId>, } +impl std::fmt::Debug for Graph { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Graph") + .field("nodes", &self.nodes) + .field("ordered", &self.ordered) + .finish_non_exhaustive() + } +} + impl Graph { pub fn new(reclaim_id_channel: llq::Producer<AudioNodeId>) -> Self { Graph { diff --git a/src/render/node_collection.rs b/src/render/node_collection.rs index c86aadb8..5882a340 100644 --- a/src/render/node_collection.rs +++ 
b/src/render/node_collection.rs @@ -4,6 +4,7 @@ use crate::render::graph::Node; use std::cell::RefCell; use std::ops::{Index, IndexMut}; +#[derive(Debug)] pub(crate) struct NodeCollection { nodes: Vec<Option<RefCell<Node>>>, } diff --git a/src/render/processor.rs b/src/render/processor.rs index 8fee65bb..580eb4c4 100644 --- a/src/render/processor.rs +++ b/src/render/processor.rs @@ -116,6 +116,18 @@ pub trait AudioProcessor: Send { fn onmessage(&mut self, msg: &mut dyn Any) { log::warn!("Ignoring incoming message"); } + + /// Return the name of the actual AudioProcessor type + #[doc(hidden)] // not meant to be user facing + fn name(&self) -> &'static str { + std::any::type_name::<Self>() + } +} + +impl std::fmt::Debug for dyn AudioProcessor { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct(self.name()).finish_non_exhaustive() + } } struct DerefAudioRenderQuantumChannel<'a>(std::cell::Ref<'a, Node>); @@ -161,3 +173,31 @@ impl<'a> AudioParamValues<'a> { crate::context::LISTENER_AUDIO_PARAM_IDS.map(|p| self.get(&p)) } } + +#[cfg(test)] +mod tests { + use super::*; + + struct TestNode; + + impl AudioProcessor for TestNode { + fn process( + &mut self, + _inputs: &[AudioRenderQuantum], + _outputs: &mut [AudioRenderQuantum], + _params: AudioParamValues<'_>, + _scope: &RenderScope, + ) -> bool { + todo!() + } + } + + #[test] + fn test_debug_fmt() { + let proc = &TestNode as &dyn AudioProcessor; + assert_eq!( + &format!("{:?}", proc), + "web_audio_api::render::processor::tests::TestNode { .. }" + ); + } +} diff --git a/src/render/thread.rs b/src/render/thread.rs index 606fb492..d567a129 100644 --- a/src/render/thread.rs +++ b/src/render/thread.rs @@ -24,6 +24,7 @@ use super::graph::Graph; pub(crate) struct RenderThread { graph: Option<Graph>, sample_rate: f32, + buffer_size: usize, /// number of channels of the backend stream, i.e. 
sound card number of /// channels clamped to MAX_CHANNELS number_of_channels: usize, @@ -46,6 +47,17 @@ unsafe impl Sync for Graph {} unsafe impl Send for RenderThread {} unsafe impl Sync for RenderThread {} +impl std::fmt::Debug for RenderThread { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RenderThread") + .field("sample_rate", &self.sample_rate) + .field("buffer_size", &self.buffer_size) + .field("frames_played", &self.frames_played.load(Ordering::Relaxed)) + .field("number_of_channels", &self.number_of_channels) + .finish_non_exhaustive() + } +} + impl RenderThread { pub fn new( sample_rate: f32, @@ -56,6 +68,7 @@ impl RenderThread { Self { graph: None, sample_rate, + buffer_size: 0, number_of_channels, frames_played, receiver: Some(receiver), @@ -148,6 +161,16 @@ impl RenderThread { gc.push(msg) } } + RunDiagnostics { mut buffer } => { + if let Some(sender) = self.event_sender.as_ref() { + use std::io::Write; + writeln!(&mut buffer, "{:#?}", &self).ok(); + writeln!(&mut buffer, "{:?}", &self.graph).ok(); + sender + .try_send(EventDispatch::diagnostics(buffer)) + .expect("Unable to send diagnostics - channel is full"); + } + } } } } @@ -236,6 +259,8 @@ impl RenderThread { } fn render_inner<S: FromSample<f32> + Clone>(&mut self, mut output_buffer: &mut [S]) { + self.buffer_size = output_buffer.len(); + // There may be audio frames left over from the previous render call, // if the cpal buffer size did not align with our internal RENDER_QUANTUM_SIZE if let Some((offset, prev_rendered)) = self.buffer_offset.take() {