~nickbp/soundview

91dda82c5e38bcecb59bb4736928c5935bae196e — Nick Parker 4 months ago ab173ef
Move work into subthreads to detect panics
8 files changed, 221 insertions(+), 112 deletions(-)

M Cargo.lock
M Cargo.toml
M src/fourier.rs
M src/lib.rs
M src/main.rs
M src/recorder.rs
A src/renderer.rs
A src/sdl.rs
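
The mechanism behind "Move work into subthreads to detect panics": each worker thread holds a clone of a shared Arc, and the main thread polls Arc::strong_count. Whenever a worker exits for any reason, including an unwinding panic, its clone is dropped and the count falls below the expected value. A minimal standalone sketch of that pattern (simplified names, not code from this commit):

    use std::{sync::Arc, thread, time::Duration};

    fn main() {
        // Each worker owns a clone of this Arc. When a worker returns or panics
        // (and unwinds), its clone is dropped and the strong count decreases.
        let alive = Arc::new(());
        let mut handles = Vec::new();
        for _ in 0..2 {
            let alive_clone = alive.clone();
            handles.push(thread::spawn(move || {
                let _guard = alive_clone;
                // ... worker loop; a panic here still drops `_guard` ...
                thread::sleep(Duration::from_millis(250));
            }));
        }
        // Expected count: one clone per worker, plus our own copy.
        let expected = handles.len() + 1;
        while Arc::strong_count(&alive) == expected {
            thread::sleep(Duration::from_millis(100));
        }
        println!("at least one worker has exited");
        for h in handles {
            let _ = h.join();
        }
    }
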
M Cargo.lock => Cargo.lock +18 -0
@@ -3,6 +3,15 @@
version = 3

[[package]]
name = "aho-corasick"
version = "0.7.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
dependencies = [
 "memchr",
]

[[package]]
name = "ansi_term"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"


@@ -139,6 +148,12 @@ dependencies = [
]

[[package]]
name = "memchr"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"

[[package]]
name = "no-std-compat"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"


@@ -217,6 +232,8 @@ version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
dependencies = [
 "aho-corasick",
 "memchr",
 "regex-syntax",
]



@@ -326,6 +343,7 @@ dependencies = [
 "anyhow",
 "crossbeam-channel",
 "git-testament",
 "regex",
 "ringbuf",
 "rustfft",
 "sdl2",

M Cargo.toml => Cargo.toml +1 -0
@@ -12,6 +12,7 @@ repository = "https://sr.ht/~nickbp/soundview/"
anyhow = "1.0"
crossbeam-channel = "0.5"
git-testament = "0.2"
regex = "1.5"
ringbuf = "0.2"
rustfft = "6.0"
sdl2 = "0.35"

M src/fourier.rs => src/fourier.rs +4 -3
@@ -1,3 +1,4 @@
use anyhow::Result;
use crossbeam_channel::{Receiver, Sender};
use ringbuf::RingBuffer;
use rustfft::{FftPlanner, num_complex::Complex};


@@ -18,7 +19,7 @@ fn hann(width: usize, offset: usize, window_length: usize) -> Vec<f32> {
    samples
}

pub fn process_audio_loop(fft_size: usize, recv_audio: Receiver<Vec<f32>>, send_processed: Sender<Vec<f32>>) {
pub fn process_audio_loop(fft_size: usize, recv_audio: Receiver<Vec<f32>>, send_processed: Sender<Vec<f32>>) -> Result<()> {
    let fft = FftPlanner::new().plan_fft_forward(fft_size);
    // Have the edges of the FFT input be zeroes
    let window = hann(fft_size - 2, 1, fft_size);


@@ -73,14 +74,14 @@ pub fn process_audio_loop(fft_size: usize, recv_audio: Receiver<Vec<f32>>, send_
                    // Process the selected data in fft_buf, then forward the result
                    fft.process(&mut fft_buf);
                    if let Err(e) = send_processed.send(Vec::from_iter(fft_buf.iter().map(|c| c.norm()))) {
                        warn!("Failed to send processed data: {}", e);
                        warn!("failed to send processed data: {}", e);
                    }
                }
            },
            Err(e) => {
                // This generally means that the input channel has closed
                debug!("exiting audio processing thread: {}", e);
                break;
                return Ok(());
            }
        }
    }
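
For context on this loop: it plans a forward FFT with rustfft, windows each frame (the hann() body isn't shown in this hunk, but the call and the zero-edges comment suggest the standard Hann taper), and forwards one norm per bin over send_processed. A condensed single-frame sketch under those assumptions:

    use rustfft::{FftPlanner, num_complex::Complex};

    // One frame in, one magnitude spectrum out; assumes samples.len() is the FFT size.
    fn spectrum(samples: &[f32]) -> Vec<f32> {
        let n = samples.len();
        let fft = FftPlanner::new().plan_fft_forward(n);
        // Standard Hann window: tapers the frame's edges toward zero before the FFT.
        let mut fft_buf: Vec<Complex<f32>> = samples
            .iter()
            .enumerate()
            .map(|(i, s)| {
                let w = 0.5
                    * (1.0 - (2.0 * std::f32::consts::PI * i as f32 / (n as f32 - 1.0)).cos());
                Complex { re: s * w, im: 0.0 }
            })
            .collect();
        fft.process(&mut fft_buf);
        // Same shape as the data sent over send_processed: one norm per bin.
        fft_buf.iter().map(|c| c.norm()).collect()
    }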

M src/lib.rs => src/lib.rs +4 -0
@@ -1,5 +1,9 @@
/// Thread for processing incoming audio with a windowed fourier transform.
pub mod fourier;
pub mod hsl;
/// Utilities relating to setting up log output.
pub mod logging;
pub mod recorder;
pub mod renderer;
/// Thread for setting up the SDL display and event loop.
pub mod sdl;

M src/main.rs => src/main.rs +44 -93
@@ -1,24 +1,16 @@
use anyhow::{anyhow, bail, Result};
use anyhow::{bail, Result};
use git_testament::{git_testament, render_testament};
use regex::Regex;
use tracing::{debug, info, warn};

use sdl2::event::{Event, WindowEvent};
use sdl2::keyboard::{Keycode, Mod};
use sdl2::pixels::Color;

use soundview::{fourier, logging, recorder};
use soundview::{fourier, logging, recorder, sdl};

use std::sync::Arc;
use std::thread;
use std::time::Duration;

git_testament!(TESTAMENT);

fn set_hint(name: &str) {
    let before = sdl2::hint::get(name);
    sdl2::hint::set(name, "1");
    debug!("{}: {:?} => {:?}", name, before, sdl2::hint::get(name));
}

fn main() -> Result<()> {
    logging::init_logging();
    let sdl_version = sdl2::version::version();


@@ -30,98 +22,57 @@ fn main() -> Result<()> {
        }
    }

    // Enables PulseAudio "monitor" devices in the list.
    // These are what allow visualizing audio that's playing on the system.
    // Without this we can only visualize the mic input.
    // This option only works on 2.0.16 or newer.
    set_hint("SDL_AUDIO_INCLUDE_MONITORS");

    let sdl_context = sdl2::init().map_err(|e| anyhow!(e))?;

    let video_subsystem = sdl_context.video().map_err(|e| anyhow!(e))?;
    if video_subsystem.is_screen_saver_enabled() {
        debug!("disabling screensaver");
        video_subsystem.disable_screen_saver();
    }
    let mut canvas = video_subsystem.window("soundview", 800, 600)
        .resizable()
        .build()
        .map_err(|e| anyhow!(e))?
        .into_canvas()
        .accelerated()
        .present_vsync()
        .build()
        .map_err(|e| anyhow!(e))?;

    let (send_audio, recv_audio) = crossbeam_channel::bounded::<Vec<f32>>(100);
    let (send_processed, recv_processed) = crossbeam_channel::bounded::<Vec<f32>>(100);

    let shared_arc = Arc::new(false);

    // TODO expose user options for these
    let device_filter = Regex::new("^Monitor of.*")?;
    let freq = Some(48000); // NOTE: trying 192k and even 384k seemed to work
    let samples_fourier = 1024; // NOTE: must be power of 2, may make sense to just scale with freq
    let samples_sdl = Some(samples_fourier.clone());

    let mut thread_handles = Vec::new();
    let shared_arc_sdl = shared_arc.clone();
    thread_handles.push(
        thread::Builder::new()
            .name("fourier".to_string())
            .name("sdl".to_string())
            .spawn(move || {
                // TODO expose fourier size as a cmdline option?
                fourier::process_audio_loop(1024, recv_audio, send_processed)
                let _local_shared_arc = shared_arc_sdl;
                let rec = recorder::Recorder::new(device_filter, freq, samples_sdl, send_audio);
                if let Err(e) = sdl::process_event_loop(recv_processed, rec) {
                    warn!("exiting event loop with error: {}", e);
                } else {
                    debug!("exiting event loop: told to quit");
                }
            })?
    );

    {
        // Ensure that rec (and send_audio) go out of scope when we're exiting,
        // so that the audio processing thread shuts down.
        let mut rec = recorder::Recorder::new(sdl_context.audio().map_err(|e| anyhow!(e))?, send_audio);

        let mut i = 0;
        'mainloop: loop {
            // handle events (if any)
            // TODO could break this out into a separate function, but it needs to interact with everything else...
            if let Some(event) = sdl_context.event_pump().map_err(|e| anyhow!(e))?.poll_event() {
                match event {
                    // <Close window>, Esc, Q, Ctrl+C, Alt+F4: Quit
                    Event::Quit {..} |
                    Event::KeyDown { keycode: Some(Keycode::Escape) | Some(Keycode::Q), .. } |
                    Event::KeyDown { keymod: Mod::LCTRLMOD | Mod::RCTRLMOD, keycode: Some(Keycode::C), .. } |
                    Event::KeyDown { keymod: Mod::LALTMOD | Mod::RALTMOD, keycode: Some(Keycode::F4), .. } => {
                        break 'mainloop;
                    },

                    // R, Space: Rotate
                    Event::KeyDown { keycode: Some(Keycode::R) | Some(Keycode::Space), .. } => {
                        info!("rotate");
                    },

                    // A, D, I: Switch Audio Device Input
                    Event::KeyDown { keycode: Some(Keycode::A) | Some(Keycode::D) | Some(Keycode::I), .. } => {
                        info!("next device");
                    },

                    Event::Window {win_event: WindowEvent::SizeChanged(x, y), ..} | Event::Window {win_event: WindowEvent::Resized(x,y), ..} => {
                        info!("resize: x={}, y={}", x, y);
                    },

                    Event::AudioDeviceAdded {iscapture: true, which: idx, ..} => {
                        rec.device_added(idx)?;
                    },
                    Event::AudioDeviceRemoved {iscapture: true, which: idx, ..} => {
                        info!("capture device removed: idx={}", idx);
                    },
                    _ => {
                        debug!("ignored event: {:?}", event);
                    }
    let shared_arc_fourier = shared_arc.clone();
    thread_handles.push(
        thread::Builder::new()
            .name("fourier".to_string())
            .spawn(move || {
                let _local_shared_arc = shared_arc_fourier;
                if let Err(e) = fourier::process_audio_loop(samples_fourier.into(), recv_audio, send_processed) {
                    warn!("exiting audio loop with error: {}", e);
                } else {
                    debug!("exiting audio loop: shutting down");
                }
            }

            // render
            while let Ok(audio) = recv_processed.try_recv() {
                debug!("got processed audio: {:?}", audio.len()); // TODO actual rendering...
            }
            i = (i + 1) % 255;
            canvas.set_draw_color(Color::RGB(i, 64, 255 - i));
            canvas.clear();
            })?
    );

            canvas.present();
            thread::sleep(Duration::new(0, 1_000_000_000u32 / 60));
    // count: Each thread's copy, plus our local copy
    let expected_count = thread_handles.len() + 1;
    let sleep_nanos = 100 * 1_000_000; // 100ms
    // Wait for at least one thread to exit for any reason (user exit, panic, etc...)
    loop {
        let got_count = Arc::<bool>::strong_count(&shared_arc);
        if got_count != expected_count {
            info!("at least one thread has exited: expected={} got={}", expected_count, got_count);
            break;
        }
        thread::sleep(Duration::new(0, sleep_nanos));
    }

    for thread in thread_handles {


@@ -130,7 +81,7 @@ fn main() -> Result<()> {
        } else {
            "???".to_string()
        };
        info!("Waiting for thread to exit: {}", thread_name);
        debug!("waiting for thread to exit: {}", thread_name);
        // Would use with_context but the error type is weird
        match thread.join().err() {
            Some(e) => bail!("Failed to wait for thread to exit: {} {:?}", thread_name, e),

M src/recorder.rs => src/recorder.rs +22 -16
@@ -1,4 +1,5 @@
use anyhow::{anyhow, Result};
use regex::Regex;
use tracing::{error, info};

use sdl2::audio::{AudioCallback, AudioDevice, AudioSpecDesired};


@@ -21,40 +22,45 @@ impl AudioCallback for Callback {
}

pub struct Recorder {
    audio_subsystem: AudioSubsystem,
    device_filter: Regex,
    freq: Option<i32>,
    samples: Option<u16>,
    audio_out: crossbeam_channel::Sender<Vec<f32>>,
    rec_dev: Option<AudioDevice<Callback>>
}

impl Recorder {
    pub fn new(audio_subsystem: AudioSubsystem, audio_out: crossbeam_channel::Sender<Vec<f32>>) -> Recorder {
    pub fn new(device_filter: Regex, freq: Option<i32>, samples: Option<u16>, audio_out: crossbeam_channel::Sender<Vec<f32>>) -> Recorder {
        Recorder {
            audio_subsystem,
            device_filter,
            freq,
            samples,
            audio_out,
            rec_dev: Option::None
        }
    }

    pub fn device_added(self: &mut Recorder, which: u32) -> Result<()> {
        let name = self.audio_subsystem.audio_capture_device_name(which).map_err(|e| anyhow!(e))?;
        if name != "Monitor of Built-in Audio Analog Stereo" {
            // ignore
            info!("added capture device (ignored): idx={} name={:?}", which, name);
            return Ok(());
    // TODO device mgmt probably could stand a rethink?
    // if no device filter is specified, could just autoselect the 'loudest' device on startup and every Xs after that
    pub fn device_added(self: &mut Recorder, audio_subsystem: &AudioSubsystem, which: u32) -> Result<()> {
        let name = audio_subsystem.audio_capture_device_name(which).map_err(|e| anyhow!(e))?;
        if self.device_filter.is_match(&name) {
            self.record(audio_subsystem, name.as_str())
        } else {
            info!("Ignoring capture device: idx={} name={:?}", which, name);
            Ok(())
        }
        info!("added capture device (setting up): idx={} name={:?}", which, name);
        self.record(name.as_str())
    }

    fn record(self: &mut Recorder, device_name: &str) -> Result<()> {
    fn record(self: &mut Recorder, audio_subsystem: &AudioSubsystem, device_name: &str) -> Result<()> {
        let desired_spec = AudioSpecDesired {
            freq: Some(44100),
            channels: Some(1),  // mono
            samples: None       // default sample size, power of 2
            freq: self.freq.clone(),
            channels: Some(1),
            samples: self.samples.clone()
        };

        let audio_out_cpy = self.audio_out.clone();
        let rec_dev = self.audio_subsystem.open_capture(device_name, &desired_spec, |actual_spec| {
        let rec_dev = audio_subsystem.open_capture(device_name, &desired_spec, |actual_spec| {
            info!("Capturing audio from '{}' with spec: {:?}", device_name, actual_spec);
            // initialize the audio callback
            Callback {
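
The hard-coded "Monitor of Built-in Audio Analog Stereo" check becomes a caller-supplied Regex; with the "^Monitor of.*" filter constructed in main.rs, matching works roughly like this (device names are illustrative):

    use regex::Regex;

    fn main() -> Result<(), regex::Error> {
        let device_filter = Regex::new("^Monitor of.*")?;
        // PulseAudio "monitor" devices (system playback) match and get recorded...
        assert!(device_filter.is_match("Monitor of Built-in Audio Analog Stereo"));
        // ...while plain microphone inputs are logged and ignored.
        assert!(!device_filter.is_match("Built-in Audio Analog Stereo"));
        Ok(())
    }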

A src/renderer.rs => src/renderer.rs +34 -0
@@ -0,0 +1,34 @@
use anyhow::Result;
use sdl2::pixels::Color;
use sdl2::render::Canvas;
use sdl2::video::Window;
use tracing::{debug};

pub struct Renderer {
    canvas: Canvas<Window>,
    i: u8,
}

impl Renderer {
    pub fn new(canvas: Canvas<Window>) -> Renderer {
        Renderer {
            canvas,
            i: 0
        }
    }

    pub fn add(self: &mut Renderer, audio: Vec<f32>) -> Result<()> {
        debug!("got processed audio: {:?}", audio.len()); // TODO actual rendering...
        Ok(())
    }

    pub fn render(self: &mut Renderer) -> Result<()> {
        self.i = (self.i + 1) % 255;
        self.canvas.set_draw_color(Color::RGB(self.i, 64, 255 - self.i));
        self.canvas.clear();

        self.canvas.present();
        Ok(())
    }
}
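
One hypothetical direction for the add() TODO, purely illustrative and not from the repo: scale each incoming bin magnitude into a vertical bar and fill it on the canvas. The 100.0 normalization constant is a placeholder.

    use anyhow::Result;
    use sdl2::pixels::Color;
    use sdl2::rect::Rect;
    use sdl2::render::Canvas;
    use sdl2::video::Window;

    // Draw one bar per FFT bin, scaled against an assumed maximum magnitude.
    fn draw_bars(canvas: &mut Canvas<Window>, magnitudes: &[f32]) -> Result<()> {
        let (w, h) = canvas.output_size().map_err(anyhow::Error::msg)?;
        let bar_w = ((w as usize / magnitudes.len().max(1)).max(1)) as u32;
        canvas.set_draw_color(Color::RGB(255, 255, 255));
        for (i, m) in magnitudes.iter().enumerate() {
            let bar_h = ((*m / 100.0).clamp(0.0, 1.0) * h as f32) as u32;
            let x = (i as u32 * bar_w) as i32;
            let y = (h - bar_h) as i32;
            canvas.fill_rect(Rect::new(x, y, bar_w, bar_h)).map_err(anyhow::Error::msg)?;
        }
        Ok(())
    }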


A src/sdl.rs => src/sdl.rs +94 -0
@@ -0,0 +1,94 @@
use anyhow::{anyhow, Result};
use crossbeam_channel::Receiver;
use sdl2::event::{Event, WindowEvent};
use sdl2::keyboard::{Keycode, Mod};
use tracing::{debug, info};

use std::thread;
use std::time::Duration;

use crate::{recorder, renderer};

fn set_hint(name: &str) {
    let before = sdl2::hint::get(name);
    sdl2::hint::set(name, "1");
    debug!("{}: {:?} => {:?}", name, before, sdl2::hint::get(name));
}

pub fn process_event_loop(recv_processed: Receiver<Vec<f32>>, mut rec: recorder::Recorder) -> Result<()> {
    // Enables PulseAudio "monitor" devices in the list.
    // These are what allow visualizing audio that's playing on the system.
    // Without this we can only visualize the mic input.
    // This option only works on 2.0.16 or newer.
    set_hint("SDL_AUDIO_INCLUDE_MONITORS");

    let sdl_context = sdl2::init().map_err(|e| anyhow!(e))?;

    let video_subsystem = sdl_context.video().map_err(|e| anyhow!(e))?;
    if video_subsystem.is_screen_saver_enabled() {
        debug!("disabling screensaver");
        video_subsystem.disable_screen_saver();
    }
    let canvas = video_subsystem.window("soundview", 800, 600)
        .resizable()
        .build()
        .map_err(|e| anyhow!(e))?
        .into_canvas()
        .accelerated()
        .present_vsync()
        .build()
        .map_err(|e| anyhow!(e))?;

    let audio_subsystem = sdl_context.audio().map_err(|e| anyhow!(e))?;

    let mut renderer = renderer::Renderer::new(canvas);
    loop {
        // handle events (if any)
        // TODO could break this out into a separate function, but it needs to interact with everything else...
        if let Some(event) = sdl_context.event_pump().map_err(|e| anyhow!(e))?.poll_event() {
            match event {
                // <Close window>, Esc, Q, Ctrl+C, Alt+F4: Quit
                Event::Quit {..} |
                Event::KeyDown { keycode: Some(Keycode::Escape) | Some(Keycode::Q), .. } |
                Event::KeyDown { keymod: Mod::LCTRLMOD | Mod::RCTRLMOD, keycode: Some(Keycode::C), .. } |
                Event::KeyDown { keymod: Mod::LALTMOD | Mod::RALTMOD, keycode: Some(Keycode::F4), .. } => {
                    return Ok(());
                },

                // R, Space: Rotate
                Event::KeyDown { keycode: Some(Keycode::R) | Some(Keycode::Space), .. } => {
                    info!("rotate");
                },

                // A, D, I: Switch Audio Device Input
                Event::KeyDown { keycode: Some(Keycode::A) | Some(Keycode::D) | Some(Keycode::I), .. } => {
                    info!("next device");
                },

                Event::Window {win_event: WindowEvent::SizeChanged(x, y), ..} | Event::Window {win_event: WindowEvent::Resized(x,y), ..} => {
                    info!("resize: x={}, y={}", x, y);
                },

                Event::AudioDeviceAdded {iscapture: true, which: idx, ..} => {
                    rec.device_added(&audio_subsystem, idx)?;
                },
                Event::AudioDeviceRemoved {iscapture: true, which: idx, ..} => {
                    info!("capture device removed: idx={}", idx); // TODO tell recorder about this too
                },
                _ => {
                    debug!("ignored event: {:?}", event);
                }
            }
        }

        // render
        while let Ok(audio) = recv_processed.try_recv() {
            // NOTE: this needs to be pretty fast, if it's too slow then we never move on!
            renderer.add(audio)?;
        }
        renderer.render()?;

        // 1/60th of a second
        thread::sleep(Duration::new(0, 1_000_000_000u32 / 60));
    }
}
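
As the comment notes, SDL_AUDIO_INCLUDE_MONITORS only takes effect on SDL 2.0.16 or newer; main.rs already queries sdl2::version::version(), so a runtime guard could look like the following (hypothetical, not part of this commit):

    use tracing::warn;

    // Only request PulseAudio monitor devices when the linked SDL honors the hint.
    fn enable_monitor_capture() {
        let v = sdl2::version::version();
        if (v.major, v.minor, v.patch) >= (2, 0, 16) {
            sdl2::hint::set("SDL_AUDIO_INCLUDE_MONITORS", "1");
        } else {
            warn!(
                "SDL {}.{}.{} predates audio monitor capture; only mic input is available",
                v.major, v.minor, v.patch
            );
        }
    }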