Misc cleanup and comments

Muaz Ahmad 2024-12-13 12:31:06 +05:00
parent d9dddcd757
commit 9c4914178d
12 changed files with 49 additions and 24 deletions

.gitignore vendored
View file

@@ -1,3 +1,2 @@
 target/
 subtails.toml
-src/cover_default.png

View file

@@ -1,13 +1,14 @@
 use std::{
     collections::VecDeque,
     io::{Read, Write},
-    process::{Child, ChildStdin, ChildStdout, Command, Stdio},
+    process::{Child, Command, Stdio},
     sync::mpsc::{channel, Receiver, Sender, TryRecvError},
     thread,
 };
 
 use crate::utils::Error;
 
+// Struct to manage and handle sending the bytestream to and getting frames from the decoder (ffmpeg)
 pub struct DecoderContext {
     process: Child,
     encoded_in: Sender<Vec<u8>>,
@@ -23,12 +24,14 @@ pub fn init(stride: usize, sample_rate: u32) -> Result<DecoderContext, Error> {
         .args(["-hide_banner", "-i", "-", "-f", "s16le", "-"])
         .stdin(Stdio::piped())
         .stdout(Stdio::piped())
-        .stderr(Stdio::null())
+        .stderr(Stdio::null()) // stderr messes with tuis
         .spawn()?;
     let (encoded_in, encoded_out) = channel::<Vec<u8>>();
     let (frames_in, frames_out) = channel();
     let mut ffmpeg_in = decoder.stdin.take().unwrap();
     let mut ffmpeg_out = decoder.stdout.take().unwrap();
+
+    // bytestream writer
     thread::spawn(move || loop {
         if let Ok(chunk) = encoded_out.recv() {
             ffmpeg_in.write_all(chunk.as_slice()).unwrap();
@@ -36,7 +39,10 @@ pub fn init(stride: usize, sample_rate: u32) -> Result<DecoderContext, Error> {
             break;
         }
     });
+
+    // frame reader
     thread::spawn(move || loop {
+        // read in 10 millisecond chunks
         let mut frames = vec![0; stride * sample_rate as usize / 1000 * 10];
         match ffmpeg_out.read_exact(frames.as_mut_slice()) {
             Err(_) => break,
@@ -54,7 +60,7 @@ pub fn init(stride: usize, sample_rate: u32) -> Result<DecoderContext, Error> {
         sample_buf: VecDeque::new(),
         input_done: false,
         done_signalled: false,
-        fetch_queued: true,
+        fetch_queued: true, // start by pretending the first chunk is already requested, handled by other modules
     })
@@ -67,11 +73,13 @@ impl Drop for DecoderContext {
 impl DecoderContext {
     pub fn append_chunk(&mut self, chunk: Vec<u8>) -> Result<(), Error> {
         self.encoded_in.send(chunk)?;
+        // a requested chunk has been received and frames should buffer, free to request another if needed.
         self.fetch_queued = false;
         Ok(())
     }
 
     pub fn fetch_samples(&mut self) {
+        // get as many samples as have been queued by now.
         loop {
             match self.frames_out.try_recv() {
                 Err(TryRecvError::Empty) => break,
@@ -83,15 +91,20 @@ impl DecoderContext {
     pub fn next_sample(&mut self) -> (Vec<u8>, SampleReturnState) {
         let curr_n_samples = self.sample_buf.len();
+        // signal nothing by default
         let mut fetch_more_file = SampleReturnState::BufferPending;
+        // < 10s of frames left in buffer
        if curr_n_samples < 1000 {
             self.fetch_samples();
+            // no new samples were fetched
             if self.sample_buf.len() == curr_n_samples {
+                // if last chunk is already received, can't fetch more
                 if !self.input_done {
                     if !self.fetch_queued {
                         fetch_more_file = SampleReturnState::FetchMore;
                         self.fetch_queued = true;
                     }
+                // last few milliseconds of audio
                 } else if curr_n_samples < 50 && !self.done_signalled {
                     fetch_more_file = SampleReturnState::FileDone;
                     self.done_signalled = true;
@@ -100,6 +113,7 @@ impl DecoderContext {
             }
         }
         return (
+            // if there is somehow no sample at this point, return empty audio
             self.sample_buf.pop_front().unwrap_or(vec![0; 1920]),
             fetch_more_file,
         );

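A quick sanity check on the 10 millisecond chunk arithmetic used by the frame reader above, as a minimal sketch. The concrete numbers (48 kHz s16le stereo, so a 4-byte stride per frame) are assumptions inferred from the rest of the code, not stated in the diff:

// frames per 10 ms = sample_rate / 1000 * 10; bytes = frames * stride
fn chunk_bytes(stride: usize, sample_rate: u32) -> usize {
    stride * sample_rate as usize / 1000 * 10
}

fn main() {
    // 4 bytes per frame * 48000 / 1000 * 10 = 1920 bytes,
    // matching the vec![0; 1920] fallback in next_sample above.
    assert_eq!(chunk_bytes(4, 48_000), 1920);
}
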
View file

@@ -1,4 +1,4 @@
-use num_complex::{Complex, Complex64};
+use num_complex::Complex;
 use rustfft::{Fft, FftPlanner};
 
 use crate::utils::FFTResult;
@@ -31,6 +31,7 @@ impl FFTFrame {
     }
 
     pub fn push(&mut self, new_samples: [[Complex<f64>; 480]; 2]) {
+        // only keep n sample frames of 10ms in buffer to compute
         if self.buffer.len() == FFT_WINDOW {
             self.buffer.pop_back();
         }
@@ -39,6 +40,7 @@ impl FFTFrame {
     }
 
     pub fn compute(&mut self) -> Option<FFTResult> {
+        // only compute every other 10ms.
         if self.n_iter % 2 != 0 {
             return None;
         }
@@ -65,6 +67,7 @@ impl FFTFrame {
         let mut result = [0.0; 20];
         let mut bin_start = 0;
         for bin_i in 0..20 {
+            // bin windows with gamma, https://dlbeer.co.nz/articles/fftvis.html
             let bin_end =
                 (f64::powi((bin_i + 1) as f64 / 20.0, 2) * FFT_FRAME_SIZE as f64) as usize;
             let max_l = buff_l[bin_start..std::cmp::min(bin_end, FFT_FRAME_SIZE)]
@@ -77,6 +80,7 @@ impl FFTFrame {
                 .map(|x| x.norm_sqr() / FFT_FRAME_SIZE as f64)
                 .max_by(|a, b| a.total_cmp(b))
                 .unwrap();
+            // compute a log10 from normed magnitude, clamp to -70-0 (arbitrary)
             result[bin_i] =
                 (10.0 * f64::log10(f64::max(max_l, max_r))).clamp(-70.0, 0.0) / 70.0 + 1.0;
             bin_start = bin_end;
@@ -84,6 +88,7 @@ impl FFTFrame {
         Some(result)
     }
 
+    // hamming window function
     fn hamming(&self, buff: &mut [Complex<f64>]) {
         for i in 0..buff.len() {
             let mult = 25.0 / 46.0

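For reference, a minimal sketch of the two formulas the new comments point at: the exact Hamming window coefficient (the 25/46 term above, with the standard cosine term filled in) and the gamma-spaced bin edges from the linked fftvis article. Function names and the frame size used in main are illustrative, not the project's API:

use std::f64::consts::PI;

// Hamming window: w(n) = 25/46 - (21/46) * cos(2 * pi * n / (N - 1))
fn hamming_coeff(n: usize, len: usize) -> f64 {
    25.0 / 46.0 - (21.0 / 46.0) * f64::cos(2.0 * PI * n as f64 / (len - 1) as f64)
}

// Gamma-spaced bin edges (gamma = 2): bin_end(i) = ((i + 1) / 20)^2 * frame_size,
// so low-frequency bins stay narrow and high-frequency bins widen.
fn bin_edges(frame_size: usize) -> Vec<usize> {
    (0..20)
        .map(|i| (f64::powi((i + 1) as f64 / 20.0, 2) * frame_size as f64) as usize)
        .collect()
}

fn main() {
    // the window peaks near 1.0 at its midpoint
    println!("{:.3}", hamming_coeff(240, 480));
    // first few edges for an arbitrary 240-bin frame: narrow bins at low frequencies
    println!("{:?}", &bin_edges(240)[..4]);
}
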
View file

@@ -1,5 +1,4 @@
 use std::{
-    collections::VecDeque,
     sync::{
         mpsc::{channel, Sender},
         Arc,

View file

@@ -37,6 +37,7 @@ pub fn init(
             Some(mut buf) => {
                 let data = &mut (buf.datas_mut()[0]);
                 let n_iter = if let Some(sample_buf) = data.data() {
+                    // try to fetch any samples that exist
                     if let Ok(next_samples) = samples.try_recv() {
                         if next_samples.len() > sample_buf.len() {
                             panic!("Buffer too small for given sample chunks");
@@ -59,6 +60,9 @@ pub fn init(
         .register()?;
 
     let audio_source_ref = audio_source.clone();
+
+    // use this trigger to reattach a stream on changing audio files, intended for sample rate/channel changes.
+    // Rest of the code assumes the default anyway, so it doesn't even matter
     let _receiver = pw_signal.attach(mainloop.loop_(), move |pod_bytes| {
         let mut params = [Pod::from_bytes(&pod_bytes).unwrap()];
         audio_source_ref.disconnect().unwrap();

View file

@@ -1,5 +1,4 @@
 use std::{
-    collections::VecDeque,
     sync::{
         mpsc::{channel, Receiver, Sender, TryRecvError},
         Arc,
@@ -67,6 +66,7 @@ impl SoundManager {
         }
     }
 
+    // scale a given audio sample frame by volume, assumes i16 data
     fn adjust_volume(&self, frame: &mut [u8]) {
         for i in 0..frame.len() / 2 {
             let mut val_bytes = [0; 2];
@@ -80,11 +80,12 @@ impl SoundManager {
         self.volume += vol_change;
         if self.volume < 0.0 {
             self.volume = 0.0;
-        } else if self.volume > 100.0 {
-            self.volume = 100.0;
+        } else if self.volume > 1.0 {
+            self.volume = 1.0;
         }
     }
 
+    // push a frame to the fft compute struct, and calculate the fft bins
     fn fft_compute(&mut self, frame: &[u8]) -> Option<FFTResult> {
         let mut samples = [[Complex {
             re: 0.0f64,
@@ -103,6 +104,8 @@ impl SoundManager {
     fn push_samples(&mut self) -> Result<(), Error> {
         let (mut frame, fetch_more) = self.decoder_context.next_sample();
 
+        // send the computed spectrogram and frames to the audio sample buffer.
+        // this is not a good solution since the timing is desynced with buffering.
         if let Some(bins) = self.fft_compute(frame.as_slice()) {
             self.player_chan.send(PlayerEvent::FFTBins(bins))?;
         }
@@ -134,7 +137,6 @@ impl SoundManager {
             AudioEvent::TogglePlaying => self.playing = !self.playing,
             AudioEvent::VolumeUp => self.set_volume(VOLUME_CHANGE_INTERVAL),
             AudioEvent::VolumeDown => self.set_volume(-VOLUME_CHANGE_INTERVAL),
-            _ => unimplemented!(),
         }
         Ok(())
     }

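The clamp change above (100.0 to 1.0) matches the i16 scaling that adjust_volume performs. A minimal sketch of that per-sample scaling, assuming interleaved little-endian i16 data and a volume factor in 0.0..=1.0; this is a reading of the loop shown in the diff, not the project's exact code:

fn adjust_volume(frame: &mut [u8], volume: f64) {
    // every 2 bytes is one little-endian i16 sample; scale it by the volume factor
    for i in 0..frame.len() / 2 {
        let mut val_bytes = [0u8; 2];
        val_bytes.copy_from_slice(&frame[i * 2..i * 2 + 2]);
        let scaled = (i16::from_le_bytes(val_bytes) as f64 * volume) as i16;
        frame[i * 2..i * 2 + 2].copy_from_slice(&scaled.to_le_bytes());
    }
}

fn main() {
    let mut frame = 1000i16.to_le_bytes().to_vec();
    adjust_volume(&mut frame, 0.5);
    assert_eq!(i16::from_le_bytes([frame[0], frame[1]]), 500);
}
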
View file

@@ -24,6 +24,7 @@ pub fn validate_config(settings: &mut Settings) -> Result<(), Error> {
         if u.scheme() != "http" && u.scheme() != "https" {
             return Err(Box::new(ConfigError::InvalidServerAddress));
         }
+        // append /rest to a given subsonic server address.
         settings.subsonic.server_address =
             format!("{}{}", settings.subsonic.server_address, "/rest")
     }

View file

@@ -10,6 +10,7 @@ mod player;
 mod ssonic;
 mod utils;
 
+// setup submodules and threads
 fn init() -> Result<Receiver<utils::Error>, utils::Error> {
     let settings = config::init()?;
     let (error_in, error_out) = channel();

View file

@@ -62,7 +62,8 @@ impl Metadata {
     pub fn update_spectrogram(&mut self, bins: FFTResult) {
         for i in 0..20 {
-            self.spectrogram[i].1 = self.spectrogram[i].1 * 0.8 + 0.2 * bins[i];
+            // smoothing update to spectrogram values
+            self.spectrogram[i].1 = self.spectrogram[i].1 * 0.6 + 0.4 * bins[i];
         }
     }
 }

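The spectrogram update above is an exponential moving average, so the previous bar height decays geometrically with each new set of bins. A small sketch of why the 0.6/0.4 split reacts faster than the old 0.8/0.2 (updates arrive roughly every 20 ms, since compute() only runs every other 10 ms frame):

// remaining contribution of an old bar height after k updates
fn residual(prev_weight: f64, k: i32) -> f64 {
    prev_weight.powi(k)
}

fn main() {
    // after 5 updates (~100 ms of audio):
    println!("{:.2}", residual(0.8, 5)); // ~0.33 left with the old 0.8/0.2 weights
    println!("{:.2}", residual(0.6, 5)); // ~0.08 left with the new 0.6/0.4 weights
}
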
View file

@@ -5,12 +5,11 @@ use std::{
 };
 
 use crossterm::event::{poll, read, Event, KeyCode};
-use image::DynamicImage;
 
 use crate::{
     audio::AudioEvent,
-    ssonic::{response::Song, APIEvent, MAX_CHUNK_SIZE},
-    utils::{default_cover, time_rem, Error, FFTResult},
+    ssonic::{APIEvent, MAX_CHUNK_SIZE},
+    utils::{default_cover, time_rem, Error},
 };
 
 use super::{errors::PlayerError, Player, PlayerEvent};
@@ -54,7 +53,6 @@ impl Player {
                 PlayerEvent::AddAudioChunk(chunk_len) => self.recv_chunk(chunk_len)?,
                 PlayerEvent::FetchChunk => self.fetch_audio_chunk(false)?,
                 PlayerEvent::FFTBins(bins) => self.tui_root.metadata.update_spectrogram(bins),
-                _ => unimplemented!(),
             }
         }
         if self.playlist.last_song() {
@@ -93,7 +91,9 @@ impl Player {
     fn play_next(&mut self) -> Result<(), Error> {
         let song = match self.playlist.get_next() {
             None => {
-                // no song exists, requeue the event
+                // no song exists, should be impossible
+                // this will put the player in an endless loop as of now.
+                // needs changing
                 self.player_chan_in.send(PlayerEvent::PlayNext)?;
                 return Ok(());
             }

View file

@@ -1,11 +1,9 @@
-use std::time::Duration;
-
 use image::DynamicImage;
 use layout::Flex;
 use ratatui::{
     layout::{Constraint, Layout},
     prelude::*,
-    widgets::{Axis, BarChart, Block, Chart, Dataset, Gauge, Paragraph, Widget, Wrap},
+    widgets::{Axis, Chart, Dataset, Paragraph, Widget, Wrap},
 };
 
 use ratatui_image::{picker::Picker, protocol::StatefulProtocol, StatefulImage};
@@ -57,7 +55,6 @@ impl Root {
     }
 
     fn render_spectrogram(&self, area: Rect, buf: &mut Buffer) {
-        eprintln!("{:?}", self.metadata.spectrogram);
         Chart::new(vec![Dataset::default()
             .graph_type(ratatui::widgets::GraphType::Bar)
             .data(&self.metadata.spectrogram)])
@@ -83,20 +80,24 @@ impl Root {
         self.render_artist(artist, buf);
         self.render_time(time, buf);
     }
+
     fn render_title(&self, area: Rect, buf: &mut Buffer) {
         Paragraph::new(self.metadata.name.clone())
             .bold()
             .wrap(Wrap { trim: true })
             .render(area, buf);
     }
+
     fn render_artist(&self, area: Rect, buf: &mut Buffer) {
         if self.metadata.artist.is_some() {
             Paragraph::new(self.metadata.artist.clone().unwrap()).render(area, buf);
         }
     }
+
     fn render_time(&self, area: Rect, buf: &mut Buffer) {
         Paragraph::new(format!("{}", format_duration(self.metadata.duration),)).render(area, buf);
     }
+
     pub fn update_cover(&mut self, cover: DynamicImage) {
         self.metadata.set_cover(cover);
         self.image_state = self

View file

@@ -1,10 +1,9 @@
-use std::io::{BufReader, Read};
+use std::io::Read;
 
 use reqwest::{
     blocking::Response,
-    header::{HeaderMap, HeaderValue, ACCEPT, CONTENT_TYPE, RANGE},
+    header::{HeaderMap, CONTENT_TYPE, RANGE},
 };
-use serde::Serialize;
 
 use crate::{
     audio::AudioEvent,
@@ -26,7 +25,6 @@ impl APIClient {
             super::APIEvent::FetchRandom => self.get_random()?,
             super::APIEvent::FetchCoverArt(id) => self.get_cover_art(id)?,
             super::APIEvent::StreamSong(id, start, end) => self.stream_song(id, start, end)?,
-            _ => unimplemented!(),
         };
         self.player_chan.send(player_resp)?;
     }