Added some test code, comments, refactoring, and updated docserve tool

This commit is contained in:
Anne de Jong 2024-07-06 19:36:51 +02:00
parent 81872c62e1
commit b366c47ca7
14 changed files with 169 additions and 45 deletions

View File

@ -20,8 +20,8 @@ anyhow = "1.0.75"
# Numerics # Numerics
# Optional future feature for ndarray: blas # Optional future feature for ndarray: blas
ndarray = { version = "0.15.3", features = ["rayon"] } ndarray = { version = "0.15.6", features = ["rayon"] }
num = "0.4.1" num = "0.4.3"
# blas-src = { version = "0.8", features = ["openblas"] } # blas-src = { version = "0.8", features = ["openblas"] }
# openblas-src = { version = "0.10", features = ["cblas", "system"] } # openblas-src = { version = "0.10", features = ["cblas", "system"] }
@ -51,11 +51,11 @@ cfg-if = "1.0.0"
reinterpret = "0.2.1" reinterpret = "0.2.1"
# Faster channels for multithreaded communication # Faster channels for multithreaded communication
crossbeam = "0.8.2" crossbeam = "0.8.4"
# Serialization # Serialization
serde = { version = "1.0.193", features = ["derive"] } serde = { version = "1.0.193", features = ["derive"] }
toml = "0.8.8" toml = "0.8.14"
# Initialize array for non-copy type # Initialize array for non-copy type
array-init = "2.1.0" array-init = "2.1.0"
@ -68,15 +68,15 @@ hdf5-sys = { version = "0.8.1", features = ["static"], optional = true }
hdf5 = { version = "0.8.1", optional = true } hdf5 = { version = "0.8.1", optional = true }
# Useful iterator stuff # Useful iterator stuff
itertools = "0.12.0" itertools = "0.13.0"
# For getting timestamps. Only useful when recording. # For getting timestamps. Only useful when recording.
chrono = {version = "0.4.31", optional = true} chrono = {version = "0.4.38", optional = true}
# For getting UUIDs in recording # For getting UUIDs in recording
uuid = { version = "1.6.1", features = ["v4"] , optional = true} uuid = { version = "1.9.1", features = ["v4"] , optional = true}
# Command line argument parser, for CLI apps # Command line argument parser, for CLI apps
clap = { version = "4.4.11", features = ["derive", "color", "help", "suggestions"] } clap = { version = "4.5.8", features = ["derive", "color", "help", "suggestions"] }
# FFT's # FFT's
realfft = "3.3.0" realfft = "3.3.0"

View File

@ -1,10 +1,12 @@
//! Configuration of module. Here, we can choose to compile for 32-bit or 64-bit floating point values //! Configuration of module. Here, we can choose to compile for 32-bit or
//! as basic data storage and computation size. Default is f64. //! 64-bit floating point values as basic data storage and computation size.
//! Default is f64.
//! //!
cfg_if::cfg_if! { cfg_if::cfg_if! {
if #[cfg(feature="f64")] { if #[cfg(feature="f64")] {
/// Floating-point value, compile-time option to make it either f32 or f64 /// Floating-point value, compile-time option to make it either f32 or
/// f64.
pub type Flt = f64; pub type Flt = f64;
/// Ratio between circumference and diameter of a circle /// Ratio between circumference and diameter of a circle
pub const pi: Flt = std::f64::consts::PI; pub const pi: Flt = std::f64::consts::PI;
@ -67,6 +69,7 @@ pub type Ccol = Array1<Cflt>;
/// 2D array of floats /// 2D array of floats
pub type Dmat = Array2<Flt>; pub type Dmat = Array2<Flt>;
/// 2D array of complex floats /// 2D array of complex floats
pub type Cmat = Array2<Cflt>; pub type Cmat = Array2<Cflt>;
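To illustrate the aliases above: downstream code is written once against `Flt`, `pi`, `Dmat` and friends, and the `f32`/`f64` decision is made only at compile time via the feature flag. A minimal sketch (both helper functions are hypothetical, not crate code, and assume the aliases above are in scope):

```rust
/// Angular frequencies omega = 2 * pi * f, in the precision selected at compile time.
fn angular_frequencies(freq: &[Flt]) -> Vec<Flt> {
    freq.iter().map(|f| 2. * pi * f).collect()
}

/// An empty block of real samples; Dmat is Array2<Flt>.
fn empty_block(nframes: usize, nchannels: usize) -> Dmat {
    Dmat::zeros((nframes, nchannels))
}
```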

View File

@ -1,4 +1,3 @@
//! Data acquisition model. Provides abstract layers around DAQ devices.
#![allow(non_snake_case)] #![allow(non_snake_case)]
use super::StreamApiDescr; use super::StreamApiDescr;

View File

@ -1,4 +1,15 @@
//! Data acquisition model. Provides abstract layers around DAQ devices. //! Data acquisition code. Provides abstract layers around DAQ devices, creating
//! input and output streams and interacting with them (record, create signal
//! generators, filter data, show real-time sound levels and so on).
//!
//! - Record data in a `Recording`
//! - Interact with DAQ devices:
//! - Set number of channels, channel names etc.
//! - Enable / disable IEPE
//! - Set datatypes etc.
//!
//! Most tasks are performed using a [StreamMgr], an object used to perform I/O of
//! DAQ data, interact with devices, etc.
mod api; mod api;
mod daqconfig; mod daqconfig;
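As a small illustration of the [StreamMgr]-centric workflow described above, construction looks as follows; only construction is shown because the rest of the API is not part of this diff, and the `lasprs::daq::StreamMgr` import path is assumed from `pub mod daq` in lib.rs:

```rust
use lasprs::daq::StreamMgr; // import path assumed, not shown in this diff

// A StreamMgr is a process-wide singleton: creating a second one panics,
// as documented on StreamMgr::new() further down in this commit.
let mut smgr = StreamMgr::new();
```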

View File

@ -39,7 +39,8 @@ struct StreamInfo<T> {
/// Keep track of whether the stream has been created. To ensure singleton behaviour. /// Keep track of whether the stream has been created. To ensure singleton behaviour.
static smgr_created: AtomicBool = AtomicBool::new(false); static smgr_created: AtomicBool = AtomicBool::new(false);
/// Configure and manage input / output streams. /// Configure and manage input / output streams. This struct is supposed to be a
/// SINGLETON. Runtime checks are performed to see whether this is true.
/// ///
#[cfg_attr(feature = "python-bindings", pyclass(unsendable))] #[cfg_attr(feature = "python-bindings", pyclass(unsendable))]
pub struct StreamMgr { pub struct StreamMgr {
@ -112,11 +113,14 @@ impl Default for StreamMgr {
} }
impl StreamMgr { impl StreamMgr {
/// Create new stream manager. A stream manager is supposed to be a singleton. /// Create new stream manager. A stream manager is supposed to be a
/// singleton. A runtime check (see below) ensures that there is
/// only a single [StreamMgr] alive at a time.
/// ///
/// # Panics /// # Panics
/// ///
/// When a StreamMgr object is already alive. /// When a StreamMgr object is already alive.
///
pub fn new() -> StreamMgr { pub fn new() -> StreamMgr {
if smgr_created.load(std::sync::atomic::Ordering::Relaxed) { if smgr_created.load(std::sync::atomic::Ordering::Relaxed) {
panic!("BUG: Only one stream manager is supposed to be a singleton"); panic!("BUG: Only one stream manager is supposed to be a singleton");
@ -603,7 +607,7 @@ impl Drop for StreamMgr {
} }
} }
// Send to all queues, remove queues that are disconnected when found out /// Send to all queues, remove queues that are disconnected when found out
// on the way. // on the way.
fn sendMsgToAllQueuesRemoveUnused(iqueues: &mut InQueues, msg: InStreamMsg) { fn sendMsgToAllQueuesRemoveUnused(iqueues: &mut InQueues, msg: InStreamMsg) {
// Loop over queues. Remove queues that error when we try to send // Loop over queues. Remove queues that error when we try to send
@ -613,11 +617,3 @@ fn sendMsgToAllQueuesRemoveUnused(iqueues: &mut InQueues, msg: InStreamMsg) {
Err(_e) => false, Err(_e) => false,
}); });
} }
/// Daq devices
trait Daq {}
#[cfg(test)]
mod tests {
// #[test]
}
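The comment on `sendMsgToAllQueuesRemoveUnused` above describes a send-and-prune pattern: deliver to every queue and drop the ones whose receiver has gone away. A self-contained sketch of the same idea with `crossbeam` channels (the message type and the `Vec` of senders are stand-ins for the crate's `InStreamMsg` and `InQueues`):

```rust
use crossbeam::channel::Sender;

/// Stand-in for the crate's InStreamMsg.
#[derive(Clone)]
struct Msg;

/// Send `msg` to every queue; queues whose receiving end has been dropped
/// error on `send` and are removed on the way.
fn send_to_all(queues: &mut Vec<Sender<Msg>>, msg: Msg) {
    queues.retain(|q| q.send(msg.clone()).is_ok());
}
```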

View File

@ -236,6 +236,7 @@ impl<'a, T: AsArray<'a, Flt>> TransferFunction<'a, T> for Biquad {
res res
} }
} }
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use approx::assert_abs_diff_eq; use approx::assert_abs_diff_eq;

View File

@ -1,9 +1,10 @@
//! # Filter implementations for biquads, series of biquads and banks of series of biquads. //! Contains filter implementations for [Biquad]s, series of
//! biquads ([SeriesBiquad]) and banks of series of biquads ([BiquadBank]).
//! //!
//! Contains [Biquad], [SeriesBiquad], and [BiquadBank]. These are all constructs that work on //! Contains [Biquad], [SeriesBiquad], and [BiquadBank]. These are all constructs that work on
//! blocks of input data, and apply filters on it. Todo: implement FIR filter. //! blocks of input data and apply filters to them. TODO: implement FIR filter.
#![allow(non_snake_case)] #![allow(non_snake_case)]
pub use super::config::*; use super::config::*;
mod biquad; mod biquad;
mod biquadbank; mod biquadbank;
@ -12,11 +13,12 @@ mod seriesbiquad;
pub use biquad::Biquad; pub use biquad::Biquad;
pub use biquadbank::BiquadBank; pub use biquadbank::BiquadBank;
pub use dummy::DummyFilter;
pub use seriesbiquad::SeriesBiquad; pub use seriesbiquad::SeriesBiquad;
/// Implementations of this trait are able to DSP-filter input data. /// Implementations of this trait are able to DSP-filter input data.
pub trait Filter: Send { pub trait Filter: Send {
//! The filter trait is implemented by Biquad, SeriesBiquad, and BiquadBank //! The filter trait is implemented by, for example, [Biquad], [SeriesBiquad], and [BiquadBank].
/// Filter input to generate output. A vector of output floats is generated with the same /// Filter input to generate output. A vector of output floats is generated with the same
/// length as input. /// length as input.
@ -38,7 +40,15 @@ where
{ {
/// Compute frequency response (i.e. transfer function from input to output) /// Compute frequency response (i.e. transfer function from input to output)
/// ///
/// Args /// # Args
///
/// * `fs` - Sampling frequency in Hz
/// * `freq` - The frequencies in Hz at which to evaluate the transfer function
///
/// # Returns
///
/// The transfer function: A column vector with the frequency response for
/// each frequency in `freq`.
///
fn tf(&self, fs: Flt, freq: T) -> Ccol; fn tf(&self, fs: Flt, freq: T) -> Ccol;
} }
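As a usage note for `tf` as documented above: the returned `Ccol` is a complex column vector, so a level-in-dB curve follows directly from it. A small sketch (the `SeriesBiquad::new` coefficients mirror the unit tests added later in this commit; the dB conversion assumes `Cflt` is num's `Complex` type, which provides `norm()`):

```rust
// Unity-gain single section, as in the tests below: coefficients [1, 0, 0, 1, 0, 0].
let ser = SeriesBiquad::new(&[1., 0., 0., 1., 0., 0.]).unwrap();

// Frequency response at DC and at a quarter of an assumed 48 kHz sample rate.
let h = ser.tf(48000., &[0., 12000.]);

// Magnitude in dB for each requested frequency; 0 dB for a unity-gain filter.
let level_db: Vec<Flt> = h.iter().map(|hv| 20. * hv.norm().log10()).collect();
assert!(level_db[0].abs() < 1e-9);
```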

View File

@ -110,8 +110,8 @@ impl<'a, T: AsArray<'a, Flt>> TransferFunction<'a, T> for SeriesBiquad {
fn tf(&self, fs: Flt, freq: T) -> Ccol { fn tf(&self, fs: Flt, freq: T) -> Ccol {
let freq = freq.into(); let freq = freq.into();
let mut res = self.biqs.first().unwrap().tf(fs, freq); let mut res = self.biqs.first().unwrap().tf(fs, freq);
for i in self.biqs.iter().skip(1) { for biq in self.biqs.iter().skip(1) {
res = &res * i.tf(fs, freq); res = &res * biq.tf(fs, freq);
} }
res res
} }
@ -144,4 +144,20 @@ mod test {
inp[1] = 0.5; inp[1] = 0.5;
assert_eq!(&inp, &filtered); assert_eq!(&inp, &filtered);
} }
#[test]
fn test_seriesbiquad_tf1() {
let filter_coefs = vec![1., 0., 0., 1., 0., 0.];
let ser = SeriesBiquad::new(&filter_coefs).unwrap();
let tf = ser.tf(1., &[0., 1.]);
assert_eq!(tf[0].re, 1.0);
assert_eq!(tf[1].im, 0.0);
}
#[test]
fn test_seriesbiquad_tf2() {
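// Each of the two identical sections has a broadband gain of 0.5, and the
// series response is the product of the section responses, so the expected
// magnitude is 0.5 * 0.5 = 0.25 at every frequency.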
let filter_coefs = &[0.5, 0., 0., 1., 0., 0., 0.5, 0., 0., 1., 0., 0.];
let ser = SeriesBiquad::new(filter_coefs).unwrap();
let tf = ser.tf(1., &[0., 1.]);
assert_eq!(tf[0].re, 0.25);
assert_eq!(tf[1].im, 0.0);
}
} }

View File

@ -18,10 +18,7 @@ pub mod daq;
pub mod ps; pub mod ps;
pub mod siggen; pub mod siggen;
mod math; mod math;
use filter::*; use filter::*;
use daq::*;
/// A Python module implemented in Rust. /// A Python module implemented in Rust.
#[cfg(feature = "python-bindings")] #[cfg(feature = "python-bindings")]
@ -29,6 +26,7 @@ use daq::*;
#[pyo3(name="_lasprs")] #[pyo3(name="_lasprs")]
fn lasprs(m: &Bound<'_, PyModule>) -> PyResult<()> { fn lasprs(m: &Bound<'_, PyModule>) -> PyResult<()> {
daq::add_py_classses(m)?;
// Add filter submodule // Add filter submodule
m.add_class::<filter::Biquad>()?; m.add_class::<filter::Biquad>()?;
@ -36,7 +34,6 @@ fn lasprs(m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_class::<filter::BiquadBank>()?; m.add_class::<filter::BiquadBank>()?;
m.add_class::<siggen::Siggen>()?; m.add_class::<siggen::Siggen>()?;
daq::add_py_classses(m)?;
Ok(()) Ok(())
} }

View File

@ -2,4 +2,84 @@
//! long datasets, where nfft << length of data. This way, the variance of a //! long datasets, where nfft << length of data. This way, the variance of a
//! single periodogram is suppressed with increasing number of averages. //! single periodogram is suppressed with increasing number of averages.
//! //!
//! For more information, //! For more information, see e.g. the Numerical Recipes book.
//!
use super::*;
use crate::config::*;
use anyhow::{bail, Result};
/// Provide the overlap of blocks for computing averaged (cross) power spectra.
/// Can be provided as a percentage of the block size, or as a number of
/// samples.
pub enum Overlap {
Percentage(Flt),
Number(usize),
}
impl Default for Overlap {
fn default() -> Self {
Overlap::Percentage(50.)
}
}
/// Averaged power spectra computing engine
struct AvPowerSpectra {
ps: PowerSpectra,
overlap_skip: usize,
// Time constant for time-weighted power spectra. If None, it averages out
// over all data.
fs_tau: Option<Flt>,
/// The number of frames already used in the average
N: usize,
// Current estimation of the power spectra
cur_est: Cmat
}
impl AvPowerSpectra {
fn get_overlap_skip(nfft: usize, overlap: Option<Overlap>) -> Result<usize> {
let overlap = overlap.unwrap_or_default();
let overlap_skip = match overlap {
Overlap::Number(i) if i >= nfft => bail!("Invalid overlap number of samples"),
Overlap::Number(i) if i < nfft => nfft - i,
Overlap::Percentage(p) if p >= 100. || p < 0. => {
bail!("Invalid overlap percentage. Should be >= 0 and < 100.")
}
Overlap::Percentage(p) => nfft - ((p * nfft as Flt) / 100.) as usize,
_ => unreachable!(),
};
if overlap_skip == 0 || overlap_skip > nfft {
bail!("Computed overlap results in invalid number of overlap samples. Please make sure the FFT length is large enough, when high overlap percentages are required.");
}
Ok(overlap_skip)
}
pub fn new(
nfft: usize,
wt: Option<WindowType>,
overlap: Option<Overlap>,
fs_tau: Option<Flt>,
) -> Result<AvPowerSpectra> {
if nfft % 2 != 0 {
bail!("NFFT should be even")
}
if nfft == 0 {
bail!("Invalid NFFT")
}
if let Some(x) = fs_tau {
if x <= 0.0 {
bail!("Invalid time weighting constant [s]. Should be > 0 if given.")
}
}
let window = Window::new(wt.unwrap_or_default(), nfft);
let ps = PowerSpectra::newFromWindow(window);
let overlap_skip = Self::get_overlap_skip(nfft, overlap)?;
Ok(AvPowerSpectra {
ps,
overlap_skip,
fs_tau,
N: 0,
cur_est: Cmat::default((0,0))
})
}
}
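A short usage sketch of the constructor above (within-crate only, since `AvPowerSpectra` is not public in this commit). As a worked example of the overlap bookkeeping: with `nfft = 4096` and 75 % overlap, `get_overlap_skip` yields `4096 - 3072 = 1024`, so each new periodogram advances by a quarter of a block:

```rust
// Defaults: Hann window, 50 % overlap, averaging over all incoming data.
let aps_default = AvPowerSpectra::new(2048, None, None, None).unwrap();

// Explicit settings: Hann window, 75 % overlap, still averaging over all data.
let aps = AvPowerSpectra::new(
    4096,
    Some(WindowType::Hann),
    Some(Overlap::Percentage(75.)),
    None,
)
.unwrap();
```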

View File

@ -1,4 +1,8 @@
//! Power spectra, averaged power spectra, etc. This module contains several //! Power spectra, averaged power spectra, etc. This module contains several
mod window; pub mod window;
mod ps; pub mod ps;
mod fft; mod fft;
mod aps;
pub use ps::PowerSpectra;
pub use window::{Window, WindowType};
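With the submodules made public and the re-exports above, downstream code can reach the main types directly from `lasprs::ps`. A short sketch, assuming the crate is named `lasprs` and that `PowerSpectra::newFromWindow` (used in aps.rs above) is public:

```rust
use lasprs::ps::{PowerSpectra, Window, WindowType};

// Build a 1024-point Hann window and hand it to the power-spectra engine.
let window = Window::new(WindowType::Hann, 1024);
let ps = PowerSpectra::newFromWindow(window);
```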

View File

@ -82,6 +82,11 @@ pub enum WindowType {
/// Blackman window /// Blackman window
Blackman = 4, Blackman = 4,
} }
impl Default for WindowType {
fn default() -> Self {
WindowType::Hann
}
}
/// Window (taper) computed from specified window type. /// Window (taper) computed from specified window type.
#[derive(Clone)] #[derive(Clone)]
@ -119,7 +124,9 @@ impl Window {
self.win.len() self.win.len()
} }
} }
//
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use approx::assert_abs_diff_eq; use approx::assert_abs_diff_eq;

View File

@ -6,6 +6,6 @@
# $ cargo install cargo-watch cargo-docserver` # $ cargo install cargo-watch cargo-docserver`
# ``` # ```
# #
cargo watch -s "cargo rustdoc --lib && cargo test ${testargs} && cargo docserver" cargo watch -s "cargo rustdoc --lib && cargo docserve"
# Then open: ${browser} http://localhost:4000 # Then open: ${browser} http://localhost:4000