Merged in lasp from remote
All checks were successful
Building, testing and releasing LASP if it has a tag / Build-Test-Ubuntu (push) Successful in 3m8s
Building, testing and releasing LASP if it has a tag / Release-Ubuntu (push) Has been skipped

Thijs Hekman, 2023-12-19 11:24:04 +01:00
commit e5c40c6af3
113 changed files with 469 additions and 433 deletions


@ -1,90 +0,0 @@
kind: pipeline
type: docker
name: archlinux

clone:
  depth: 50

steps:
- name: archlinux_build
  image: archlinux_build:latest
  pull: if-not-exists
  volumes:
  - name: archlinux_ccache
    path: /root/.ccache
  commands:
  # The following command is not required, we included this in the docker
  # image of archlinux_build
  # - pacman -S --noconfirm ccache openblas fftw pulseaudio pybind11
  - git submodule update --init --recursive
  - cmake .
  # More than two makes ascee2 irresponsive for now
  - make -j2

- name: archlinux_test
  image: archlinux_build:latest
  pull: if-not-exists
  commands:
  - scripts/test.sh

volumes:
- name: archlinux_ccache
  host:
    path: /tmp/archlinux_ccache

---
kind: pipeline
type: docker
name: ubuntu

clone:
  depth: 3

volumes:
- name: archlinux_ccache
  path: /root/.ccache

steps:
- name: ubuntu_build
  image: ubuntu_build:latest
  pull: if-not-exists
  volumes:
  - name: ubuntu_ccache
    path: /root/.ccache
  environment:
  commands:
  # The following commands are not required, we included this in the docker
  # image of ubuntu_builud
  - scripts/build_ubuntu.sh

- name: ubuntu_test
  image: ubuntu_build:latest
  pull: if-not-exists
  commands:
  - scripts/test.sh

volumes:
- name: ubuntu_ccache
  host:
    path: /tmp/ubuntu_ccache

---
kind: pipeline
type: docker
name: documentation_build

clone:
  depth: 3

steps:
- name: build_docker_master
  image: plugins/docker
  settings:
    repo: ascee/lasp_ascee_nl
    tags: latest
    username:
      from_secret: docker_username
    password:
      from_secret: docker_password
  when:
    branch: master


@ -0,0 +1,51 @@
name: Building, testing and releasing LASP if it has a tag
on:
  - push

jobs:
  Build-Test-Ubuntu:
    runs-on: ubuntu-latest
    container:
      image: ascee/ubuntu_build:latest
      volumes:
        - lasp_dist:/dist
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Build and test
        run: |
          pip install build pytest
          python3 -m build
          pip install dist/lasp*.whl
          pytest
      - name: Copy dist files to /dist dir
        run:
          cp -v dist/* /dist

  Release-Ubuntu:
    runs-on: ubuntu-latest
    container:
      volumes:
        - lasp_dist:/dist
    needs: Build-Test-Ubuntu
    if: startsWith(gitea.ref, 'refs/tags/v')
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: setup go
        uses: https://github.com/actions/setup-go@v4
        with:
          go-version: '1.18'
      - name: Release
        uses: https://gitea.com/actions/release-action@main
        working-directory: "/"
        with:
          files: |-
            ../../../../../dist/**
          api_key: '${{secrets.RELEASE_TOKEN}}'

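Note on the new workflow: the `if: startsWith(gitea.ref, 'refs/tags/v')` guard means Release-Ubuntu only runs when a version tag such as `v1.0.1` is pushed; an ordinary branch push (as in the checks at the top of this page) runs only Build-Test-Ubuntu, which is why Release-Ubuntu shows as skipped. A release would therefore typically be cut by tagging and pushing, e.g. `git tag v1.0.2 && git push origin v1.0.2` (the version number here is hypothetical).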
.gitignore (vendored, 11 changed lines)

@ -6,23 +6,18 @@
 .ninja*
 build.ninja
 dist
-src/lasp.egg-info
-test/.ipynb_checkpoints
-src/lasp/lasp_config.h
 _deps
 compile_commands.json
 CMakeFiles
 CMakeCache.txt
 cmake_install.cmake
 Makefile
-build
 __pycache__
-cython_debug
 doc
 .ropeproject
 .ipynb_checkpoints
 .spyproject
-.cache
-_skbuild
 acme_log.log
-testenv
+.venv
+.py-build-cmake_cache
+cpp_src/lasp_config.h

.pre-commit-config.yaml (new file, 18 lines)

@ -0,0 +1,18 @@
---
repos:
  - repo: https://github.com/commitizen-tools/commitizen
    rev: 3.5.3
    hooks:
      - id: commitizen
      - id: commitizen-branch
        stages: [push]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v2.3.0
    hooks:
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/psf/black
    rev: 22.10.0
    hooks:
      - id: black

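With this configuration in place, the hooks are typically activated once per clone with `pre-commit install` (plus `pre-commit install --hook-type pre-push` for the push-stage commitizen-branch hook), and `pre-commit run --all-files` runs the configured hooks once over the whole tree. This assumes the `pre-commit` tool itself is available, e.g. via `pip install pre-commit`.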
CHANGELOG.md (new file, 7 lines)

@ -0,0 +1,7 @@
## v1.0.1 (2023-07-19)
### Fix
- Added patch number to semver in pyproject.toml
## v1.0.0 (2023-07-19)


@ -1,7 +1,6 @@
 cmake_minimum_required (VERSION 3.16)
 project(LASP LANGUAGES C CXX VERSION 1.0)
 set(CMAKE_CXX_STANDARD 17)
 set(CMAKE_CXX_STANDARD_REQUIRED)
@ -121,7 +120,7 @@ include(portaudio)
 include(uldaq)
 #
 add_definitions(-Dgsl_CONFIG_DEFAULTS_VERSION=1)
-add_subdirectory(src/lasp)
+add_subdirectory(cpp_src)
 if(LASP_BUILD_CPP_TESTS)
   add_subdirectory(test)
 endif()


@ -1,8 +1,5 @@
 # Library for Acoustic Signal Processing

-- Master branch: [![Build Status](https://drone.ascee.nl/api/badges/ASCEE/lasp/status.svg?ref=refs/heads/master)](https://drone.ascee.nl/ASCEE/lasp)
-- Develop branch: [![Build Status](https://drone.ascee.nl/api/badges/ASCEE/lasp/status.svg?ref=refs/heads/develop)](https://drone.ascee.nl/ASCEE/lasp)
-
 Welcome to LASP: Library for Acoustic Signal Processing. LASP is a C++ library
 with a Python interface which is supposed to acquire and process (multi) sensor data in real time on a PC and output results.
@ -46,41 +43,48 @@ in a sister repository [lasp-doc](https://code.ascee.nl/ascee/lasp-doc).
 If you have any question(s), please feel free to contact us: [email](info@ascee.nl).

-# Installation - Linux (Debian-based)
+# Installation - Linux (Ubuntu-based)

-## Dependencies
-One-liner:
-- `$ sudo apt install python3-pybind11 libopenblas-dev python3-pip python3-scipy libusb-1.0-0-dev libpulse-dev cmake-curses-gui python3-h5py python3-dataclasses-json python3-matplotlib python3-appdirs`
+## From wheel (recommended for non-developers)
+
+### Prerequisites
+Run the following on the command line to install all prerequisites on
+Debian-based Linux:
+- `sudo apt install python3-pip libfftw3-3 libopenblas-base libusb-1.0-0
+libpulse0`
+
+### Download and install LASP
+Go to: [LASP releases](https://code.ascee.nl/ASCEE/lasp/releases/latest/) and
+download the latest `.whl`. Then run:
+- `pip install lasp-*-linux_x86_64.whl`
+
+## From source (Ubuntu-based)
+
+### Prerequisites
+Run the following one-liner:
+- `sudo apt install -y git python3 python3-virtualenv python3-venv libopenblas-dev python3-pip libfftw3-dev libusb-1.0-0-dev libpulse-dev python3-build`

 If building RtAudio with the ALSA backend, you will also require the following packages:
-- libclalsadrv-dev
+- `sudo apt install libclalsadrv-dev`

 If building RtAudio with the Jack Audio Connection Kit (JACK) backend, you will also require the following packages:
-- libjack-jackd2-dev
+- `sudo apt install libjack-jackd2-dev`

-## Download & build
+### Download & build
 - `$ git clone --recursive https://code.ascee.nl/ASCEE/lasp.git`
 - `$ cd lasp`
+- `pip install -e .`

-For a release build:
-- `$ cmake .`
-or optionally for a custom build:
-- `$ ccmake .`
-Configure and run:
-- `$ make -j`
-
-# Installation - (x86_64) Windows (with WinPython), build with MSYS2
+# Installation - (x86_64) Windows (with WinPython), build with MSYS2 (NOT YET UPDATED!!)

 ## Prerequisites
@ -113,7 +117,11 @@ Configure and run:
 # Documentation

-In directory:
+## Online
+
+[Online LASP documentation](https://lasp.ascee.nl/).
+
+## In directory

 `$ sudo apt install doxygen graphviz`
 `$ pip install doxypypy`
@ -126,21 +134,7 @@ This will build the documentation. It can be read by:
 `$ <YOUR-BROWSER> doc/html/index.html`

-Or via docker:
-`$ docker build -t lasp_ascee_nl:latest .`
-
-## Install
-
-For an editable install (while developing):
-- `$ pip3 install --prefix=$HOME/.local -e .`
-
-To install locally, for a fixed version:
-- `$ pip3 install --prefix=$HOME/.local`
-
-## Usage
+# Usage

 - See examples directories for IPython notebooks.
 - Please refer to the [documentation](https://lasp.ascee.nl/) for features.

cpp_src/CMakeLists.txt (new file, 53 lines)

@ -0,0 +1,53 @@
# src/lasp/CMakeLists.txt
# Armadillo, don't build the wrapper lib, but instead directly link to
# openblas.
add_definitions(-DARMA_DONT_USE_WRAPPER)
configure_file(lasp_config.h.in lasp_config.h)
include_directories(${CMAKE_CURRENT_BINARY_DIR})
include_directories(SYSTEM
  ${PROJECT_SOURCE_DIR}/third_party/armadillo-code/include)
include_directories(${PROJECT_SOURCE_DIR}/third_party/DebugTrace-cpp/include)
include_directories(${PROJECT_SOURCE_DIR}/third_party/gsl-lite/include)
include_directories(${PROJECT_SOURCE_DIR}/third_party/tomlplusplus/include)
include_directories(${PROJECT_SOURCE_DIR}/third_party/thread-pool)

if(LASP_HAS_RTAUDIO)
  include_directories(${PROJECT_SOURCE_DIR}/third_party/rtaudio)
endif()
if(LASP_HAS_ULDAQ)
  include_directories(${PROJECT_SOURCE_DIR}/third_party/uldaq/src)
endif()

add_subdirectory(device)
add_subdirectory(dsp)

pybind11_add_module(lasp_cpp MODULE lasp_cpp.cpp
  pybind11/lasp_deviceinfo.cpp
  pybind11/lasp_daqconfig.cpp
  pybind11//lasp_dsp_pybind.cpp
  pybind11/lasp_streammgr.cpp
  pybind11/lasp_daq.cpp
  pybind11/lasp_deviceinfo.cpp
  pybind11/lasp_pyindatahandler.cpp
  pybind11/lasp_siggen.cpp
)

target_link_libraries(lasp_cpp PRIVATE lasp_device_lib lasp_dsp_lib
  ${OpenMP_CXX_LIBRARIES} ${LASP_FFT_LIBS} ${TARGET_OS_LINKLIBS})

# Install the Python module
install(TARGETS lasp_cpp
  EXCLUDE_FROM_ALL
  COMPONENT python_modules
  DESTINATION ${PY_BUILD_CMAKE_MODULE_NAME})

# Install the debug file for the Python module (Windows only)
if (WIN32)
  install(FILES $<TARGET_PDB_FILE:_add_module>
    EXCLUDE_FROM_ALL
    COMPONENT python_modules
    DESTINATION ${PY_BUILD_CMAKE_MODULE_NAME}
    OPTIONAL)
endif()


@ -9,9 +9,6 @@
 #ifndef LASP_CONFIG_H
 #define LASP_CONFIG_H

-const int LASP_VERSION_MAJOR = @CMAKE_PROJECT_VERSION_MAJOR@;
-const int LASP_VERSION_MINOR = @CMAKE_PROJECT_VERSION_MINOR@;
-
 /* Debug flag */
 #cmakedefine01 LASP_DEBUG


@ -52,12 +52,5 @@ PYBIND11_MODULE(lasp_cpp, m) {
   init_datahandler(m);
   init_siggen(m);
-
-  // We store the version number of the code via CMake, and create an
-  // attribute in the C++ code.
-  m.attr("__version__") = std::to_string(LASP_VERSION_MAJOR) + "." +
-                          std::to_string(LASP_VERSION_MINOR);
-  m.attr("LASP_VERSION_MAJOR") = LASP_VERSION_MAJOR;
-  m.attr("LASP_VERSION_MINOR") = LASP_VERSION_MINOR;
 }

 /** @} */

pyproject.toml (new file, 55 lines)

@ -0,0 +1,55 @@
[project]
name = "lasp"
readme = "README.md"
requires-python = ">=3.10"
description = "Library for Acoustic Signal Processing"
license = { "file" = "LICENSE" }
authors = [{ "name" = "J.A. de Jong", "email" = "j.a.dejong@ascee.nl" }]
version = "1.0.1"
keywords = ["DSP", "DAQ", "Signal processing"]
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Topic :: Scientific/Engineering",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
]
urls = { "Documentation" = "https://lasp.ascee.nl" }
dependencies = ["scipy", "numpy", "matplotlib>=3.7.2", "appdirs",
"dataclasses_json", "h5py"]
[build-system] # How pip and other frontends should build this project
requires = ["py-build-cmake~=0.1.8", "pybind11" ]
build-backend = "py_build_cmake.build"
[tool.py-build-cmake.module] # Where to find the Python module to package
directory = "python_src"
[tool.py-build-cmake.sdist] # What to include in source distributions
include = ["CMakeLists.txt", "cmake", "cpp_src", "python_src", "img", "scripts",
"third_party"]
[tool.py-build-cmake.cmake] # How to build the CMake project
build_type = "Release"
source_path = "."
build_args = ["-j12"]
install_components = ["python_modules"]
[tool.py-build-cmake.editable]
# This might not work properly on Windows. Comment this out when testing on
# Windows.
mode = "symlink"
[tool.commitizen]
name = "cz_conventional_commits"
tag_format = "v$version"
version_scheme = "semver"
version_provider = "pep621"
update_changelog_on_bump = true

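With this pyproject.toml, a wheel is typically produced by the standard `python3 -m build` front end (exactly what the new Build-Test-Ubuntu job above does), with py-build-cmake driving the CMake build and installing only the `python_modules` component into the wheel; `pip install -e .` (as in the updated README) gives an editable install using the symlink mode configured under `[tool.py-build-cmake.editable]`.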

@ -2,10 +2,11 @@
 LASP: Library for Acoustic Signal Processing
 """
-from .lasp_cpp import *
+import lasp.lasp_cpp
+from .lasp_version import __version__
 from .lasp_common import *
-__version__ = lasp_cpp.__version__
+from .lasp_cpp import *

 # from .lasp_imptube import * # TwoMicImpedanceTube
 from .lasp_measurement import * # Measurement, scaleBlockSens
@ -14,6 +15,7 @@ from .lasp_slm import * # SLM, Dummy
 from .lasp_record import * # RecordStatus, Recording
 from .lasp_daqconfigs import *
 from .lasp_measurementset import *
+
 # from .lasp_siggen import * # SignalType, NoiseType, SiggenMessage, SiggenData, Siggen
 # from .lasp_weighcal import * # WeighCal
 # from .tools import * # SmoothingType, smoothSpectralData, SmoothingWidth


@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
-from .lasp_cpp import DaqConfiguration, LASP_VERSION_MAJOR
+from .lasp_cpp import DaqConfiguration
+from .lasp_version import LASP_VERSION_MAJOR
 """!
 Author: J.A. de Jong - ASCEE


@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 from __future__ import annotations
 """!
 Author: J.A. de Jong - ASCEE
@ -43,7 +44,7 @@ The video dataset can possibly be not present in the data.
""" """
__all__ = ['Measurement', 'scaleBlockSens'] __all__ = ["Measurement", "scaleBlockSens"]
from contextlib import contextmanager from contextlib import contextmanager
import h5py as h5 import h5py as h5
import numpy as np import numpy as np
@ -51,7 +52,8 @@ from .lasp_config import LASP_NUMPY_FLOAT_TYPE
 from scipy.io import wavfile
 import os, time, wave, logging
 from .lasp_common import SIQtys, Qty, getFreq
-from .lasp_cpp import Window, DaqChannel, LASP_VERSION_MAJOR, AvPowerSpectra
+from .lasp_version import LASP_VERSION_MAJOR, LASP_VERSION_MINOR
+from .lasp_cpp import Window, DaqChannel, AvPowerSpectra
 from typing import List
 from functools import lru_cache
@ -72,7 +74,7 @@ def getSampWidth(dtype):
     elif dtype == np.float64:
         return 8
     else:
-        raise ValueError('Invalid data type: %s' % dtype)
+        raise ValueError("Invalid data type: %s" % dtype)

 def scaleBlockSens(block, sens):
@ -89,7 +91,7 @@ def scaleBlockSens(block, sens):
         sw = getSampWidth(block.dtype)
         fac = 2 ** (8 * sw - 1) - 1
     else:
-        fac = 1.
+        fac = 1.0
     return block.astype(LASP_NUMPY_FLOAT_TYPE) / fac / sens[np.newaxis, :]
@ -107,7 +109,7 @@ class IterRawData:
""" """
assert isinstance(channels, list) assert isinstance(channels, list)
fa = f['audio'] fa = f["audio"]
self.fa = fa self.fa = fa
self.i = 0 self.i = 0
@ -117,8 +119,8 @@ class IterRawData:
         # nchannels = fa.shape[2]
         self.channels = channels
-        self.istart = kwargs.pop('istart', 0)
-        self.istop = kwargs.pop('istop', blocksize*nblocks)
+        self.istart = kwargs.pop("istart", 0)
+        self.istop = kwargs.pop("istop", blocksize * nblocks)
         self.firstblock = self.istart // blocksize
         self.lastblock = self.istop // blocksize
@ -130,7 +132,7 @@ class IterRawData:
         if self.istop < 0:
             self.istop += blocksize * nblocks
         if self.istop <= self.istart:
-            raise ValueError('Stop index is smaller than start index')
+            raise ValueError("Stop index is smaller than start index")
         if self.istop != blocksize * nblocks:
             self.lastblock_stop_offset = self.istop % blocksize
@ -171,6 +173,7 @@ class IterData(IterRawData):
     Iterate over blocks of data, scaled with sensitivity and integer scaling
     between 0 and 1
     """
+
     def __init__(self, fa, channels, sensitivity, **kwargs):
         super().__init__(fa, channels, **kwargs)
         self.sens = np.asarray(sensitivity)[self.channels]
@ -187,8 +190,8 @@ class Measurement:
     def __init__(self, fn):
         """Initialize a Measurement object based on the filename."""
-        if '.h5' not in fn:
-            fn += '.h5'
+        if ".h5" not in fn:
+            fn += ".h5"

         # Full filepath
         self.fn = fn
@ -198,26 +201,26 @@ class Measurement:
         # Open the h5 file in read-plus mode, to allow for changing the
         # measurement comment.
-        with h5.File(fn, 'r') as f:
+        with h5.File(fn, "r") as f:
             # Check for video data
             try:
-                f['video']
+                f["video"]
                 self.has_video = True
             except KeyError:
                 self.has_video = False

-            self.nblocks, self.blocksize, self.nchannels = f['audio'].shape
-            dtype = f['audio'].dtype
+            self.nblocks, self.blocksize, self.nchannels = f["audio"].shape
+            dtype = f["audio"].dtype
             self.dtype = dtype
             self.sampwidth = getSampWidth(dtype)

-            self.samplerate = f.attrs['samplerate']
-            self.N = (self.nblocks * self.blocksize)
+            self.samplerate = f.attrs["samplerate"]
+            self.N = self.nblocks * self.blocksize
             self.T = self.N / self.samplerate

             try:
-                self.version_major = f.attrs['LASP_VERSION_MAJOR']
-                self.version_minor = f.attrs['LASP_VERSION_MINOR']
+                self.version_major = f.attrs["LASP_VERSION_MAJOR"]
+                self.version_minor = f.attrs["LASP_VERSION_MINOR"]
             except KeyError:
                 self.version_major = 0
                 self.version_minor = 1
@ -225,45 +228,47 @@ class Measurement:
             # Due to a previous bug, the channel names were not stored
             # consistently, i.e. as 'channel_names' and later camelcase.
             try:
-                self._channelNames = f.attrs['channelNames']
+                self._channelNames = f.attrs["channelNames"]
             except KeyError:
                 try:
-                    self._channelNames = f.attrs['channel_names']
-                    logging.info("Measurement file obtained which stores channel names with *old* attribute 'channel_names'")
+                    self._channelNames = f.attrs["channel_names"]
+                    logging.info(
+                        "Measurement file obtained which stores channel names with *old* attribute 'channel_names'"
+                    )
                 except KeyError:
                     # No channel names found in measurement file
-                    logging.info('No channel name data found in measurement')
-                    self._channelNames = [f'Unnamed {i}' for i in range(self.nchannels)]
+                    logging.info("No channel name data found in measurement")
+                    self._channelNames = [f"Unnamed {i}" for i in range(self.nchannels)]

             # comment = read-write thing
-            if 'comment' in f.attrs:
-                self._comment = f.attrs['comment']
+            if "comment" in f.attrs:
+                self._comment = f.attrs["comment"]
             else:
-                self._comment = ''
+                self._comment = ""

             # Sensitivity
             try:
-                sens = f.attrs['sensitivity']
-                self._sens = sens * \
-                    np.ones(self.nchannels) if isinstance(
-                        sens, float) else sens
+                sens = f.attrs["sensitivity"]
+                self._sens = (
+                    sens * np.ones(self.nchannels) if isinstance(sens, float) else sens
+                )
             except KeyError:
                 self._sens = np.ones(self.nchannels)

             # The time is cached AND ALWAYS ASSUMED TO BE AN IMMUTABLE OBJECT.
             # It is also cached. Changing the measurement timestamp should not
             # be done.
-            self._time = f.attrs['time']
+            self._time = f.attrs["time"]

             # Quantity stored as channel.
             self._qtys = None
             try:
-                qtys_enum_idx = f.attrs['qtys_enum_idx']
+                qtys_enum_idx = f.attrs["qtys_enum_idx"]
                 self._qtys = [SIQtys.fromInt(idx) for idx in qtys_enum_idx]
             except KeyError:
                 try:
-                    qtys_json = f.attrs['qtys']
+                    qtys_json = f.attrs["qtys"]
                     # Load quantity data
                     self._qtys = [Qty.from_json(qty_json) for qty_json in qtys_json]
                 except KeyError:
@ -273,17 +278,19 @@ class Measurement:
             if self._qtys is None:
                 self._qtys = [SIQtys.default() for i in range(self.nchannels)]
-                logging.debug(f'Physical quantity data not available in measurement file. Assuming {SIQtys.default}')
+                logging.debug(
+                    f"Physical quantity data not available in measurement file. Assuming {SIQtys.default}"
+                )

     def setAttribute(self, atrname, value):
         """
         Set an attribute in the measurement file, and keep a local copy in
         memory for efficient accessing.
         """
-        with self.file('r+') as f:
+        with self.file("r+") as f:
             # Update comment attribute in the file
             f.attrs[atrname] = value
-            setattr(self, '_' + atrname, value)
+            setattr(self, "_" + atrname, value)

     @property
     def name(self):
@ -297,14 +304,13 @@ class Measurement:
     @channelNames.setter
     def channelNames(self, newchnames):
         if len(newchnames) != self.nchannels:
-            raise RuntimeError('Invalid length of new channel names')
-        self.setAttribute('channelNames', newchnames)
+            raise RuntimeError("Invalid length of new channel names")
+        self.setAttribute("channelNames", newchnames)

     @property
     def channelConfig(self):
         chcfg = []
-        for chname, sens, qty in zip(self.channelNames, self.sensitivity,
-                                     self.qtys):
+        for chname, sens, qty in zip(self.channelNames, self.sensitivity, self.qtys):
             ch = DaqChannel()
             ch.enabled = True
             ch.name = chname
@ -334,26 +340,26 @@ class Measurement:
     @qtys.setter
     def qtys(self, newqtys):
         if not len(newqtys) == len(self._qtys):
-            raise ValueError('Invalid number of quantities')
+            raise ValueError("Invalid number of quantities")
         qtys_int = [qty.toInt() for qty in newqtys]
         # Use setAttribute here, but thos store the jsonified version as well,
         # which we have to overwrite again with the deserialized ones. This is
         # actually not a very nice way of coding.
-        with self.file('r+') as f:
+        with self.file("r+") as f:
             # Update comment attribute in the file
-            f.attrs['qtys_enum_idx'] = qtys_int
+            f.attrs["qtys_enum_idx"] = qtys_int

         self._qtys = newqtys

     @contextmanager
-    def file(self, mode='r'):
+    def file(self, mode="r"):
         """Contextmanager which opens the storage file and yields the file.

         Args:
             mode: Opening mode for the file. Should either be 'r', or 'r+'
         """
-        if mode not in ('r', 'r+'):
-            raise ValueError('Invalid file opening mode.')
+        if mode not in ("r", "r+"):
+            raise ValueError("Invalid file opening mode.")
         with h5.File(self.fn, mode) as f:
             yield f
@ -373,9 +379,9 @@ class Measurement:
         Args:
             cmt: Comment text string to set
         """
-        with self.file('r+') as f:
+        with self.file("r+") as f:
             # Update comment attribute in the file
-            f.attrs['comment'] = cmt
+            f.attrs["comment"] = cmt
             self._comment = cmt

     @property
@ -400,7 +406,7 @@ class Measurement:
""" """
time_struct = time.localtime(self.time) time_struct = time.localtime(self.time)
time_string = time.strftime('%Y-%m-%d %H:%M:%S', time_struct) time_string = time.strftime("%Y-%m-%d %H:%M:%S", time_struct)
return time_string return time_string
def rms(self, channels=None, substract_average=False): def rms(self, channels=None, substract_average=False):
@ -415,8 +421,8 @@ class Measurement:
         Returns:
             1D array with rms values for each channel
         """
-        meansquare = 0.  # Mean square of the signal, including its average
-        sum_ = 0.  # Sumf of the values of the signal, used to compute average
+        meansquare = 0.0  # Mean square of the signal, including its average
+        sum_ = 0.0  # Sumf of the values of the signal, used to compute average
         N = 0
         with self.file() as f:
             for block in self.iterData(channels):
@ -461,7 +467,7 @@ class Measurement:
         return np.concatenate(rawdata, axis=0)

     def iterData(self, channels, **kwargs):
-        sensitivity = kwargs.pop('sensitivity', self.sensitivity)
+        sensitivity = kwargs.pop("sensitivity", self.sensitivity)
         if channels is None:
             channels = list(range(self.nchannels))
         with self.file() as f:
@ -496,9 +502,9 @@ class Measurement:
             Cross-power-spectra. C[freq, ch_i, ch_j] = C_ij
         """
-        nfft = kwargs.pop('nfft', 2048)
-        window = kwargs.pop('windowType', Window.WindowType.Hann)
-        overlap = kwargs.pop('overlap', 50.0)
+        nfft = kwargs.pop("nfft", 2048)
+        window = kwargs.pop("windowType", Window.WindowType.Hann)
+        overlap = kwargs.pop("overlap", 50.0)

         if channels is None:
             channels = list(range(self.nchannels))
@ -541,7 +547,9 @@ class Measurement:
         # the current block
         en = [None] + [blocks[i] - blocks[i - 1] for i in range(1, Nblocks)]

-        noise_est = [None] + [-np.average(en[i]*en[i+1]) for i in range(1,len(en)-1)]
+        noise_est = [None] + [
+            -np.average(en[i] * en[i + 1]) for i in range(1, len(en) - 1)
+        ]

         # Create weighting coefficients
         sum_inverse_noise = sum([1 / n for n in noise_est[1:]])
@ -578,7 +586,6 @@ class Measurement:
         return freq, CS

-
     @property
     def sensitivity(self):
         """Sensitivity of the data in U^-1, from floating point data scaled
@ -607,9 +614,9 @@ class Measurement:
         valid &= sens.shape[0] == self.nchannels
         valid &= sens.dtype == float
         if not valid:
-            raise ValueError('Invalid sensitivity value(s) given')
-        with self.file('r+') as f:
-            f.attrs['sensitivity'] = sens
+            raise ValueError("Invalid sensitivity value(s) given")
+        with self.file("r+") as f:
+            f.attrs["sensitivity"] = sens
         self._sens = sens

     def checkOverflow(self, channels):
@ -621,7 +628,7 @@ class Measurement:
         for block in self.iterData(channels):
             dtype = block.dtype
-            if dtype.kind == 'i':
+            if dtype.kind == "i":
                 # minvalue = np.iinfo(dtype).min
                 maxvalue = np.iinfo(dtype).max
                 if np.max(np.abs(block)) >= 0.9 * maxvalue:
@ -631,9 +638,7 @@ class Measurement:
                     return False
         return False

-
-    def exportAsWave(self, fn=None, force=False, dtype=None,
-                     normalize=False, **kwargs):
+    def exportAsWave(self, fn=None, force=False, dtype=None, normalize=False, **kwargs):
         """Export measurement file as wave. In case the measurement data is
         stored as floats, the values are scaled to the proper integer (PCM)
         data format.
@ -654,14 +659,16 @@ class Measurement:
             fn = self.fn
             fn = os.path.splitext(fn)[0]

-        if os.path.splitext(fn)[1] != '.wav':
-            fn += '.wav'
+        if os.path.splitext(fn)[1] != ".wav":
+            fn += ".wav"

         if os.path.exists(fn) and not force:
-            raise RuntimeError(f'File already exists: {fn}')
+            raise RuntimeError(f"File already exists: {fn}")

         if not np.isclose(self.samplerate % 1, 0):
-            raise RuntimeError(f'Sample rates should be approximately integer for exporting to Wave to work')
+            raise RuntimeError(
+                f"Sample rates should be approximately integer for exporting to Wave to work"
+            )

         # TODO: With VERY large measurment files, this is not possible! Is this
         # a theoretical case?
@ -678,15 +685,15 @@ class Measurement:
logging.debug(f"dtype not passed as arg; using dtype = {dtype}") logging.debug(f"dtype not passed as arg; using dtype = {dtype}")
# dtype conversion # dtype conversion
if dtype=='int16': if dtype == "int16":
newtype = np.int16 newtype = np.int16
newsampwidth = 2 newsampwidth = 2
elif dtype=='int32': elif dtype == "int32":
newtype = np.int32 newtype = np.int32
newsampwidth = 4 newsampwidth = 4
elif dtype=='float32': elif dtype == "float32":
newtype = np.float32 newtype = np.float32
elif dtype=='float64': elif dtype == "float64":
newtype = np.float64 newtype = np.float64
else: else:
logging.debug(f"cannot handle this dtype {dtype}") logging.debug(f"cannot handle this dtype {dtype}")
@ -697,7 +704,7 @@ class Measurement:
             sensone = np.ones_like(self.sensitivity)
             data = scaleBlockSens(data, sensone)

-            if dtype=='int16' or dtype=='int32':
+            if dtype == "int16" or dtype == "int32":
                 # Scale data to integer range and convert to integers
                 scalefac = 2 ** (8 * newsampwidth - 1) - 1
                 data = (data * scalefac).astype(newtype)
@ -705,14 +712,16 @@ class Measurement:
         wavfile.write(fn, int(self.samplerate), data.astype(newtype))

     @staticmethod
-    def fromtxt(fn,
-                skiprows,
-                samplerate,
-                sensitivity,
-                mfn=None,
-                timestamp=None,
-                delimiter='\t',
-                firstcoltime=True):
+    def fromtxt(
+        fn,
+        skiprows,
+        samplerate,
+        sensitivity,
+        mfn=None,
+        timestamp=None,
+        delimiter="\t",
+        firstcoltime=True,
+    ):
         """Converts a txt file to a LASP Measurement file, opens the associated
         Measurement object and returns it. The measurement file will have the
         same file name as the txt file, except with h5 extension.
@ -732,50 +741,57 @@ class Measurement:
             sample time.
         """
         if not os.path.exists(fn):
-            raise ValueError(f'File {fn} does not exist.')
+            raise ValueError(f"File {fn} does not exist.")
         if timestamp is None:
             timestamp = os.path.getmtime(fn)
         if mfn is None:
-            mfn = os.path.splitext(fn)[0] + '.h5'
+            mfn = os.path.splitext(fn)[0] + ".h5"
         else:
-            mfn = os.path.splitext(mfn)[0] + '.h5'
+            mfn = os.path.splitext(mfn)[0] + ".h5"

         dat = np.loadtxt(fn, skiprows=skiprows, delimiter=delimiter)
         if firstcoltime:
             time = dat[:, 0]
             if not np.isclose(time[1] - time[0], 1 / samplerate):
-                raise ValueError('Samplerate given does not agree with '
-                                 'samplerate in file')
+                raise ValueError(
+                    "Samplerate given does not agree with " "samplerate in file"
+                )

             # Chop off first column
             dat = dat[:, 1:]
         nchannels = dat.shape[1]
         if nchannels != sensitivity.shape[0]:
             raise ValueError(
-                f'Invalid sensitivity length given. Should be: {nchannels}')
+                f"Invalid sensitivity length given. Should be: {nchannels}"
+            )

-        with h5.File(mfn, 'w') as hf:
-            hf.attrs['samplerate'] = samplerate
-            hf.attrs['sensitivity'] = sensitivity
-            hf.attrs['time'] = timestamp
-            hf.attrs['blocksize'] = 1
-            hf.attrs['nchannels'] = nchannels
-            ad = hf.create_dataset('audio', (1, dat.shape[0], dat.shape[1]),
-                                   dtype=dat.dtype,
-                                   maxshape=(1, dat.shape[0], dat.shape[1]),
-                                   compression='gzip')
+        with h5.File(mfn, "w") as hf:
+            hf.attrs["samplerate"] = samplerate
+            hf.attrs["sensitivity"] = sensitivity
+            hf.attrs["time"] = timestamp
+            hf.attrs["blocksize"] = 1
+            hf.attrs["nchannels"] = nchannels
+            ad = hf.create_dataset(
+                "audio",
+                (1, dat.shape[0], dat.shape[1]),
+                dtype=dat.dtype,
+                maxshape=(1, dat.shape[0], dat.shape[1]),
+                compression="gzip",
+            )
             ad[0] = dat
         return Measurement(mfn)
     @staticmethod
-    def fromnpy(data,
-                samplerate,
-                sensitivity,
-                mfn,
-                timestamp=None,
-                qtys: List[SIQtys] = None,
-                channelNames: List[str] = None,
-                force=False) -> Measurement:
+    def fromnpy(
+        data,
+        samplerate,
+        sensitivity,
+        mfn,
+        timestamp=None,
+        qtys: List[SIQtys] = None,
+        channelNames: List[str] = None,
+        force=False,
+    ) -> Measurement:
         """
         Converts a numpy array to a LASP Measurement file, opens the
         associated Measurement object and returns it. The measurement file will
@ -802,27 +818,27 @@ class Measurement:
             force: If True, overwrites existing files with specified `mfn`
                 name.
         """
-        if os.path.splitext(mfn)[1] != '.h5':
-            mfn += '.h5'
+        if os.path.splitext(mfn)[1] != ".h5":
+            mfn += ".h5"
         if os.path.exists(mfn) and not force:
-            raise ValueError(f'File {mfn} already exist.')
+            raise ValueError(f"File {mfn} already exist.")

         if timestamp is None:
             timestamp = int(time.time())

         if data.ndim != 2:
             data = data[:, np.newaxis]

         try:
             len(sensitivity)
         except:
-            raise ValueError('Sensitivity should be given as array-like data type')
+            raise ValueError("Sensitivity should be given as array-like data type")

         sensitivity = np.asarray(sensitivity)

         nchannels = data.shape[1]
         if nchannels != sensitivity.shape[0]:
             raise ValueError(
-                f'Invalid sensitivity length given. Should be: {nchannels}')
+                f"Invalid sensitivity length given. Should be: {nchannels}"
+            )

         if channelNames is not None:
             if len(channelNames) != nchannels:
@ -835,24 +851,27 @@ class Measurement:
raise RuntimeError("Illegal length of qtys list given") raise RuntimeError("Illegal length of qtys list given")
with h5.File(mfn, 'w') as hf: with h5.File(mfn, "w") as hf:
hf.attrs['samplerate'] = samplerate hf.attrs["samplerate"] = samplerate
hf.attrs['sensitivity'] = sensitivity hf.attrs["sensitivity"] = sensitivity
hf.attrs['time'] = timestamp hf.attrs["time"] = timestamp
hf.attrs['blocksize'] = 1 hf.attrs["blocksize"] = 1
hf.attrs['nchannels'] = nchannels hf.attrs["nchannels"] = nchannels
# Add physical quantity indices # Add physical quantity indices
hf.attrs['qtys_enum_idx'] = [qty.toInt() for qty in qtys] hf.attrs['qtys_enum_idx'] = [qty.toInt() for qty in qtys]
# Add channel names in case given # Add channel names in case given
if channelNames is not None: if channelNames is not None:
hf.attrs['channelNames'] = channelNames hf.attrs["channelNames"] = channelNames
ad = hf.create_dataset('audio', (1, data.shape[0], data.shape[1]), ad = hf.create_dataset(
"audio",
(1, data.shape[0], data.shape[1]),
dtype=data.dtype, dtype=data.dtype,
maxshape=(1, data.shape[0], data.shape[1]), maxshape=(1, data.shape[0], data.shape[1]),
compression='gzip') compression="gzip",
)
ad[0] = data ad[0] = data
return Measurement(mfn) return Measurement(mfn)
@ -865,9 +884,11 @@ class Measurement:
         base_fn = os.path.splitext(fn)[0]
         if newfn is None:
-            newfn = base_fn + '.h5'
+            newfn = base_fn + ".h5"
         if os.path.exists(newfn) and not force:
-            raise RuntimeError(f'Measurement file name {newfn} already exists in path, set "force" to true to overwrite')
+            raise RuntimeError(
+                f'Measurement file name {newfn} already exists in path, set "force" to true to overwrite'
+            )

         samplerate, data = wavfile.read(fn)
         if data.ndim == 2:
@ -878,16 +899,19 @@ class Measurement:
             data = data[:, np.newaxis]
         sensitivity = np.ones(nchannels)

-        with h5.File(newfn, 'w') as hf:
-            hf.attrs['samplerate'] = samplerate
-            hf.attrs['nchannels'] = nchannels
-            hf.attrs['time'] = timestamp
-            hf.attrs['blocksize'] = 1
-            hf.attrs['sensitivity'] = sensitivity
-            ad = hf.create_dataset('audio', (1, nframes, nchannels),
-                                   dtype=data.dtype,
-                                   maxshape=(1, nframes, nchannels),
-                                   compression='gzip')
+        with h5.File(newfn, "w") as hf:
+            hf.attrs["samplerate"] = samplerate
+            hf.attrs["nchannels"] = nchannels
+            hf.attrs["time"] = timestamp
+            hf.attrs["blocksize"] = 1
+            hf.attrs["sensitivity"] = sensitivity
+            ad = hf.create_dataset(
+                "audio",
+                (1, nframes, nchannels),
+                dtype=data.dtype,
+                maxshape=(1, nframes, nchannels),
+                compression="gzip",
+            )
             ad[0] = data
         return Measurement(newfn)

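Most of the changes above are Black re-formatting; the functional change is that LASP_VERSION_MAJOR/MINOR are now imported from lasp_version instead of lasp_cpp. A minimal usage sketch of the (otherwise unchanged) Measurement.fromnpy API follows; it assumes an installed lasp package and write access to the working directory, and the file name "example" is made up:

import numpy as np
from lasp import Measurement

fs = 48000                      # sample rate in Hz (example value)
data = np.random.randn(fs, 2)   # one second of 2-channel dummy data
sens = np.ones(2)               # sensitivity of 1.0 per channel

# Writes example.h5 in the current directory and returns a Measurement object
m = Measurement.fromnpy(data, samplerate=fs, sensitivity=sens,
                        mfn="example", force=True)
print(m.nchannels, m.T)         # -> 2 channels, ~1.0 s duration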

@ -7,8 +7,8 @@ import dataclasses, logging, os, time, h5py, threading
 import numpy as np
 from .lasp_atomic import Atomic
-from .lasp_cpp import (LASP_VERSION_MAJOR, LASP_VERSION_MINOR, InDataHandler,
-                       StreamMgr)
+from .lasp_cpp import InDataHandler, StreamMgr
+from .lasp_version import LASP_VERSION_MAJOR, LASP_VERSION_MINOR

 @dataclasses.dataclass
@ -84,10 +84,10 @@ class Recording:
         try:
             # Open the file
-            self.f = h5py.File(self.fn, "w", 'stdio')
+            self.f = h5py.File(self.fn, "w", "stdio")
             self.f.flush()
         except Exception as e:
-            logging.error(f'Error creating measurement file {e}')
+            logging.error(f"Error creating measurement file {e}")
             raise

         # This flag is used to delete the file on finish(), and can be used
@ -191,7 +191,7 @@ class Recording:
""" """
if self.stop(): if self.stop():
logging.debug('Stop flag set, early return in inCallback') logging.debug("Stop flag set, early return in inCallback")
# Stop flag is raised. We do not add any data anymore. # Stop flag is raised. We do not add any data anymore.
return True return True


@ -0,0 +1,6 @@
import importlib.metadata

__version__ = importlib.metadata.version(__package__ or __name__)

LASP_VERSION_MAJOR, LASP_VERSION_MINOR, LASP_VERSION_PATCH = [
    int(a) for a in __version__.split(".")
]

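A small sketch of how the new module is meant to be consumed. Because it relies on importlib.metadata, it works for an installed package (wheel or editable install), not for a bare, uninstalled source checkout:

from lasp.lasp_version import __version__, LASP_VERSION_MAJOR, LASP_VERSION_MINOR

print(__version__)                             # e.g. "1.0.1", read from the package metadata
print(LASP_VERSION_MAJOR, LASP_VERSION_MINOR)  # integers parsed from that version string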
Some files were not shown because too many files have changed in this diff.