#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
"""!
Author: J.A. de Jong - ASCEE

Description: Measurement class

The ASCEE hdf5 measurement file format contains the following fields:

- Attributes:

'version': If not given, version 1 is assumed. For version 1, measurement data
is assumed to be acoustic data.
'samplerate': The audio data sample rate in Hz.
'nchannels': The number of audio channels in the file.
'sensitivity': (Optional) the stored sensitivity of the recorded channels.
               This can be a single value, or a list of sensitivities for
               each channel. Both representations are allowed.

For measurement files of LASP < v1.0:
'qtys': (Optional) list of quantities that is recorded for each channel. If
this array is not found, quantities default to 'Number / Full scale'.

For measurement files of LASP >= 1.0:

- Datasets:

'audio': 3-dimensional array of blocks of audio data. The first axis is the
block index, the second axis the sample number and the third axis is the
channel number. The data type is either int16, int32 or float64 / float32. If
raw data is stored as integer values (int16, int32), the actual values should
be pre-scaled by the maximum positive number (2**(nb-1) - 1), such that the
corresponding 'number' lies between -1.0 and 1.0.

'video': 4-dimensional array of video frames. The first index is the frame
number, the second the x-value of the pixel and the third is the y-value of
the pixel. The last axis is the color. This axis has length 3 and the colors
are stored as (r, g, b), where typically a color depth of 256 is used
(np.uint8 data format).

The video dataset may not be present in the data.

"""
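# The layout described above can also be inspected directly with h5py. A
# minimal sketch (the file name 'meas.h5' is just an example):
#
#   import h5py
#   with h5py.File('meas.h5', 'r') as f:
#       fs = f.attrs['samplerate']            # sample rate in Hz
#       audio = f['audio']                    # (nblocks, blocksize, nchannels)
#       nblocks, blocksize, nchannels = audio.shape
#       first_block = audio[0, :, :]          # one block of raw samples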

__all__ = ["Measurement", "scaleBlockSens"]
from contextlib import contextmanager
import h5py as h5
import numpy as np
from .lasp_config import LASP_NUMPY_FLOAT_TYPE
from scipy.io import wavfile
import os, time, wave, logging
from .lasp_common import SIQtys, Qty, getFreq
from .lasp_version import LASP_VERSION_MAJOR, LASP_VERSION_MINOR
from .lasp_cpp import Window, DaqChannel, AvPowerSpectra
from typing import List
from functools import lru_cache

def getSampWidth(dtype):
    """Returns the width of a single sample in **bytes**.

    Args:
        dtype: numpy dtype

    Returns:
        Size of a sample in bytes (int)
    """
    if dtype in (np.int32, np.float32):
        return 4
    elif dtype == np.int16:
        return 2
    elif dtype == np.float64:
        return 8
    else:
        raise ValueError("Invalid data type: %s" % dtype)

def scaleBlockSens(block, sens):
    """Scale a block of raw data to return raw acoustic pressure data.

    Args:
        block: block of raw data with integer data type
        sens: array of sensitivity coefficients for each channel.
    """
    sens = np.asarray(sens)
    assert sens.size == block.shape[1]
    if np.issubdtype(block.dtype.type, np.integer):
        sw = getSampWidth(block.dtype)
        fac = 2 ** (8 * sw - 1) - 1
    else:
        fac = 1.0
    return block.astype(LASP_NUMPY_FLOAT_TYPE) / fac / sens[np.newaxis, :]

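# Worked example (a sketch): for int16 data, getSampWidth gives 2 bytes, so
# fac = 2**15 - 1 = 32767. A raw sample value of 16384 on a channel with a
# sensitivity of 0.05 (e.g. 0.05 V/Pa) is then returned as
# 16384 / 32767 / 0.05, which is approximately 10.0 in the physical unit of
# that channel.
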
class IterRawData:
    """Iterate over the stored blocks of the raw measurement data of a h5 file."""

    def __init__(self, f, channels, **kwargs):
        """Initialize a BlockIter object.

        Args:
            f: Audio dataset in the h5 file, accessed as f['audio']
            channels: list of channel indices to use
            istart: index of first sample
            istop: index of last sample (not including istop)
        """
        assert isinstance(channels, list)
        fa = f["audio"]
        self.fa = fa
        self.i = 0

        nblocks = fa.shape[0]
        blocksize = fa.shape[1]
        self.blocksize = blocksize
        # nchannels = fa.shape[2]
        self.channels = channels

        self.istart = kwargs.pop("istart", 0)
        self.istop = kwargs.pop("istop", blocksize * nblocks)

        self.firstblock = self.istart // blocksize
        self.lastblock = self.istop // blocksize
        if self.istop % blocksize == 0:
            self.lastblock -= 1

        self.firstblock_start_offset = self.istart % blocksize

        if self.istop < 0:
            self.istop += blocksize * nblocks
        if self.istop <= self.istart:
            raise ValueError("Stop index is smaller than start index")

        if self.istop != blocksize * nblocks:
            self.lastblock_stop_offset = self.istop % blocksize
        else:
            self.lastblock_stop_offset = blocksize

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next block."""
        fa = self.fa

        # nblocks_to_return = self.lastblock-self.firstblock+1

        block = self.firstblock + self.i

        if block > self.lastblock:
            raise StopIteration

        if block == self.firstblock:
            start_offset = self.firstblock_start_offset
        else:
            start_offset = 0

        if block == self.lastblock:
            stop_offset = self.lastblock_stop_offset
        else:
            stop_offset = self.blocksize
        # print(f'block: {block}, starto: {start_offset}, stopo {stop_offset}')

        self.i += 1
        return fa[block, start_offset:stop_offset, :][:, self.channels]

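# Usage sketch (assuming `f` is an open measurement file handle, for example
# obtained from Measurement.file()): iterate over the raw samples of channel 0
# only, restricted to the first 48000 samples:
#
#   for block in IterRawData(f, [0], istart=0, istop=48000):
#       ...  # block has shape (nsamples_in_this_block, 1)
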
class IterData(IterRawData):
    """
    Iterate over blocks of data, scaled with the sensitivity, where integer
    data is first scaled back to the range -1.0 ... 1.0.
    """

    def __init__(self, fa, channels, sensitivity, **kwargs):
        super().__init__(fa, channels, **kwargs)
        self.sens = np.asarray(sensitivity)[self.channels]
        assert self.sens.ndim == 1

    def __next__(self):
        nextraw = super().__next__()
        return scaleBlockSens(nextraw, self.sens)

class Measurement:
    """Provides access to measurement data stored in the h5 measurement file
    format."""

    def __init__(self, fn):
        """Initialize a Measurement object based on the filename."""
        if ".h5" not in fn:
            fn += ".h5"

        # Full filepath
        self.fn = fn

        # Base filename
        self.fn_base = os.path.split(fn)[1]

        # Open the h5 file in read mode to load the measurement metadata.
        # Writing (e.g. changing the measurement comment) is done later via
        # self.file('r+').
        with h5.File(fn, "r") as f:
            # Check for video data
            try:
                f["video"]
                self.has_video = True
            except KeyError:
                self.has_video = False

            self.nblocks, self.blocksize, self.nchannels = f["audio"].shape
            dtype = f["audio"].dtype
            self.dtype = dtype
            self.sampwidth = getSampWidth(dtype)

            self.samplerate = f.attrs["samplerate"]
            self.N = self.nblocks * self.blocksize
            self.T = self.N / self.samplerate

            try:
                self.version_major = f.attrs["LASP_VERSION_MAJOR"]
                self.version_minor = f.attrs["LASP_VERSION_MINOR"]
            except KeyError:
                self.version_major = 0
                self.version_minor = 1

            # Due to a previous bug, the channel names were not stored
            # consistently, i.e. as 'channel_names' and later camelcase.
            try:
                self._channelNames = f.attrs["channelNames"]
            except KeyError:
                try:
                    self._channelNames = f.attrs["channel_names"]
                    logging.info(
                        "Measurement file obtained which stores channel names with *old* attribute 'channel_names'"
                    )
                except KeyError:
                    # No channel names found in measurement file
                    logging.info("No channel name data found in measurement")
                    self._channelNames = [f"Unnamed {i}" for i in range(self.nchannels)]

            # comment = read-write thing
            if "comment" in f.attrs:
                self._comment = f.attrs["comment"]
            else:
                self._comment = ""

            # Sensitivity
            try:
                sens = f.attrs["sensitivity"]
                self._sens = (
                    sens * np.ones(self.nchannels) if isinstance(sens, float) else sens
                )
            except KeyError:
                self._sens = np.ones(self.nchannels)

            # The time is cached AND ALWAYS ASSUMED TO BE AN IMMUTABLE OBJECT.
            # Changing the measurement timestamp should not be done.
            self._time = f.attrs["time"]

            # Quantity stored as channel.
            self._qtys = None

            try:
                qtys_enum_idx = f.attrs["qtys_enum_idx"]
                self._qtys = [SIQtys.fromInt(idx) for idx in qtys_enum_idx]
            except KeyError:
                try:
                    qtys_json = f.attrs["qtys"]
                    # Load quantity data
                    self._qtys = [Qty.from_json(qty_json) for qty_json in qtys_json]
                except KeyError:
                    # If quantity data is not available, this is an 'old'
                    # measurement file.
                    pass

            if self._qtys is None:
                self._qtys = [SIQtys.default() for i in range(self.nchannels)]
                logging.debug(
                    f"Physical quantity data not available in measurement file. Assuming {SIQtys.default}"
                )

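    # Basic usage sketch (the file name 'meas.h5' is just an example):
    #
    #   meas = Measurement('meas.h5')
    #   print(meas.samplerate, meas.nchannels, meas.recTime)
    #   sig = meas.data()   # scaled data, shape (nsamples, nchannels)
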
    def setAttribute(self, atrname, value):
        """
        Set an attribute in the measurement file, and keep a local copy in
        memory for efficient access.
        """
        with self.file("r+") as f:
            # Update the attribute in the file
            f.attrs[atrname] = value
            setattr(self, "_" + atrname, value)

    @property
    def name(self):
        """Returns filename base without extension."""
        return os.path.splitext(self.fn_base)[0]

    @property
    def channelNames(self):
        return self._channelNames

    @channelNames.setter
    def channelNames(self, newchnames):
        if len(newchnames) != self.nchannels:
            raise RuntimeError("Invalid length of new channel names")
        self.setAttribute("channelNames", newchnames)

    @property
    def channelConfig(self):
        chcfg = []
        for chname, sens, qty in zip(self.channelNames, self.sensitivity, self.qtys):
            ch = DaqChannel()
            ch.enabled = True
            ch.name = chname
            ch.sensitivity = sens
            ch.qty = qty.cpp_enum
            chcfg.append(ch)
        return chcfg

    @channelConfig.setter
    def channelConfig(self, chcfg: List[DaqChannel]):
        chname = []
        sens = []
        qtys = []
        for ch in chcfg:
            chname.append(ch.name)
            sens.append(ch.sensitivity)
            qtys.append(SIQtys.fromCppEnum(ch.qty))

        self.channelNames = chname
        self.sensitivity = sens
        self.qtys = qtys

    @property
    def qtys(self):
        return self._qtys

    @qtys.setter
    def qtys(self, newqtys):
        if not len(newqtys) == len(self._qtys):
            raise ValueError("Invalid number of quantities")
        qtys_int = [qty.toInt() for qty in newqtys]
        # Using setAttribute here would store the jsonified version as well,
        # which we would have to overwrite again with the deserialized ones.
        # This is actually not a very nice way of coding.
        with self.file("r+") as f:
            # Update the qtys attribute in the file
            f.attrs["qtys_enum_idx"] = qtys_int

        self._qtys = newqtys

    @contextmanager
    def file(self, mode="r"):
        """Contextmanager which opens the storage file and yields the file.

        Args:
            mode: Opening mode for the file. Should either be 'r', or 'r+'
        """
        if mode not in ("r", "r+"):
            raise ValueError("Invalid file opening mode.")
        with h5.File(self.fn, mode) as f:
            yield f

    @property
    def comment(self):
        """Return the measurement comment.

        Returns:
            The measurement comment (text string)
        """
        return self._comment

    @comment.setter
    def comment(self, cmt):
        """Set the measurement comment.

        Args:
            cmt: Comment text string to set
        """
        with self.file("r+") as f:
            # Update comment attribute in the file
            f.attrs["comment"] = cmt
            self._comment = cmt

    @property
    @lru_cache()
    def recTime(self):
        """Returns the total recording time of the measurement, in float
        seconds."""
        return self.blocksize * self.nblocks / self.samplerate

    @property
    def time(self):
        """Returns the measurement time in seconds since the epoch."""
        return self._time

    @property
    @lru_cache()
    def timestr(self):
        """
        Return a properly formatted string of the measurement time, in order
        of year-month-day hour etc.
        """
        time_struct = time.localtime(self.time)
        time_string = time.strftime("%Y-%m-%d %H:%M:%S", time_struct)
        return time_string

    def rms(self, channels=None, substract_average=False):
        """Returns the root mean square values for each channel.

        Args:
            channels: list of channels
            substract_average: If set to True, computes the rms of only the
                oscillating component of the signal (i.e. the standard
                deviation of the signal).

        Returns:
            1D array with rms values for each channel
        """
        meansquare = 0.0  # Mean square of the signal, including its average
        sum_ = 0.0  # Sum of the values of the signal, used to compute average
        N = 0
        with self.file() as f:
            for block in self.iterData(channels):
                Nblock = block.shape[0]
                sum_ += np.sum(block, axis=0)
                N += Nblock
                meansquare += np.sum(block**2, axis=0) / self.N

        avg = sum_ / N
        # In fact, this is not the complete RMS, as it includes the DC
        # If p = p_dc + p_osc, then rms(p_osc) = sqrt(ms(p)-ms(p_dc))
        if substract_average:
            meansquare -= avg**2
        rms = np.sqrt(meansquare)
        return rms

    def variance(self, channels=None):
        return self.rms(channels=channels, substract_average=True)

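    # Usage sketch: the RMS values are in the physical unit of each channel.
    # For a channel that records acoustic pressure in Pa, a sound pressure
    # level could be derived in the usual way (the 20 µPa reference assumes an
    # acoustic pressure channel; `meas` is a Measurement instance):
    #
    #   prms = meas.rms()
    #   spl = 20 * np.log10(prms / 20e-6)
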
    def rawData(self, channels=None, **kwargs):
        """Returns the raw data as stored in the measurement file,
        without any transformations applied.

        Args:
            channels: list, or tuple of channel numbers to export. If not
                defined, all channels in the measurement are returned.

        Returns:
            Numpy array with data. The first axis is always the time instance,
            the second axis the channel number.
        """
        if channels is None:
            channels = list(range(self.nchannels))

        rawdata = []

        with self.file() as f:
            for block in IterRawData(f, channels, **kwargs):
                rawdata.append(block)

        return np.concatenate(rawdata, axis=0)

    def iterData(self, channels, **kwargs):
        """Iterate over blocks of scaled measurement data (sensitivity
        applied) for the given channels."""
        sensitivity = kwargs.pop("sensitivity", self.sensitivity)
        if channels is None:
            channels = list(range(self.nchannels))
        with self.file() as f:
            for block in IterData(f, channels, sensitivity, **kwargs):
                yield block

    def data(self, channels=None, **kwargs):
        """
        Returns the measurement data, scaled and sensitivity applied.
        """
        data = []
        for d in self.iterData(channels, **kwargs):
            data.append(d)

        return np.concatenate(data, axis=0)

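    # Usage sketch: for a Measurement instance `meas`, read only the first
    # second of scaled data of channels 0 and 1 (istart/istop are forwarded to
    # the underlying block iterator):
    #
    #   fs = int(meas.samplerate)
    #   first_second = meas.data(channels=[0, 1], istart=0, istop=fs)
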
    def CPS(self, channels=None, **kwargs):
        """
        Compute single-sided Cross-Power-Spectrum of the measurement channels.

        Args:
            channels: Channels to compute for (numbers)

        Optional arguments:
            nfft: FFT length
            window: Window type
            overlap: Overlap percentage (value between 0.0 and up to and
                including 100.0)
            weighting:

        Returns:
            Cross-power-spectra. C[freq, ch_i, ch_j] = C_ij
        """
        nfft = kwargs.pop("nfft", 2048)
        window = kwargs.pop("windowType", Window.WindowType.Hann)
        overlap = kwargs.pop("overlap", 50.0)

        if channels is None:
            channels = list(range(self.nchannels))

        nchannels = len(channels)
        aps = AvPowerSpectra(nfft, window, overlap)
        freq = getFreq(self.samplerate, nfft)

        for data in self.iterData(channels, **kwargs):
            CS = aps.compute(data)

        return freq, aps.get_est()

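    # Usage sketch: the auto-power spectrum of a channel lies on the diagonal
    # of the returned cross-power matrix (`meas` is a Measurement instance):
    #
    #   freq, C = meas.CPS(nfft=4096)
    #   aps_ch0 = np.real(C[:, 0, 0])
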
    def periodicAverage(self, N, channels=None, noiseCorrection=True, **kwargs):
        """
        Return the (coherent) periodic average of the measurement. This method
        is useful for a situation of periodic excitation.

        Args:
            N: The number of samples in one period. This value should
                correspond with the period of the excitation!
            noiseCorrection: whether to apply coherent averaging, according to
                the Sliding Window correlation method (SWiC): Telle et al.: A
                Novel Approach for Impulse Response Measurements with
                Time-Varying Noise. If set to False, just the arithmetic
                average is used.
        """
        # Create blocks of equal length N
        Ntot = self.N
        Nblocks = Ntot // N

        # TODO: This method grabs the whole measurement file into memory. Can
        # only be done with relatively small measurement files.
        signal = self.data(channels)

        # Estimate noise power in block
        blocks = [signal[i * N : (i + 1) * N] for i in range(Nblocks)]

        if noiseCorrection:
            # The difference between the measured signal in the previous block
            # and the current block
            en = [None] + [blocks[i] - blocks[i - 1] for i in range(1, Nblocks)]

            noise_est = [None] + [
                -np.average(en[i] * en[i + 1]) for i in range(1, len(en) - 1)
            ]

            # Create weighting coefficients: block weights are proportional to
            # the inverse of the estimated noise power, normalized to sum to 1.
            sum_inverse_noise = sum([1 / n for n in noise_est[1:]])
            c_n = [1 / (ni * sum_inverse_noise) for ni in noise_est[1:]]
        else:
            c_n = [1 / (Nblocks - 2)] * (Nblocks - 2)

        assert np.isclose(sum(c_n), 1.0)
        assert Nblocks - 2 == len(c_n)

        # Average signal over blocks
        avg = np.zeros((blocks[0].shape), dtype=float)
        for n in range(0, Nblocks - 2):
            avg += c_n[n] * blocks[n + 1]

        return avg

    def periodicCPS(self, N, channels=None, **kwargs):
        """
        Compute Cross-Spectral Density based on periodic excitation. Uses noise
        reduction by time-averaging the data.
        """
        if channels is None:
            channels = list(range(self.nchannels))

        nchannels = len(channels)
        window = Window.rectangular
        ps = PowerSpectra(N, window)

        avg = np.asfortranarray(self.periodicAverage(N, channels, **kwargs))
        CS = ps.compute(avg)
        freq = getFreq(self.samplerate, N)

        return freq, CS

    @property
    def sensitivity(self):
        """Sensitivity of the data in U^-1, from floating point data scaled
        between -1.0 and 1.0 to Units [U].

        If the sensitivity is not stored in the measurement file, this
        function returns 1.0 for each channel.
        """
        return self._sens

    @sensitivity.setter
    def sensitivity(self, sens):
        """Set the sensitivity of the measurement in the file.

        Args:
            sens: sensitivity data, should be a float, or an array of floats
                equal to the number of channels.
        """
        if isinstance(sens, float):
            # Put all sensitivities equal
            sens = sens * np.ones(self.nchannels)
        elif isinstance(sens, list):
            sens = np.asarray(sens)

        valid = sens.ndim == 1
        valid &= sens.shape[0] == self.nchannels
        valid &= sens.dtype == float
        if not valid:
            raise ValueError("Invalid sensitivity value(s) given")
        with self.file("r+") as f:
            f.attrs["sensitivity"] = sens
            self._sens = sens

    def checkOverflow(self, channels):
        """Coarse check for overflow in measurement.

        Returns:
            True if overflow is possible, else False
        """
        for block in self.iterData(channels):
            dtype = block.dtype
            if dtype.kind == "i":
                # minvalue = np.iinfo(dtype).min
                maxvalue = np.iinfo(dtype).max
                if np.max(np.abs(block)) >= 0.9 * maxvalue:
                    return True
            else:
                # Cannot check for floating point values.
                return False
        return False

    def exportAsWave(self, fn=None, force=False, dtype=None, normalize=False, **kwargs):
        """Export measurement file as wave. In case the measurement data is
        stored as floats, the values are scaled to the proper integer (PCM)
        data format.

        Args:
            fn: If given, this will be the filename to write to. If the
                filename does not end with '.wav', this extension is added.

            force: If True, overwrites any existing files with the given name,
                otherwise a RuntimeError is raised.

            dtype: if not None, convert data to this data type.
                Options are 'int16', 'int32', 'float32'.

            normalize: If set: normalize the level to something sensible.
        """
        if fn is None:
            fn = self.fn
            fn = os.path.splitext(fn)[0]

        if os.path.splitext(fn)[1] != ".wav":
            fn += ".wav"

        if os.path.exists(fn) and not force:
            raise RuntimeError(f"File already exists: {fn}")

        if not np.isclose(self.samplerate % 1, 0):
            raise RuntimeError(
                "Sample rates should be approximately integer for exporting to Wave to work"
            )

        # TODO: With VERY large measurement files, this is not possible! Is
        # this a theoretical case?
        # TODO: add sensitivity? Then use self.data() instead of self.rawData()
        data = self.rawData(**kwargs)

        if normalize:
            # Scale back to maximum of absolute value
            maxabs = np.max(np.abs(data))
            data = data / maxabs  # "data /= maxabs" fails if dtypes differ

        if dtype is None:
            dtype = data.dtype  # keep existing
            logging.debug(f"dtype not passed as arg; using dtype = {dtype}")

        # dtype conversion
        if dtype == "int16":
            newtype = np.int16
            newsampwidth = 2
        elif dtype == "int32":
            newtype = np.int32
            newsampwidth = 4
        elif dtype == "float32":
            newtype = np.float32
        elif dtype == "float64":
            newtype = np.float64
        else:
            logging.debug(f"cannot handle this dtype {dtype}")
            pass

        # Convert range to [-1, 1]
        # TODO: this is wrong for float data where full scale > 1
        sensone = np.ones_like(self.sensitivity)
        data = scaleBlockSens(data, sensone)

        if dtype == "int16" or dtype == "int32":
            # Scale data to integer range and convert to integers
            scalefac = 2 ** (8 * newsampwidth - 1) - 1
            data = (data * scalefac).astype(newtype)

        wavfile.write(fn, int(self.samplerate), data.astype(newtype))

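    # Usage sketch: export to a 16-bit PCM wave file, overwriting an existing
    # file of the same name and normalizing to full scale (`meas` is a
    # Measurement instance):
    #
    #   meas.exportAsWave('out.wav', force=True, dtype='int16', normalize=True)
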
    @staticmethod
    def fromtxt(
        fn,
        skiprows,
        samplerate,
        sensitivity,
        mfn=None,
        timestamp=None,
        delimiter="\t",
        firstcoltime=True,
    ):
        """Converts a txt file to a LASP Measurement file, opens the associated
        Measurement object and returns it. The measurement file will have the
        same file name as the txt file, except with h5 extension.

        Args:
            fn: Filename of text file
            skiprows: Number of header rows in text file to skip
            samplerate: Sampling frequency in [Hz]
            sensitivity: 1D array of channel sensitivities
            mfn: Filepath where measurement file is stored. If not given,
                a h5 file will be created along fn, which shares its basename
            timestamp: If given, a custom timestamp for the measurement
                (integer containing seconds since epoch). If not given, the
                timestamp is obtained from the last modification time.
            delimiter: Column delimiter
            firstcoltime: If true, the first column is treated as the
                sample time.
        """
        if not os.path.exists(fn):
            raise ValueError(f"File {fn} does not exist.")
        if timestamp is None:
            timestamp = os.path.getmtime(fn)
        if mfn is None:
            mfn = os.path.splitext(fn)[0] + ".h5"
        else:
            mfn = os.path.splitext(mfn)[0] + ".h5"

        dat = np.loadtxt(fn, skiprows=skiprows, delimiter=delimiter)
        if firstcoltime:
            time = dat[:, 0]
            if not np.isclose(time[1] - time[0], 1 / samplerate):
                raise ValueError(
                    "Samplerate given does not agree with samplerate in file"
                )

            # Chop off first column
            dat = dat[:, 1:]
        nchannels = dat.shape[1]
        if nchannels != sensitivity.shape[0]:
            raise ValueError(
                f"Invalid sensitivity length given. Should be: {nchannels}"
            )

        with h5.File(mfn, "w") as hf:
            hf.attrs["samplerate"] = samplerate
            hf.attrs["sensitivity"] = sensitivity
            hf.attrs["time"] = timestamp
            hf.attrs["blocksize"] = 1
            hf.attrs["nchannels"] = nchannels
            ad = hf.create_dataset(
                "audio",
                (1, dat.shape[0], dat.shape[1]),
                dtype=dat.dtype,
                maxshape=(1, dat.shape[0], dat.shape[1]),
                compression="gzip",
            )
            ad[0] = dat
        return Measurement(mfn)

    @staticmethod
    def fromnpy(
        data,
        samplerate,
        sensitivity,
        mfn,
        timestamp=None,
        qtys: List[SIQtys] = None,
        channelNames: List[str] = None,
        force=False,
    ) -> Measurement:
        """
        Converts a numpy array to a LASP Measurement file, opens the
        associated Measurement object and returns it. The measurement file is
        written to `mfn`; an '.h5' extension is appended if not already
        present.

        Args:
            data: Numpy array, first axis is the sample, second axis is the
                channel. Can also be specified with a single column for
                single-channel data.

            samplerate: Sampling frequency in [Hz]

            sensitivity: 1D array of channel sensitivities in [U^-1], where U
                is the recorded unit.

            mfn: Filepath of the file where the data is stored.

            timestamp: If given, a custom timestamp for the measurement
                (integer containing seconds since epoch).

            qtys: List of physical quantities, one for each channel. If not
                given, each channel defaults to SIQtys.AP.

            channelNames: Name of the channels

            force: If True, overwrites existing files with specified `mfn`
                name.
        """
        if os.path.splitext(mfn)[1] != ".h5":
            mfn += ".h5"
        if os.path.exists(mfn) and not force:
            raise ValueError(f"File {mfn} already exists.")
        if timestamp is None:
            timestamp = int(time.time())

        if data.ndim != 2:
            data = data[:, np.newaxis]

        try:
            len(sensitivity)
        except TypeError:
            raise ValueError("Sensitivity should be given as array-like data type")
        sensitivity = np.asarray(sensitivity)

        nchannels = data.shape[1]
        if nchannels != sensitivity.shape[0]:
            raise ValueError(
                f"Invalid sensitivity length given. Should be: {nchannels}"
            )

        if channelNames is not None:
            if len(channelNames) != nchannels:
                raise RuntimeError("Illegal length of channelNames list given")

        if qtys is None:
            qtys = [SIQtys.AP] * nchannels
        else:
            if len(qtys) != nchannels:
                raise RuntimeError("Illegal length of qtys list given")

        qtyvals = [qty.value for qty in qtys]

        with h5.File(mfn, "w") as hf:
            hf.attrs["samplerate"] = samplerate
            hf.attrs["sensitivity"] = sensitivity
            hf.attrs["time"] = timestamp
            hf.attrs["blocksize"] = 1
            hf.attrs["nchannels"] = nchannels

            # Add physical quantity indices
            hf.attrs["qtys_enum_idx"] = [qtyval.toInt() for qtyval in qtyvals]

            # Add channel names in case given
            if channelNames is not None:
                hf.attrs["channelNames"] = channelNames

            ad = hf.create_dataset(
                "audio",
                (1, data.shape[0], data.shape[1]),
                dtype=data.dtype,
                maxshape=(1, data.shape[0], data.shape[1]),
                compression="gzip",
            )
            ad[0] = data
        return Measurement(mfn)

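    # Usage sketch (all values below are illustrative only): store a
    # two-channel, two-second sine signal as a measurement file:
    #
    #   fs = 48000
    #   t = np.arange(2 * fs) / fs
    #   sig = np.stack([np.sin(2 * np.pi * 1000 * t)] * 2, axis=1)
    #   meas = Measurement.fromnpy(sig, samplerate=fs, sensitivity=np.ones(2),
    #                              mfn='sine.h5', force=True)
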
    @staticmethod
    def fromWaveFile(fn, newfn=None, force=False, timestamp=None):
        """Convert a wave file to a measurement file, and return the
        measurement handle."""
        if timestamp is None:
            timestamp = int(time.time())

        base_fn = os.path.splitext(fn)[0]
        if newfn is None:
            newfn = base_fn + ".h5"
        if os.path.exists(newfn) and not force:
            raise RuntimeError(
                f'Measurement file name {newfn} already exists in path, set "force" to true to overwrite'
            )

        samplerate, data = wavfile.read(fn)
        if data.ndim == 2:
            nframes, nchannels = data.shape
        else:
            nchannels = 1
            nframes = len(data)
            data = data[:, np.newaxis]
        sensitivity = np.ones(nchannels)

        with h5.File(newfn, "w") as hf:
            hf.attrs["samplerate"] = samplerate
            hf.attrs["nchannels"] = nchannels
            hf.attrs["time"] = timestamp
            hf.attrs["blocksize"] = 1
            hf.attrs["sensitivity"] = sensitivity
            ad = hf.create_dataset(
                "audio",
                (1, nframes, nchannels),
                dtype=data.dtype,
                maxshape=(1, nframes, nchannels),
                compression="gzip",
            )
            ad[0] = data

        return Measurement(newfn)