In-between pybind11 commit. Nothing working.

Anne de Jong 2022-06-13 19:35:41 +02:00
parent f635cac209
commit 7eaaf43653
4 changed files with 417 additions and 448 deletions

View File

@@ -144,8 +144,9 @@ set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py")
set(DEPS "${CMAKE_CURRENT_SOURCE_DIR}/*.py"
"${CMAKE_CURRENT_SOURCE_DIR}/lasp/*.py"
"wrappers"
"lasp_daq")
# "wrappers"
# "lasp_device_lib")
)
set(OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/build/timestamp")

View File

@@ -9,8 +9,8 @@
#include <optional>
/**
* @brief Data coming from / going to DAQ. Non-interleaved format, which means
* data in buffer is ordered by channel: _ptr[sample+channel*nsamples]
* @brief Data coming from / going to DAQ. **Non-interleaved format**, which means
* data in buffer is ordered by channel: _ptr[sample+channel*nframes]
*/
class DaqData {
protected:
@@ -38,24 +38,13 @@ public:
us size_bytes() const { return _data.size(); }
};
/**
* @brief Typed DaqData, in which the type is specified
*
* @tparam T
*/
template <typename T> class TypedDaqData : public DaqData {
T *data() { return static_cast<T *>(_data.data()); }
};
/**
* @brief Callback of DAQ. Called with arguments of a vector of data
* spans for each channel, and a datatype descriptor. Callback should return
* false for a stop request.
*/
using ChannelView = std::vector<gsl::span<uint8_t>>;
using DaqCallback =
std::function<bool(ChannelView channel_data,
const DataTypeDescriptor &dtype_descr)>;
std::function<bool(std::unique_ptr<DaqData>)>;
/**
* @brief Base class for all DAQ instances

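For orientation, the following is a minimal, hypothetical sketch of a handler matching the new DaqCallback signature introduced above (std::function<bool(std::unique_ptr<DaqData>)>). It is not part of this commit; it assumes TypedDaqData<double>::data() is publicly accessible and that the block was recorded as dtype_fl64 in the non-interleaved layout described in the class comment.

// Hypothetical example, not part of this commit. Assumes the DAQ header
// declaring DaqData, TypedDaqData and DaqCallback has been included.
#include <iostream>
#include <memory>

DaqCallback printFirstSample = [](std::unique_ptr<DaqData> block) {
  // The downcast is only valid if the block really holds dtype_fl64 samples,
  // i.e. it was created as a TypedDaqData<double> (assumption).
  auto *typed = static_cast<TypedDaqData<double> *>(block.get());
  // Non-interleaved layout: sample s of channel c lives at data()[s + c * nframes].
  std::cout << "first sample of channel 0: " << typed->data()[0] << std::endl;
  return true; // returning false requests the stream to stop
};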
View File

@@ -1,14 +1,17 @@
#if LASP_HAS_RTAUDIO == 1
#include "lasp_rtaudiodaq.h"
#if LASP_HAS_RTAUDIO == 1
#include "lasp_daqconfig.h"
#include <RtAudio.h>
#include <atomic>
#include <cassert>
#include <cstdint>
#include <thread>
using std::atomic;
using std::cerr;
using std::endl;
using std::atomic;
using std::runtime_error;
using std::vector;
void fillRtAudioDeviceInfo(vector<DeviceInfo> &devinfolist) {
@@ -25,6 +28,7 @@ void fillRtAudioDeviceInfo(vector<DeviceInfo> &devinfolist) {
// Device capabilities not successfully probed. Continue to next
continue;
}
// "Our device info struct"
DeviceInfo d;
switch (api) {
case RtAudio::LINUX_ALSA:
@@ -66,19 +70,24 @@ void fillRtAudioDeviceInfo(vector<DeviceInfo> &devinfolist) {
RtAudioFormat formats = devinfo.nativeFormats;
if (formats & RTAUDIO_SINT8) {
d.availableDataTypes.push_back(dtype_int8);
d.availableDataTypes.push_back(
DataTypeDescriptor::DataType::dtype_int8);
}
if (formats & RTAUDIO_SINT16) {
d.availableDataTypes.push_back(dtype_int16);
d.availableDataTypes.push_back(
DataTypeDescriptor::DataType::dtype_int16);
}
/* if (formats & RTAUDIO_SINT32) { */
/* d.availableDataTypes.push_back(DataTypeDescriptor::DataType::dtype_int24);
*/
/* } */
if (formats & RTAUDIO_SINT32) {
d.availableDataTypes.push_back(dtype_int24);
}
if (formats & RTAUDIO_SINT32) {
d.availableDataTypes.push_back(dtype_fl32);
d.availableDataTypes.push_back(
DataTypeDescriptor::DataType::dtype_fl32);
}
if (formats & RTAUDIO_FLOAT64) {
d.availableDataTypes.push_back(dtype_fl64);
d.availableDataTypes.push_back(
DataTypeDescriptor::DataType::dtype_fl64);
}
if (d.availableDataTypes.size() == 0) {
std::cerr << "RtAudio: No data types found in device!" << endl;
@@ -86,11 +95,7 @@ void fillRtAudioDeviceInfo(vector<DeviceInfo> &devinfolist) {
d.prefDataTypeIndex = d.availableDataTypes.size() - 1;
d.availableFramesPerBlock.push_back(512);
d.availableFramesPerBlock.push_back(1024);
d.availableFramesPerBlock.push_back(2048);
d.availableFramesPerBlock.push_back(4096);
d.availableFramesPerBlock.push_back(8192);
d.availableFramesPerBlock = {512, 1024, 2048, 4096, 8192};
d.prefFramesPerBlockIndex = 1;
devinfolist.push_back(d);
@@ -103,211 +108,113 @@ int mycallback(void *outputBuffer, void *inputBuffer, unsigned int nFrames,
void myerrorcallback(RtAudioError::Type, const string &errorText);
class AudioDaq : public Daq {
class RtAudioDaq : public Daq {
SafeQueue<void *> *inqueue = NULL;
SafeQueue<void *> *outqueue = NULL;
SafeQueue<void *> *outDelayqueue = NULL;
RtAudio rtaudio;
const us nFramesPerBlock;
RtAudio *rtaudio = NULL;
RtAudio::StreamParameters *instreamparams = nullptr;
RtAudio::StreamParameters *outstreamparams = nullptr;
us nFramesPerBlock;
RtAudioDaq(const RtAudioDaq &) = delete;
RtAudioDaq &operator=(const RtAudioDaq &) = delete;
public:
AudioDaq(const DeviceInfo &devinfo, const DaqConfiguration &config)
: Daq(devinfo, config) {
RtAudioDaq(const DeviceInfo &devinfo, const DaqConfiguration &config)
: Daq(devinfo, config),
rtaudio(static_cast<RtAudio::Api>(devinfo.api.api_specific_subcode)),
nFramesPerBlock(Daq::framesPerBlock()) {
nFramesPerBlock = this->framesPerBlock();
std::unique_ptr<RtAudio::StreamParameters> inParams, outParams;
// We make sure not to run RtAudio in duplex mode. This seems to be buggy
// and untested. Better to use a hardware-type loopback into the system.
if (neninchannels(false) > 0 && nenoutchannels() > 0) {
throw runtime_error("RtAudio backend cannot run in duplex mode.");
}
assert(!monitorOutput);
if (neninchannels(false) > 0) {
instreamparams = new RtAudio::StreamParameters();
instreamparams->nChannels = getHighestInChannel() + 1;
if (instreamparams->nChannels < 1) {
if (neninchannels() > 0) {
inParams = std::make_unique<RtAudio::StreamParameters>();
// +1 to get the count.
inParams->nChannels = getHighestInChannel() + 1;
if (inParams->nChannels < 1) {
throw runtime_error("Invalid input number of channels");
}
instreamparams->firstChannel = 0;
instreamparams->deviceId = devinfo.api_specific_devindex;
}
inParams->firstChannel = 0;
inParams->deviceId = devinfo.api_specific_devindex;
if (nenoutchannels() > 0) {
outstreamparams = new RtAudio::StreamParameters();
outstreamparams->nChannels = getHighestOutChannel() + 1;
if (outstreamparams->nChannels < 1) {
} else {
outParams = std::make_unique<RtAudio::StreamParameters>();
outParams->nChannels = getHighestOutChannel() + 1;
if (outParams->nChannels < 1) {
throw runtime_error("Invalid output number of channels");
}
outstreamparams->firstChannel = 0;
outstreamparams->deviceId = devinfo.api_specific_devindex;
outParams->firstChannel = 0;
outParams->deviceId = devinfo.api_specific_devindex;
}
RtAudio::StreamOptions streamoptions;
streamoptions.flags = RTAUDIO_NONINTERLEAVED | RTAUDIO_HOG_DEVICE;
streamoptions.flags = RTAUDIO_HOG_DEVICE;
streamoptions.numberOfBuffers = 2;
streamoptions.streamName = "RtAudio stream";
streamoptions.priority = 0;
RtAudioFormat format;
const DataType dtype = DataType();
using Dtype = DataTypeDescriptor::DataType;
const Dtype dtype = dataType();
switch (dtype) {
case dtype_fl32:
case Dtype::dtype_fl32:
format = RTAUDIO_FLOAT32;
break;
case dtype_fl64:
case Dtype::dtype_fl64:
format = RTAUDIO_FLOAT64;
break;
case dtype_int8:
case Dtype::dtype_int8:
format = RTAUDIO_SINT8;
break;
case dtype_int16:
case Dtype::dtype_int16:
format = RTAUDIO_SINT16;
break;
case dtype_int32:
case Dtype::dtype_int32:
format = RTAUDIO_SINT32;
break;
default:
throw runtime_error("Invalid data type");
throw runtime_error("Invalid data type specified for DAQ stream.");
break;
}
try {
rtaudio = new RtAudio((RtAudio::Api)devinfo.api.api_specific_subcode);
if (!rtaudio) {
throw runtime_error("RtAudio allocation failed");
}
rtaudio->openStream(outstreamparams, instreamparams, format,
(us)samplerate(), (unsigned *)&nFramesPerBlock,
// Copy here, as it is used to return the *actual* number of frames per
// block.
unsigned int nFramesPerBlock_copy = nFramesPerBlock;
// Final step: open the stream.
rtaudio.openStream(&(*outParams), &(*inParams), format,
static_cast<us>(samplerate()), &nFramesPerBlock_copy,
&mycallback, (void *)this, &streamoptions,
&myerrorcallback);
} catch (RtAudioError &e) {
if (rtaudio)
delete rtaudio;
if (instreamparams)
delete instreamparams;
if (outstreamparams)
delete outstreamparams;
throw;
}
if (monitorOutput) {
outDelayqueue = new SafeQueue<void *>();
}
}
friend int mycallback(void *outputBuffer, void *inputBuffer,
int streamCallback(void *outputBuffer, void *inputBuffer,
unsigned int nFrames, double streamTime,
RtAudioStreamStatus status, void *userData);
RtAudioStreamStatus status) {
void start(SafeQueue<void *> *inqueue, SafeQueue<void *> *outqueue) {
this->inqueue = inqueue;
this->outqueue = outqueue;
if (monitorOutput) {
this->outDelayqueue = new SafeQueue<void *>();
}
auto dtype = dataType();
us neninchannels_inc_mon = neninchannels();
us nenoutchannels = this->nenoutchannels();
if (isRunning()) {
throw runtime_error("Stream already running");
}
if (neninchannels(false) > 0 && !inqueue) {
throw runtime_error("inqueue argument not given");
}
if (nenoutchannels() > 0 && !outqueue) {
throw runtime_error("outqueue argument not given");
}
assert(rtaudio);
rtaudio->startStream();
}
void stop() {
if (!isRunning()) {
cerr << "Stream is already stopped" << endl;
} else {
assert(rtaudio);
rtaudio->stopStream();
}
if (inqueue) {
inqueue = nullptr;
}
if (outqueue) {
outqueue = nullptr;
}
if (outDelayqueue) {
delete outDelayqueue;
outDelayqueue = nullptr;
}
}
bool isRunning() const { return (rtaudio && rtaudio->isStreamRunning()); }
~AudioDaq() {
assert(rtaudio);
if (isRunning()) {
stop();
}
if (rtaudio->isStreamOpen()) {
rtaudio->closeStream();
}
if (rtaudio)
delete rtaudio;
if (outDelayqueue)
delete outDelayqueue;
if (instreamparams)
delete instreamparams;
if (outstreamparams)
delete outstreamparams;
}
};
Daq *createRtAudioDevice(const DeviceInfo &devinfo,
const DaqConfiguration &config) {
AudioDaq *daq = NULL;
try {
daq = new AudioDaq(devinfo, config);
} catch (runtime_error &e) {
if (daq)
delete daq;
throw;
}
return daq;
}
int mycallback(void *outputBuffervoid, void *inputBuffervoid,
unsigned int nFrames, double streamTime,
RtAudioStreamStatus status, void *userData) {
u_int8_t *inputBuffer = (u_int8_t *)inputBuffervoid;
u_int8_t *outputBuffer = (u_int8_t *)outputBuffervoid;
AudioDaq *daq = (AudioDaq *)userData;
DataType dtype = daq->dataType();
us neninchannels_inc_mon = daq->neninchannels();
us nenoutchannels = daq->nenoutchannels();
bool monitorOutput = daq->monitorOutput;
us bytesperchan = dtype_map.at(dtype).sw * nFrames;
us monitorOffset = ((us)monitorOutput) * bytesperchan;
SafeQueue<void *> *inqueue = daq->inqueue;
SafeQueue<void *> *outqueue = daq->outqueue;
SafeQueue<void *> *outDelayqueue = daq->outDelayqueue;
const boolvec &eninchannels = daq->eninchannels;
const boolvec &enoutchannels = daq->enoutchannels;
if (inputBuffer || monitorOutput) {
if (inputBuffer) {
u_int8_t *inbuffercpy =
(u_int8_t *)malloc(bytesperchan * neninchannels_inc_mon);
if (inputBuffer) {
us j = 0; // OUR buffer channel counter
us i = 0; // RtAudio channel counter
for (int ch = daq->getLowestInChannel(); ch <= daq->getHighestInChannel();
ch++) {
for (int ch = getLowestInChannel(); ch <= getHighestInChannel(); ch++) {
if (eninchannels[ch]) {
memcpy(&(inbuffercpy[monitorOffset + j * bytesperchan]),
&(inputBuffer[i * bytesperchan]), bytesperchan);
@@ -316,24 +223,9 @@ int mycallback(void *outputBuffervoid, void *inputBuffervoid,
i++;
}
}
if (monitorOutput) {
assert(outDelayqueue);
if (!daq->outDelayqueue->empty()) {
void *dat = daq->outDelayqueue->dequeue();
memcpy((void *)inbuffercpy, dat, bytesperchan);
free(dat);
} else {
cerr << "Warning: output delay queue appears empty!" << endl;
memset(inbuffercpy, 0, bytesperchan);
}
}
assert(inqueue);
inqueue->enqueue(inbuffercpy);
}
if (outputBuffer) {
assert(outqueue);
if (!outqueue->empty()) {
u_int8_t *outbuffercpy = (u_int8_t *)outqueue->dequeue();
us j = 0; // OUR buffer channel counter
@@ -362,8 +254,90 @@ int mycallback(void *outputBuffervoid, void *inputBuffervoid,
}
return 0;
}
}
void myerrorcallback(RtAudioError::Type, const string &errorText) {
virtual void
start(std::optional<DaqCallback> inCallback,
std::optional<DaqCallback> outCallback) {
assert(!monitorOutput);
if (isRunning()) {
throw runtime_error("Stream already running");
}
if (neninchannels(false) > 0 && !inqueue) {
throw runtime_error("inqueue argument not given");
}
if (nenoutchannels() > 0 && !outqueue) {
throw runtime_error("outqueue argument not given");
}
assert(rtaudio);
rtaudio->startStream();
}
void stop() {
if (!isRunning()) {
cerr << "Stream is already stopped" << endl;
} else {
assert(rtaudio);
rtaudio->stopStream();
}
if (inqueue) {
inqueue = nullptr;
}
if (outqueue) {
outqueue = nullptr;
}
if (outDelayqueue) {
delete outDelayqueue;
outDelayqueue = nullptr;
}
}
bool isRunning() const { return (rtaudio && rtaudio->isStreamRunning()); }
~RtAudioDaq() {
assert(rtaudio);
if (isRunning()) {
stop();
}
if (rtaudio->isStreamOpen()) {
rtaudio->closeStream();
}
}
};
Daq *createRtAudioDevice(const DeviceInfo &devinfo,
const DaqConfiguration &config) {
AudioDaq *daq = NULL;
try {
daq = new AudioDaq(devinfo, config);
} catch (runtime_error &e) {
if (daq)
delete daq;
throw;
}
return daq;
}
int mycallback(void *outputBuffervoid, void *inputBuffervoid,
unsigned int nFrames, double streamTime,
RtAudioStreamStatus status, void *userData) {
void myerrorcallback(RtAudioError::Type, const string &errorText) {
cerr << errorText << endl;
}
}
int mycallback(void *outputBuffer, void *inputBuffer, unsigned int nFrames,
double streamTime, RtAudioStreamStatus status,
void *userData) {
return static_cast<RtAudioDaq *>(userData)->streamCallback(
outputBuffer, inputBuffer, nFrames, streamTime, status);
}
#endif // LASP_HAS_RTAUDIO == 1
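To show how the refactored RtAudio backend is meant to be driven, here is a hypothetical end-to-end sketch. Only fillRtAudioDeviceInfo, createRtAudioDevice, start, stop and the DaqCallback type appear in the diff above; the DaqConfiguration constructor taking a DeviceInfo and a virtual destructor on Daq are assumptions made for illustration.

// Hypothetical driver code, not part of this commit.
#include <memory>
#include <optional>
#include <vector>

void runInputOnlyStream() {
  std::vector<DeviceInfo> devs;
  fillRtAudioDeviceInfo(devs); // enumerate RtAudio devices
  if (devs.empty())
    return;

  DaqConfiguration config(devs[0]); // assumed constructor taking a DeviceInfo
  std::unique_ptr<Daq> daq(createRtAudioDevice(devs[0], config));

  // Input-only stream: the RtAudioDaq constructor above rejects duplex operation.
  daq->start(
      [](std::unique_ptr<DaqData> block) {
        // consume one non-interleaved block of samples here
        return true; // false requests a stop
      },
      std::nullopt);

  // ... acquire for a while, then:
  daq->stop();
} // destroying the unique_ptr assumes Daq has a virtual destructor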

View File

@@ -62,7 +62,7 @@ class DT9837A : public Daq {
void threadFcn(std::optional<DaqCallback> inCallback,
std::optional<DaqCallback> outcallback);
public:
public:
DT9837A(const DeviceInfo &devinfo, const DaqConfiguration &config);
~DT9837A() {
@@ -118,7 +118,7 @@ void DT9837A::start(std::optional<DaqCallback> inCallback,
}
class BufHandler {
protected:
protected:
DaqDeviceHandle _handle;
const DataTypeDescriptor dtype_descr;
us nchannels, nFramesPerBlock;
@@ -130,7 +130,7 @@ class BufHandler {
us totalFramesCount = 0;
long long buffer_mid_idx;
public:
public:
BufHandler(DaqDeviceHandle handle, const DataTypeDescriptor dtype_descr,
const us nchannels, const us nFramesPerBlock, DaqCallback cb,
const double samplerate)
@@ -145,7 +145,8 @@ class BufHandler {
};
class InBufHandler : public BufHandler {
bool monitorOutput;
public:
public:
InBufHandler(DT9837A &daq, DaqCallback cb)
: BufHandler(daq._handle, daq.dtypeDescr(), daq.neninchannels(),
daq._nFramesPerBlock, cb, daq.samplerate())
@@ -207,29 +208,34 @@ class InBufHandler : public BufHandler {
topenqueued = false;
}
void operator()() {
ChannelView cv(nchannels);
bool operator()() {
auto runCallback = ([&](us totalOffset) {
us monitoroffset = monitorOutput ? 1 : 0;
for (us channel = monitoroffset; channel < (nchannels - monitoroffset);
channel++) {
cv[channel] =
gsl::span(reinterpret_cast<uint8_t *>(
&buf[totalOffset + channel * nFramesPerBlock]),
nFramesPerBlock * sizeof(double));
}
DaqData data(nchannels, nFramesPerBlock,
DataTypeDescriptor::DataType::dtype_fl64);
us ch_no = 0;
if (monitorOutput) {
cv[0] = gsl::span(
reinterpret_cast<uint8_t *>(
&buf[totalOffset + (nchannels - 1) * nFramesPerBlock]),
nFramesPerBlock * sizeof(double));
}
cb(cv, dtype_descr);
});
/* if(mon */
/* for (us channel = monitoroffset; channel < (nchannels - monitoroffset);
*/
/* channel++) { */
/* cv[channel] = */
/* gsl::span(reinterpret_cast<uint8_t *>( */
/* &buf[totalOffset + channel * nFramesPerBlock]), */
/* nFramesPerBlock * sizeof(double)); */
/* } */
/* cv[0] = gsl::span( */
/* cb(cv, dtype_descr); */
});
ScanStatus status;
TransferStatus transferStatus;
@@ -262,20 +268,19 @@ class InBufHandler : public BufHandler {
topenqueued = true;
}
}
}
~InBufHandler() {
}
~InBufHandler() {
// At exit of the function, stop scanning.
DEBUGTRACE_ENTER;
UlError err = ulDaqInScanStop(_handle);
if (err != ERR_NO_ERROR) {
showErr(err);
}
}
}
};
class OutBufHandler : public BufHandler {
public:
public:
OutBufHandler(DT9837A &daq, DaqCallback cb)
: BufHandler(daq._handle, daq.dtypeDescr(), daq.neninchannels(),
daq._nFramesPerBlock, cb, daq.samplerate()) {
@@ -471,7 +476,7 @@ DT9837A::DT9837A(const DeviceInfo &devinfo, const DaqConfiguration &config)
throw runtime_error("Fatal: could not set IEPE mode");
}
}
}
}
void fillUlDaqDeviceInfo(std::vector<DeviceInfo> &devinfolist) {
DEBUGTRACE_ENTER;
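In this in-between commit the per-channel copy inside InBufHandler::operator() is still commented out: the DaqData block is constructed but never filled. A hypothetical sketch of what that copy could look like follows. The channel-major indexing of buf mirrors the removed gsl::span code above, while the raw_ptr() accessor on DaqData is purely an assumption; no such accessor appears in this diff.

// Hypothetical completion of the commented-out copy, not part of this commit.
// raw_ptr() is an assumed accessor returning DaqData's underlying byte buffer.
#include <cstring>

static void copyBlockToDaqData(const double *buf, us totalOffset, us nchannels,
                               us nFramesPerBlock, DaqData &block) {
  for (us channel = 0; channel < nchannels; channel++) {
    const double *src = &buf[totalOffset + channel * nFramesPerBlock];
    // Destination is non-interleaved as well: channel-contiguous runs of
    // nFramesPerBlock double samples.
    std::memcpy(block.raw_ptr() + channel * nFramesPerBlock * sizeof(double),
                src, nFramesPerBlock * sizeof(double));
  }
}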