From 20c40a70d5e0b0e20209972eb045436b1f800ab8 Mon Sep 17 00:00:00 2001
From: Luke Campagnola <>
Date: Wed, 4 Apr 2012 21:03:31 -0400
Subject: [PATCH] Added flowchart filter functions

---
 examples/Flowchart.py          |  97 +++++++++------
 flowchart/library/functions.py | 222 ++++++++++++++++++++++++++++++++++
 2 files changed, 284 insertions(+), 35 deletions(-)
 create mode 100644 flowchart/library/functions.py

diff --git a/examples/Flowchart.py b/examples/Flowchart.py
index 2b4613f4..a80d9e74 100644
--- a/examples/Flowchart.py
+++ b/examples/Flowchart.py
@@ -1,61 +1,88 @@
 # -*- coding: utf-8 -*-
-import sys, os
-
-## Make sure pyqtgraph is importable
-p = os.path.dirname(os.path.abspath(__file__))
-p = os.path.join(p, '..', '..')
-sys.path.insert(0, p)
+import initExample ## Add path to library (just for examples; you do not need this)
 
 from pyqtgraph.flowchart import Flowchart
-from pyqtgraph.Qt import QtGui
-
-#import pyqtgraph.flowchart as f
+from pyqtgraph.Qt import QtGui, QtCore
+import pyqtgraph as pg
+import numpy as np
 
 app = QtGui.QApplication([])
 
-#TETRACYCLINE = True
+
+win = QtGui.QMainWindow()
+cw = QtGui.QWidget()
+win.setCentralWidget(cw)
+layout = QtGui.QGridLayout()
+cw.setLayout(layout)
 
 fc = Flowchart(terminals={
     'dataIn': {'io': 'in'},
     'dataOut': {'io': 'out'}
 })
 w = fc.widget()
-w.resize(400,200)
-w.show()
 
-n1 = fc.createNode('Add', pos=(0,-80))
-n2 = fc.createNode('Subtract', pos=(140,-10))
-n3 = fc.createNode('Abs', pos=(0, 80))
-n4 = fc.createNode('Add', pos=(140,100))
+layout.addWidget(w, 0, 0, 2, 1)
 
-fc.connectTerminals(fc.dataIn, n1.A)
-fc.connectTerminals(fc.dataIn, n1.B)
-fc.connectTerminals(fc.dataIn, n2.A)
-fc.connectTerminals(n1.Out, n4.A)
-fc.connectTerminals(n1.Out, n2.B)
-fc.connectTerminals(n2.Out, n3.In)
-fc.connectTerminals(n3.Out, n4.B)
-fc.connectTerminals(n4.Out, fc.dataOut)
+pw1 = pg.PlotWidget()
+pw2 = pg.PlotWidget()
+layout.addWidget(pw1, 0, 1)
+layout.addWidget(pw2, 1, 1)
+
+win.show()
 
-def process(**kargs):
-    return fc.process(**kargs)
+data = np.random.normal(size=1000)
+data[200:300] += 1
+data += np.sin(np.linspace(0, 100, 1000))
+
+fc.setInput(dataIn=data)
+
+pw1Node = fc.createNode('PlotWidget', pos=(0, -150))
+pw1Node.setPlot(pw1)
+
+pw2Node = fc.createNode('PlotWidget', pos=(150, -150))
+pw2Node.setPlot(pw2)
+
+fNode = fc.createNode('GaussianFilter', pos=(0, 0))
+fc.connectTerminals(fc.dataIn, fNode.In)
+fc.connectTerminals(fc.dataIn, pw1Node.In)
+fc.connectTerminals(fNode.Out, pw2Node.In)
+fc.connectTerminals(fNode.Out, fc.dataOut)
+
+
+#n1 = fc.createNode('Add', pos=(0,-80))
+#n2 = fc.createNode('Subtract', pos=(140,-10))
+#n3 = fc.createNode('Abs', pos=(0, 80))
+#n4 = fc.createNode('Add', pos=(140,100))
+
+#fc.connectTerminals(fc.dataIn, n1.A)
+#fc.connectTerminals(fc.dataIn, n1.B)
+#fc.connectTerminals(fc.dataIn, n2.A)
+#fc.connectTerminals(n1.Out, n4.A)
+#fc.connectTerminals(n1.Out, n2.B)
+#fc.connectTerminals(n2.Out, n3.In)
+#fc.connectTerminals(n3.Out, n4.B)
+#fc.connectTerminals(n4.Out, fc.dataOut)
+
+
+#def process(**kargs):
+    #return fc.process(**kargs)
 
-print process(dataIn=7)
+#print process(dataIn=7)
 
-fc.setInput(dataIn=3)
+#fc.setInput(dataIn=3)
 
-s = fc.saveState()
-fc.clear()
+#s = fc.saveState()
+#fc.clear()
 
-fc.restoreState(s)
+#fc.restoreState(s)
 
-fc.setInput(dataIn=3)
+#fc.setInput(dataIn=3)
 
-#f.NodeMod.TETRACYCLINE = False
-if sys.flags.interactive == 0:
+## Start Qt event loop unless running in interactive mode or using pyside.
+import sys
+if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
     app.exec_()
-
diff --git a/flowchart/library/functions.py b/flowchart/library/functions.py
new file mode 100644
index 00000000..66709415
--- /dev/null
+++ b/flowchart/library/functions.py
@@ -0,0 +1,222 @@
+import numpy as np
+from numpy import ndarray
+import scipy.signal
+from scipy import stats
+from pyqtgraph.metaarray import MetaArray  # import path assumed from the repository layout
+
+def downsample(data, n, axis=0, xvals='subsample'):
+    """Downsample by averaging points together across axis.
+    If multiple axes are specified, runs once per axis.
+    If a MetaArray is given, then the axis values can be either subsampled
+    or downsampled to match.
+    """
+    ma = None
+    if isinstance(data, MetaArray):
+        ma = data
+        data = data.view(ndarray)
+
+
+    if hasattr(axis, '__len__'):
+        if not hasattr(n, '__len__'):
+            n = [n]*len(axis)
+        for i in range(len(axis)):
+            data = downsample(data, n[i], axis[i])
+        return data
+
+    nPts = int(data.shape[axis] / n)
+    s = list(data.shape)
+    s[axis] = nPts
+    s.insert(axis+1, n)
+    sl = [slice(None)] * data.ndim
+    sl[axis] = slice(0, nPts*n)
+    d1 = data[tuple(sl)]
+    #print d1.shape, s
+    d1.shape = tuple(s)
+    d2 = d1.mean(axis+1)
+
+    if ma is None:
+        return d2
+    else:
+        info = ma.infoCopy()
+        if 'values' in info[axis]:
+            if xvals == 'subsample':
+                info[axis]['values'] = info[axis]['values'][::n][:nPts]
+            elif xvals == 'downsample':
+                info[axis]['values'] = downsample(info[axis]['values'], n)
+        return MetaArray(d2, info=info)
+
+
+def applyFilter(data, b, a, padding=100, bidir=True):
+    """Apply a linear filter with coefficients a, b. Optionally pad the data before filtering
+    and/or run the filter in both directions."""
+    d1 = data.view(ndarray)
+
+    if padding > 0:
+        d1 = np.hstack([d1[:padding], d1, d1[-padding:]])
+
+    if bidir:
+        d1 = scipy.signal.lfilter(b, a, scipy.signal.lfilter(b, a, d1)[::-1])[::-1]
+    else:
+        d1 = scipy.signal.lfilter(b, a, d1)
+
+    if padding > 0:
+        d1 = d1[padding:-padding]
+
+    if isinstance(data, MetaArray):
+        return MetaArray(d1, info=data.infoCopy())
+    else:
+        return d1
+
+def besselFilter(data, cutoff, order=1, dt=None, btype='low', bidir=True):
+    """Return data passed through a Bessel filter."""
+    if dt is None:
+        try:
+            tvals = data.xvals('Time')
+            dt = (tvals[-1]-tvals[0]) / (len(tvals)-1)
+        except:
+            raise Exception('Must specify dt for this data.')
+
+    b,a = scipy.signal.bessel(order, cutoff * dt, btype=btype)
+
+    return applyFilter(data, b, a, bidir=bidir)
+    #base = data.mean()
+    #d1 = scipy.signal.lfilter(b, a, data.view(ndarray)-base) + base
+    #if isinstance(data, MetaArray):
+        #return MetaArray(d1, info=data.infoCopy())
+    #return d1
+
+def butterworthFilter(data, wPass, wStop=None, gPass=2.0, gStop=20.0, order=1, dt=None, btype='low', bidir=True):
+    """Return data passed through a Butterworth filter."""
+    if dt is None:
+        try:
+            tvals = data.xvals('Time')
+            dt = (tvals[-1]-tvals[0]) / (len(tvals)-1)
+        except:
+            raise Exception('Must specify dt for this data.')
+
+    if wStop is None:
+        wStop = wPass * 2.0
+    ord, Wn = scipy.signal.buttord(wPass*dt*2., wStop*dt*2., gPass, gStop)
+    #print "butterworth ord %f Wn %f c %f sc %f" % (ord, Wn, cutoff, stopCutoff)
+    b,a = scipy.signal.butter(ord, Wn, btype=btype)
+
+    return applyFilter(data, b, a, bidir=bidir)
+
+
+def rollingSum(data, n):
+    """Return the sum of *data* over a sliding window of length *n*."""
+    d1 = np.cumsum(data) # running (integrated) sum
+    d2 = np.empty(len(d1) - n + 1, dtype=data.dtype)
+    d2[0] = d1[n-1] # sum of the first n points
+    d2[1:] = d1[n:] - d1[:-n] # difference of running sums gives each window's sum
+    return d2
+
+
+def mode(data, bins=None):
+    """Return the location of the histogram's maximum value (an estimate of the mode)."""
+    if bins is None:
+        bins = int(len(data)/10.)
+    if bins < 2:
+        bins = 2
+    y, x = np.histogram(data, bins=bins)
+    ind = np.argmax(y)
+    mode = 0.5 * (x[ind] + x[ind+1])
+    return mode
+
+def modeFilter(data, window=500, step=None, bins=None):
+    """Sliding-window mode filter: measure mode() over successive windows and return a piecewise-linear interpolation between those values."""
+    d1 = data.view(np.ndarray)
+    vals = []
+    l2 = int(window/2.)
+    if step is None:
+        step = l2
+    i = 0
+    while True:
+        if i > len(data)-step:
+            break
+        vals.append(mode(d1[i:i+window], bins))
+        i += step
+
+    chunks = [np.linspace(vals[0], vals[0], l2)]
+    for i in range(len(vals)-1):
+        chunks.append(np.linspace(vals[i], vals[i+1], step))
+    remain = len(data) - step*(len(vals)-1) - l2
+    chunks.append(np.linspace(vals[-1], vals[-1], remain))
+    d2 = np.hstack(chunks)
+
+    if isinstance(data, MetaArray):
+        return MetaArray(d2, info=data.infoCopy())
+    return d2
+
+def denoise(data, radius=2, threshold=4):
+    """Very simple noise removal function. Compares a point to surrounding points,
+    replaces it with nearby values if the difference is too large."""
+
+
+    r2 = radius * 2
+    d1 = data.view(ndarray)
+    d2 = data[radius:] - data[:-radius] # discrete derivative over `radius` samples
+    #d3 = data[r2:] - data[:-r2]
+    #d4 = d2 - d3
+    stdev = d2.std()
+    #print "denoise: stdev of derivative:", stdev
+    mask1 = d2 > stdev*threshold #where derivative is large and positive
+    mask2 = d2 < -stdev*threshold #where derivative is large and negative
+    maskpos = mask1[:-radius] * mask2[radius:] # large positive step followed by large negative step: upward spike
+    maskneg = mask1[radius:] * mask2[:-radius] # downward spike
+    mask = maskpos + maskneg
+    d5 = np.where(mask, d1[:-r2], d1[radius:-radius]) # where masked, replace the point with the value `radius` samples earlier
+    d6 = np.empty(d1.shape, dtype=d1.dtype) #add points back to the ends
+    d6[radius:-radius] = d5
+    d6[:radius] = d1[:radius]
+    d6[-radius:] = d1[-radius:]
+
+    if isinstance(data, MetaArray):
+        return MetaArray(d6, info=data.infoCopy())
+    return d6
+
+def adaptiveDetrend(data, x=None, threshold=3.0):
+    """Return the signal with baseline removed. Discards outliers from baseline measurement."""
+    if x is None:
+        x = data.xvals(0)
+
+    d = data.view(ndarray)
+
+    d2 = scipy.signal.detrend(d)
+
+    stdev = d2.std()
+    mask = abs(d2) < stdev*threshold
+    #d3 = where(mask, 0, d2)
+    #d4 = d2 - lowPass(d3, cutoffs[1], dt=dt)
+
+    lr = stats.linregress(x[mask], d[mask])
+    base = lr[1] + lr[0]*x
+    d4 = d - base
+
+    if isinstance(data, MetaArray):
+        return MetaArray(d4, info=data.infoCopy())
+    return d4
+
+
+def histogramDetrend(data, window=500, bins=50, threshold=3.0):
+    """Linear detrend. Works by finding the most common value at the beginning and end of a trace, excluding outliers."""
+
+    d1 = data.view(np.ndarray)
+    d2 = [d1[:window], d1[-window:]]
+    v = [0, 0]
+    for i in [0, 1]:
+        d3 = d2[i]
+        stdev = d3.std()
+        mask = abs(d3-np.median(d3)) < stdev*threshold
+        d4 = d3[mask]
+        y, x = np.histogram(d4, bins=bins)
+        ind = np.argmax(y)
+        v[i] = 0.5 * (x[ind] + x[ind+1])
+
+    base = np.linspace(v[0], v[1], len(data))
+    d5 = data.view(np.ndarray) - base
+
+    if isinstance(data, MetaArray):
+        return MetaArray(d5, info=data.infoCopy())
+    return d5
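
Usage sketch (illustrative, not part of the patch): besides being wrapped in flowchart nodes, the
filters added in flowchart/library/functions.py can be called directly on plain numpy arrays. The
snippet below assumes the module is importable as pyqtgraph.flowchart.library.functions; the alias
`fn` and the parameter values are arbitrary choices, not anything defined by the patch.

    import numpy as np
    from pyqtgraph.flowchart.library import functions as fn

    # same kind of test signal as examples/Flowchart.py builds
    data = np.random.normal(size=1000)
    data[200:300] += 1
    data += np.sin(np.linspace(0, 100, 1000))

    smoothed  = fn.besselFilter(data, cutoff=0.1, dt=1.0)          # low-pass Bessel filter
    decimated = fn.downsample(data, 10)                            # average every 10 samples
    detrended = fn.histogramDetrend(data, window=200, bins=50)     # subtract a linear baseline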