Merge pull request #1296 from campagnola/acq4-merge

update h5py deps in metaarray
commit 41f0623014
Luke Campagnola, 2020-07-06 02:34:23 -07:00 (committed by GitHub)


@@ -21,6 +21,13 @@ from ..python2_3 import basestring
USE_HDF5 = True
try:
import h5py
# Older h5py versions tucked Group and Dataset deeper inside the library:
if not hasattr(h5py, 'Group'):
import h5py.highlevel
h5py.Group = h5py.highlevel.Group
h5py.Dataset = h5py.highlevel.Dataset
HAVE_HDF5 = True
except:
USE_HDF5 = False
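
For context, the lines added above alias the old h5py.highlevel classes onto the h5py namespace, so later code (e.g. the readHDF5Meta checks further down) can test against h5py.Group / h5py.Dataset on any h5py version. A minimal standalone sketch of the same idea, outside MetaArray (the is_group helper is only illustrative):

# Minimal sketch of the compatibility shim: resolve Group/Dataset once,
# falling back to h5py.highlevel only on very old h5py releases.
try:
    import h5py
    if not hasattr(h5py, 'Group'):
        import h5py.highlevel
        h5py.Group = h5py.highlevel.Group
        h5py.Dataset = h5py.highlevel.Dataset
    HAVE_HDF5 = True
except ImportError:
    HAVE_HDF5 = False

def is_group(obj):
    # Safe to use the top-level name once the shim has run.
    return HAVE_HDF5 and isinstance(obj, h5py.Group)
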
@@ -124,7 +131,6 @@ class MetaArray(object):
def __init__(self, data=None, info=None, dtype=None, file=None, copy=False, **kwargs):
object.__init__(self)
#self._infoOwned = False
self._isHDF = False
if file is not None:
@@ -190,58 +196,22 @@ class MetaArray(object):
return ['MetaArray']
else:
return name == 'MetaArray'
#def __array_finalize__(self,obj):
### array_finalize is called every time a MetaArray is created
### (whereas __new__ is not necessarily called every time)
### obj is the object from which this array was generated (for example, when slicing or view()ing)
## We use the getattr method to set a default if 'obj' doesn't have the 'info' attribute
##print "Create new MA from object", str(type(obj))
##import traceback
##traceback.print_stack()
##print "finalize", type(self), type(obj)
#if not hasattr(self, '_info'):
##if isinstance(obj, MetaArray):
##print " copy info:", obj._info
#self._info = getattr(obj, '_info', [{}]*(obj.ndim+1))
#self._infoOwned = False ## Do not make changes to _info until it is copied at least once
##print " self info:", self._info
## We could have checked first whether self._info was already defined:
##if not hasattr(self, 'info'):
## self._info = getattr(obj, 'info', {})
def __getitem__(self, ind):
#print "getitem:", ind
## should catch scalar requests as early as possible to speed things up (?)
nInd = self._interpretIndexes(ind)
#a = np.ndarray.__getitem__(self, nInd)
a = self._data[nInd]
if len(nInd) == self.ndim:
if np.all([not isinstance(ind, slice) for ind in nInd]): ## no slices; we have requested a single value from the array
if np.all([not isinstance(ind, (slice, np.ndarray)) for ind in nInd]): ## no slices; we have requested a single value from the array
return a
#if type(a) != type(self._data) and not isinstance(a, np.ndarray): ## indexing returned single value
#return a
## indexing returned a sub-array; generate new info array to go with it
#print " new MA:", type(a), a.shape
info = []
extraInfo = self._info[-1].copy()
for i in range(0, len(nInd)): ## iterate over all axes
#print " axis", i
if type(nInd[i]) in [slice, list] or isinstance(nInd[i], np.ndarray): ## If the axis is sliced, keep the info but chop if necessary
#print " slice axis", i, nInd[i]
#a._info[i] = self._axisSlice(i, nInd[i])
#print " info:", a._info[i]
info.append(self._axisSlice(i, nInd[i]))
else: ## If the axis is indexed, then move the information from that single index to the last info dictionary
#print "indexed:", i, nInd[i], type(nInd[i])
newInfo = self._axisSlice(i, nInd[i])
name = None
colName = None
@@ -270,16 +240,8 @@ class MetaArray(object):
else:
extraInfo['name'] = name
#print "Lost info:", newInfo
#a._info[i] = None
#if 'name' in newInfo:
#a._info[-1][newInfo['name']] = newInfo
info.append(extraInfo)
#self._infoOwned = False
#while None in a._info:
#a._info.remove(None)
return MetaArray(a, info=info)
@property
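
The widened isinstance check in __getitem__ above treats np.ndarray indices like slices, so fancy indexing falls through to the sub-array path instead of being mistaken for a single-value request. A hypothetical usage sketch (the axis names are made up, not from this commit):

# Hypothetical usage: with the (slice, np.ndarray) check, an integer-array
# index is expected to return a MetaArray with its axis info chopped to match.
import numpy as np
from pyqtgraph.metaarray import MetaArray

ma = MetaArray(np.arange(12).reshape(3, 4),
               info=[{'name': 'Time'}, {'name': 'Signal'}, {}])
sub = ma[np.array([0, 2])]   # ndarray index -> sub-array branch, not scalar path
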
@@ -313,22 +275,15 @@ class MetaArray(object):
return getattr(self._data, attr)
else:
raise AttributeError(attr)
#return lambda *args, **kwargs: MetaArray(getattr(a.view(ndarray), attr)(*args, **kwargs)
def __eq__(self, b):
return self._binop('__eq__', b)
def __ne__(self, b):
return self._binop('__ne__', b)
#if isinstance(b, MetaArray):
#b = b.asarray()
#return self.asarray() != b
def __sub__(self, b):
return self._binop('__sub__', b)
#if isinstance(b, MetaArray):
#b = b.asarray()
#return MetaArray(self.asarray() - b, info=self.infoCopy())
def __add__(self, b):
return self._binop('__add__', b)
@@ -498,14 +453,7 @@ class MetaArray(object):
numOk = True ## Named indices not started yet; numbered still ok
for i in range(0,len(ind)):
(axis, index, isNamed) = self._interpretIndex(ind[i], i, numOk)
#try:
nInd[axis] = index
#except:
#print "ndim:", self.ndim
#print "axis:", axis
#print "index spec:", ind[i]
#print "index num:", index
#raise
if isNamed:
numOk = False
return tuple(nInd)
@@ -684,9 +632,7 @@ class MetaArray(object):
def __str__(self):
return self.__repr__()
def axisCollapsingFn(self, fn, axis=None, *args, **kargs):
#arr = self.view(np.ndarray)
fn = getattr(self._data, fn)
if axis is None:
return fn(axis, *args, **kargs)
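
axisCollapsingFn above looks the reduction up on the wrapped ndarray (self._data) by name and calls it directly. A minimal sketch of that dispatch pattern on a plain array (the collapse helper is illustrative only):

# Look up a reduction method by name on an ndarray and call it with an axis.
import numpy as np

def collapse(data, fn_name, axis=None):
    fn = getattr(data, fn_name)      # e.g. data.mean, data.max, data.sum
    return fn(axis=axis)

collapse(np.arange(12).reshape(3, 4), 'mean', axis=0)   # per-column means
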
@@ -823,8 +769,6 @@ class MetaArray(object):
## No axes are dynamic, just read the entire array in at once
if dynAxis is None:
#if rewriteDynamic is not None:
#raise Exception("")
if meta['type'] == 'object':
if mmap:
raise Exception('memmap not supported for arrays with dtype=object')
@@ -834,9 +778,7 @@ class MetaArray(object):
subarr = np.memmap(fd, dtype=meta['type'], mode='r', shape=meta['shape'])
else:
subarr = np.fromstring(fd.read(), dtype=meta['type'])
#subarr = subarr.view(subtype)
subarr.shape = meta['shape']
#subarr._info = meta['info']
## One axis is dynamic, read in a frame at a time
else:
if mmap:
@@ -993,9 +935,9 @@ class MetaArray(object):
data[k] = val
for k in root:
obj = root[k]
if isinstance(obj, h5py.highlevel.Group):
if isinstance(obj, h5py.Group):
val = MetaArray.readHDF5Meta(obj)
elif isinstance(obj, h5py.highlevel.Dataset):
elif isinstance(obj, h5py.Dataset):
if mmap:
val = MetaArray.mapHDF5Array(obj)
else:
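
The readHDF5Meta hunk above switches its isinstance checks to the top-level h5py.Group / h5py.Dataset names made available by the shim at the top of the file. A hedged sketch of the same group-vs-dataset walk over an arbitrary HDF5 file (the file name and layout are hypothetical):

# Distinguish groups from datasets with the top-level h5py classes,
# mirroring the checks used in readHDF5Meta.
import h5py

def walk(node, indent=0):
    for name, obj in node.items():
        if isinstance(obj, h5py.Group):        # sub-group: recurse
            print(' ' * indent + name + '/')
            walk(obj, indent + 2)
        elif isinstance(obj, h5py.Dataset):    # leaf dataset: report its shape
            print(' ' * indent + name, obj.shape)

with h5py.File('example.ma', 'r') as f:       # hypothetical file name
    walk(f)
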
@@ -1256,89 +1198,6 @@ class MetaArray(object):
file.close()
else:
return ret
#class H5MetaList():
#def rewriteContiguous(fileName, newName):
#"""Rewrite a dynamic array file as contiguous"""
#def _readData2(fd, meta, subtype, mmap):
### read in axis values
#dynAxis = None
#frameSize = 1
### read in axis values for any axis that specifies a length
#for i in range(len(meta['info'])):
#ax = meta['info'][i]
#if ax.has_key('values_len'):
#if ax['values_len'] == 'dynamic':
#if dynAxis is not None:
#raise Exception("MetaArray has more than one dynamic axis! (this is not allowed)")
#dynAxis = i
#else:
#ax['values'] = fromstring(fd.read(ax['values_len']), dtype=ax['values_type'])
#frameSize *= ax['values_len']
#del ax['values_len']
#del ax['values_type']
### No axes are dynamic, just read the entire array in at once
#if dynAxis is None:
#raise Exception('Array has no dynamic axes.')
### One axis is dynamic, read in a frame at a time
#else:
#if mmap:
#raise Exception('memmap not supported for non-contiguous arrays. Use rewriteContiguous() to convert.')
#ax = meta['info'][dynAxis]
#xVals = []
#frames = []
#frameShape = list(meta['shape'])
#frameShape[dynAxis] = 1
#frameSize = np.prod(frameShape)
#n = 0
#while True:
### Extract one non-blank line
#while True:
#line = fd.readline()
#if line != '\n':
#break
#if line == '':
#break
### evaluate line
#inf = eval(line)
### read data block
##print "read %d bytes as %s" % (inf['len'], meta['type'])
#if meta['type'] == 'object':
#data = pickle.loads(fd.read(inf['len']))
#else:
#data = fromstring(fd.read(inf['len']), dtype=meta['type'])
#if data.size != frameSize * inf['numFrames']:
##print data.size, frameSize, inf['numFrames']
#raise Exception("Wrong frame size in MetaArray file! (frame %d)" % n)
### read in data block
#shape = list(frameShape)
#shape[dynAxis] = inf['numFrames']
#data.shape = shape
#frames.append(data)
#n += inf['numFrames']
#if 'xVals' in inf:
#xVals.extend(inf['xVals'])
#subarr = np.concatenate(frames, axis=dynAxis)
#if len(xVals)> 0:
#ax['values'] = array(xVals, dtype=ax['values_type'])
#del ax['values_len']
#del ax['values_type']
#subarr = subarr.view(subtype)
#subarr._info = meta['info']
#return subarr
if __name__ == '__main__':