Reduce memory usage during data generation
random.normal() generates data as float64 before it is converted to a smaller dtype. Generating all the needed data in a single call therefore uses far more memory than necessary. This change generates the data in smaller chunks instead. Data clipping is also changed to be performed in-place. The Gaussian filtering, which gave the video a washed-out look, is also removed; it likewise contributed significantly to data generation time.
This commit is contained in:
parent
cbafc773f5
commit
fda8731dab
@ -165,16 +165,22 @@ def mkData():
|
|||||||
mx = 1.0
|
mx = 1.0
|
||||||
else:
|
else:
|
||||||
raise ValueError(f"unable to handle dtype: {cacheKey[0]}")
|
raise ValueError(f"unable to handle dtype: {cacheKey[0]}")
|
||||||
|
|
||||||
|
chan_shape = (width, height)
|
||||||
if ui.rgbCheck.isChecked():
|
if ui.rgbCheck.isChecked():
|
||||||
data = xp.random.normal(size=(frames,width,height,3), loc=loc, scale=scale)
|
frame_shape = chan_shape + (3,)
|
||||||
data = pg.gaussianFilter(data, (0, 6, 6, 0))
|
|
||||||
else:
|
else:
|
||||||
data = xp.random.normal(size=(frames,width,height), loc=loc, scale=scale)
|
frame_shape = chan_shape
|
||||||
data = pg.gaussianFilter(data, (0, 6, 6))
|
data = xp.empty((frames,) + frame_shape, dtype=dt)
|
||||||
if cacheKey[0] != 'float':
|
view = data.reshape((-1,) + chan_shape)
|
||||||
data = xp.clip(data, 0, mx)
|
for idx in range(view.shape[0]):
|
||||||
data = data.astype(dt)
|
subdata = xp.random.normal(loc=loc, scale=scale, size=chan_shape)
|
||||||
|
# note: gaussian filtering has been removed as it slows down array
|
||||||
|
# creation greatly.
|
||||||
|
if cacheKey[0] != 'float':
|
||||||
|
xp.clip(subdata, 0, mx, out=subdata)
|
||||||
|
view[idx] = subdata
|
||||||
|
|
||||||
data[:, 10, 10:50] = mx
|
data[:, 10, 10:50] = mx
|
||||||
data[:, 9:12, 48] = mx
|
data[:, 9:12, 48] = mx
|
||||||
data[:, 8:13, 47] = mx
|
data[:, 8:13, 47] = mx
|
||||||
|
Loading…
Reference in New Issue
Block a user