def register_command(input, output, overwrite, url, method, size):
    """Motion-correct an imaging dataset and write the registered result.

    Parameters
    ----------
    input : str
        Directory containing tif/tiff or bin image files.
    output : str or None
        Output directory; defaults to input + '_registered'.
    overwrite : bool
        Whether to replace an existing output directory.
    url : str or None
        Spark master URL forwarded to setup_spark.
    method : str
        Registration method: 'normal', 'blocks', or 'piecewise'.
    size : int
        Block size (pixels per side) for the block-based methods.
    """
    input = abspath(input)
    output = input + '_registered' if output is None else abspath(output)

    if isdir(output) and not overwrite:
        error('directory already exists and overwrite is false')
        return
    elif isdir(output) and overwrite:
        rmtree(output)
    mkdir(output)

    engine = setup_spark(url)
    status('reading data from %s' % input)
    if len(glob(join(input, '*.tif'))) > 0:
        data = fromtif(input, engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.tiff'))) > 0:
        data = fromtif(input, ext='tiff', engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.bin'))) > 0:
        data = frombinary(input, engine=engine)
        ext = 'bin'
    else:
        error('no tif or binary files found in %s' % input)
        return

    status('registering')
    if method == 'normal':
        # shifts are computed but currently not persisted anywhere
        newdata, shifts = register(data)
    elif method == 'blocks':
        newdata = register_blocks(data, size=(size, size))
    elif method == 'piecewise':
        newdata = register_blocks_piecewise(data, size=(size, size))
    else:
        error('registration method %s not recognized' % method)
        # bail out: without this return, newdata would be undefined below
        return

    if ext == 'tif':
        newdata.totif(output, overwrite=overwrite)
    elif ext == 'bin':
        newdata.tobinary(output, overwrite=overwrite)
    else:
        error('extension %s not recognized' % ext)

    metafiles = glob(join(input, 'meta*.json'))
    if len(metafiles) > 0:
        status('copying metadata')
        for f in metafiles:
            copy(f, output)

    success('registration complete')
def preprocess_command(input, output, bidi, amount, detrend, order, url, overwrite):
    """Apply bidirectional-scan correction and/or detrending to a dataset.

    Parameters
    ----------
    input : str
        Directory containing tif/tiff or bin image files.
    output : str or None
        Output directory; defaults to input + '_preprocessed'.
    bidi : bool
        Whether to apply bidirectional scan-phase correction.
    amount : int or None
        Initial shift estimate forwarded to correct().
    detrend : bool
        Whether to detrend each pixel's time series.
    order : int
        Detrending order forwarded to detrend_func.
    url : str or None
        Spark master URL forwarded to setup_spark.
    overwrite : bool
        Whether to replace an existing output directory.
    """
    input = abspath(input)
    output = input + '_preprocessed' if output is None else abspath(output)

    if isdir(output) and not overwrite:
        error('directory already exists and overwrite is false')
        return
    elif isdir(output) and overwrite:
        rmtree(output)
    mkdir(output)

    engine = setup_spark(url)
    status('reading data from %s' % input)
    if len(glob(join(input, '*.tif'))) > 0:
        data = fromtif(input, engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.tiff'))) > 0:
        data = fromtif(input, ext='tiff', engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.bin'))) > 0:
        data = frombinary(input, engine=engine)
        ext = 'bin'
    else:
        error('no tif or binary files found in %s' % input)
        return

    if bidi:
        status('start bidi correction')
        if len(data.shape) > 4:
            error('Data length %d currently not supported' % len(data.shape))
            # bail out: without this return, newdata would be undefined below
            return
        else:
            newdata, amount = correct(data, amount=amount)
            status('shifted %s pixels' % amount)
    else:
        newdata = data

    if detrend:
        status('start detrending')
        newdata = newdata.map_as_series(lambda x: detrend_func(x, order))

    if ext == 'tif':
        newdata.totif(output, overwrite=overwrite)
    elif ext == 'bin':
        newdata.tobinary(output, overwrite=overwrite)
    else:
        error('extension %s not recognized' % ext)

    metafiles = glob(join(input, 'meta*.json'))
    if len(metafiles) > 0:
        status('copying metadata')
        for f in metafiles:
            copy(f, output)

    success('preprocessing complete')
def extract_command(input, output, diameter, method, url, overwrite):
    """Extract source regions (cells) from a dataset and save them as JSON.

    Parameters
    ----------
    input : str
        Directory containing tif/tiff or bin image files.
    output : str or None
        Output directory; defaults to input + '_extracted'.
    diameter : float
        Expected source diameter, used by CC and the shape filter.
    method : str
        Extraction algorithm: 'CC' or 'NMF'.
    url : str or None
        Spark master URL forwarded to setup_spark.
    overwrite : bool
        Whether to replace an existing regions file.
    """
    input = abspath(input)
    output = input + '_extracted' if output is None else abspath(output)

    if isfile(join(output, 'regions-' + method + '.json')) and not overwrite:
        error('file already exists and overwrite is false')
        return
    elif not isdir(output):
        mkdir(output)

    engine = setup_spark(url)
    status('reading data from %s' % input)
    if len(glob(join(input, '*.tif'))) > 0:
        data = fromtif(input, engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.tiff'))) > 0:
        data = fromtif(input, ext='tiff', engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.bin'))) > 0:
        data = frombinary(input, engine=engine)
        ext = 'bin'
    else:
        error('no tif or binary files found in %s' % input)
        return

    status('extracting')
    if method == 'CC':
        # NOTE(review): 'theshold' looks like a typo for 'threshold' --
        # confirm against CC's signature before renaming the keyword
        algorithm = CC(diameter=diameter, clip_limit=0.04, theshold=0.2,
                       sigma_blur=1, boundary=(1, 1))
        unmerged = algorithm.fit(data)
        model = unmerged.merge(0.1)
        # keep only roughly circular regions near the expected diameter
        model = filter_shape(model, min_diameter=0.7*diameter,
                             max_diameter=1.3*diameter, min_eccentricity=0.2)
    elif method == 'NMF':
        algorithm = NMF(k=10, percentile=99, max_iter=50, overlap=0.1)
        unmerged = algorithm.fit(data, chunk_size=(50, 50), padding=(25, 25))
        model = unmerged.merge(overlap=0.20, max_iter=3, k_nearest=10)
    else:
        error('extraction method %s not recognized' % method)
        # bail out: without this return, model would be undefined below
        return

    model.save(join(output, 'regions-' + method + '.json'))
    success('extraction complete')
def trace_command(input, output, regions, url, overwrite):
    """Compute df/f traces for a saved set of regions and write them as binary.

    Loads the regions model from *regions*, reads the imaging data found in
    *input* (tif/tiff or bin), evaluates dff(data, model), and saves the
    traces plus any meta*.json files into *output*.
    """
    input = abspath(input)
    if output is None:
        output = input + '_traces'
    else:
        output = abspath(output)

    if isdir(output):
        if not overwrite:
            error('directory already exists and overwrite is false')
            return
        rmtree(output)
    mkdir(output)

    model = load(regions)
    engine = setup_spark(url)
    status('reading data from %s' % input)

    if glob(join(input, '*.tif')):
        data = fromtif(input, engine=engine)
        ext = 'tif'
    elif glob(join(input, '*.tiff')):
        data = fromtif(input, ext='tiff', engine=engine)
        ext = 'tif'
    elif glob(join(input, '*.bin')):
        data = frombinary(input, engine=engine)
        ext = 'bin'
    else:
        error('no tif or binary files found in %s' % input)
        return

    status('calculating traces')
    traces = dff(data, model)
    traces.tobinary(output, overwrite=overwrite)

    metafiles = glob(join(input, 'meta*.json'))
    if metafiles:
        status('copying metadata')
        for metafile in metafiles:
            copy(metafile, output)

    success('traces complete')
def test_register_shape():
    # registration keeps the frame count and image dimensions,
    # and yields one 2-d shift per frame
    imgs = fromtif('test/resources/output')
    corrected, offsets = register(imgs)
    assert corrected.shape == (23, 464, 576)
    assert offsets.shape == (23, 2)
def test_downsample_shape():
    # downsampling by 2 in both space and time roughly halves each axis
    imgs = fromtif('test/resources/output')
    reduced = downsample(imgs, ds=2, dt=2)
    assert reduced.shape == (12, 232, 288)
def test_reference_shape():
    # the reference image collapses the time axis, leaving a single frame
    imgs = fromtif('test/resources/output')
    ref_img = reference(imgs)
    assert ref_img.shape == (464, 576)
def test_convert_ground_truth():
    # converting the raw input must reproduce the stored ground-truth output
    raw, meta = load('test/resources/input')
    converted, _ = convert(raw, meta)
    expected = fromtif('test/resources/output')
    assert converted.shape == expected.shape
    assert allclose(converted.clip(0, inf), expected)
def load(path, nplanes=None, engine=None):
    """Thin wrapper around fromtif: load a tif dataset from *path*.

    *nplanes* and *engine* are forwarded unchanged.
    """
    return fromtif(path, nplanes=nplanes, engine=engine)
def summarize_command(input, output, localcorr, mean, movie, ds, dt, size, url, overwrite):
    """Generate summary artifacts for a dataset: mean image, local
    correlation image, and/or an mp4 movie.

    Parameters
    ----------
    input : str
        Directory containing tif/tiff or bin image files.
    output : str or None
        Output directory; defaults to input + '_summary'.
    localcorr : bool
        Whether to write a local-correlation image.
    mean : bool
        Whether to write a mean image.
    movie : bool
        Whether to render a movie of the (optionally downsampled) data.
    ds : int, tuple, or None
        Spatial downsampling factor for the movie.
    dt : int or None
        Temporal downsampling factor for the movie.
    size : int or tuple
        Neighborhood size for the local-correlation computation.
    url : str or None
        Spark master URL forwarded to setup_spark.
    overwrite : bool
        Whether to replace existing summary files.
    """
    input = abspath(input)
    output = input + '_summary' if output is None else abspath(output)

    engine = setup_spark(url)
    status('reading data from %s' % input)
    if len(glob(join(input, '*.tif'))) > 0:
        data = fromtif(input, engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.tiff'))) > 0:
        data = fromtif(input, ext='tiff', engine=engine)
        ext = 'tif'
    elif len(glob(join(input, '*.bin'))) > 0:
        data = frombinary(input, engine=engine)
        ext = 'bin'
    else:
        error('no tif or binary files found in %s' % input)
        return

    if not isdir(output):
        mkdir(output)

    if mean:
        if not isfile(join(output, 'mean.tif')) or overwrite:
            status('summarizing-mean')
            m = data.mean().toarray()
            imsave(join(output, 'mean.tif'), m.clip(0, inf).astype('uint16'),
                   plugin='tifffile', photometric='minisblack')
        else:
            warn('mean.tif already exists and overwrite is false')

    if localcorr:
        if type(size) == tuple:
            name = 'localcorr-ds%s.tif' % ''.join(map(str, size))
        else:
            name = 'localcorr-ds%s.tif' % size
        if not isfile(join(output, name)) or overwrite:
            status('summarizing-localcorr')
            if len(data.shape) == 4:
                # volumetric data: correlate only within each plane
                size = (1, size, size)
            lc = data.localcorr(size)
            imsave(join(output, name), lc.astype('float32'),
                   plugin='tifffile', photometric='minisblack')
        else:
            warn('%s already exists and overwrite is false' % name)

    if movie:
        if type(ds) == tuple:
            dsString = '-ds'.join(map(str, ds))
        elif ds is None:
            dsString = ''
        else:
            dsString = '-ds' + str(ds)
        dtString = '' if dt is None else '-dt' + str(dt)
        pString = '-p%s' % 0 if len(data.shape) == 4 else ''
        name = 'movie%s%s%s.mp4' % (dsString, dtString, pString)

        if not isfile(join(output, name)) or overwrite:
            status('summarizing-movie')
            metafiles = glob(join(input, '*.json'))
            if len(metafiles) == 0:
                warn('no json metadata found in %s' % input)
                meta = None
            else:
                with open(metafiles[0]) as fid:
                    meta = json.load(fid)
                rate = meta['volumeRate']
                # pixels per micron, derived from the first ROI's geometry;
                # presumably 'size' is in mm given the /1000 -- confirm
                ppum = meta['rois'][0]['npixels'][1]/meta['rois'][0]['size'][1]/1000
                # scale-bar length (um) for the movie overlay
                scale = 100*round(meta['rois'][0]['size'][1]*1.8)
            if dt is not None and meta is not None:
                rate = rate/dt
            if ds is not None and meta is not None:
                ppum = ppum/ds
            if len(data.shape) == 4:
                # NOTE(review): when ds is None this yields (1, None, None);
                # presumably downsample tolerates that -- confirm
                ds = (1, ds, ds)
            result = downsample(data, ds=ds, dt=dt).toarray()
            if dt is None:
                dt = 0
            writer = animation.FFMpegWriter(fps=15, metadata=dict(artist='Me'),
                                            bitrate=40000)

            def animate(mv, name):
                # render one movie from a 3-d (time, y, x) array
                clim = 5*percentile(mv, 90)
                # middle frame as the initial image; integer index
                # (was mv.shape[0]/2, a float under Python 3 -> TypeError)
                img = mv[mv.shape[0]//2, :, :]
                nframes = mv.shape[0]-dt
                fig = plt.figure(figsize=[12, 12.0*img.shape[0]/img.shape[1]])
                fig.subplots_adjust(left=0, bottom=0, right=1, top=1,
                                    wspace=None, hspace=None)
                ax = plt.gca()
                im = image(img, clim=(0, clim), ax=ax)
                if meta is not None:
                    # timestamp and scale-bar overlays only when metadata exists
                    time = ax.text(.97*img.shape[1], .04*img.shape[0], '%.1f s' % 0,
                                   color='white', fontsize=22, ha='right',
                                   fontdict={'family': 'monospace'})
                    ax.plot([.04*img.shape[1], .04*img.shape[1]+scale*ppum],
                            [.94*img.shape[0], .94*img.shape[0]], 'w', lw=2)
                    ax.text(.04*img.shape[1]+scale*ppum/2, .97*img.shape[0],
                            '%d um' % scale, color='white', fontsize=22,
                            ha='center', fontdict={'family': 'monospace'})
                plt.xlim([0, img.shape[1]])
                plt.ylim([img.shape[0], 0])

                def update(f):
                    # integer frame index (was dt/2+f, a float under Python 3)
                    im.set_array(mv[dt//2 + f])
                    if meta is not None:
                        time.set_text('%.1f s' % ((dt//2 + f)/rate))

                ani = animation.FuncAnimation(fig, update, nframes, blit=False)
                ani.save(join(output, name), writer=writer)

            if len(result.shape) == 4:
                # volumetric data: render one movie per plane
                # (was result.shape(1) -- a tuple is not callable)
                for plane in range(result.shape[1]):
                    pString = '-p%s' % plane
                    name = 'movie%s%s%s.mp4' % (dsString, dtString, pString)
                    animate(result[:, plane, :, :], name)
            else:
                animate(result, name)
        else:
            warn('%s already exists and overwrite is false' % name)

    metafiles = glob(join(input, 'meta*.json'))
    if len(metafiles) > 0:
        status('copying metadata')
        for f in metafiles:
            copy(f, output)

    success('summary complete')