def parse_args(parser: argparse.ArgumentParser):
    """Parse CLI arguments and return ``(args, ops)`` with overrides applied.

    Options given on the command line take precedence over values loaded
    from the ``--ops`` file; only values that differ from the defaults in
    ``default_ops()`` are written into ``ops``.
    """
    args = parser.parse_args()
    dargs = vars(args)
    defaults = default_ops()
    # Start from the ops file if one was supplied, otherwise empty overrides.
    ops = np.load(args.ops, allow_pickle=True).item() if args.ops else {}
    set_param_msg = '->> Setting {0} to {1}'
    # options defined in the cli take precedence over the ones in the ops file
    for key, default_val in defaults.items():
        cli_val = dargs[key]
        if key in ('fast_disk', 'save_folder', 'save_path0'):
            # Path-like options: only apply when the user provided a value.
            if cli_val:
                ops[key] = cli_val
                print(set_param_msg.format(key, ops[key]))
        elif type(default_val) in (np.ndarray, list):
            # Sequence options: compare element-wise against the default.
            candidate = np.array(cli_val)
            if np.any(candidate != np.array(default_val)):
                ops[key] = candidate.astype(type(default_val))
                print(set_param_msg.format(key, ops[key]))
        elif isinstance(default_val, bool):
            # bool('0') is True, so round-trip through int first.
            cli_val = bool(int(cli_val))
            if default_val != cli_val:
                ops[key] = cli_val
                print(set_param_msg.format(key, ops[key]))
        else:
            # Scalar options: coerce the CLI value to the default's type
            # before comparing, so '3' matches 3.
            coerced = type(default_val)(cli_val)
            if not (default_val == coerced):
                ops[key] = coerced
                print(set_param_msg.format(key, ops[key]))
    return args, ops
def test_ops(tmpdir, data_dir):
    """Build default suite2p ops for a test, using the builtin classifier
    and the per-test temporary directory provided by the tmpdir fixture."""
    ops = suite2p.default_ops()
    ops['use_builtin_classifier'] = True
    ops['data_path'] = [data_dir]
    ops['save_path0'] = str(tmpdir)
    return ops
def setup_and_teardown(tmpdir):
    """Yield (ops, tmpdir path) for a test, then delete the temporary
    directory once the test finishes (code after ``yield`` is teardown)."""
    ops = suite2p.default_ops()
    ops.update({
        'data_path': [test_data_dir],
        'save_path0': str(tmpdir),
    })
    yield ops, str(tmpdir)
    # Teardown: remove the per-test directory if it still exists.
    tmp_path = Path(str(tmpdir))
    if tmp_path.is_dir():
        shutil.rmtree(tmpdir)
        print('Successful removal of tmp_path {}.'.format(tmpdir))
def parse_arguments():
    """Build the suite2p CLI, apply CLI overrides on top of an optional
    ops file, then run the pipeline (``run_s2p``) or launch the GUI.

    CLI options take precedence over the ones in the ``--ops`` file.
    Bug fix: boolean options now convert through ``int`` first — argparse
    delivers them as strings and ``bool('0')`` is ``True``, so previously
    a flag could never be turned off from the command line.
    """
    parser = argparse.ArgumentParser(description='Suite2p parameters')
    parser.add_argument('--ops', default=[], type=str, help='options')
    parser.add_argument('--db', default=[], type=str, help='options')
    ops0 = suite2p.default_ops()
    for k in ops0.keys():
        v = dict(default=ops0[k], help='{0}: {1}'.format(k, ops0[k]))
        if k in ['fast_disk', 'save_folder', 'save_path0']:
            # Path options default to None so we can tell "unset" apart.
            v['default'] = None
            v['type'] = str
        if type(v['default']) in [np.ndarray, list]:
            if len(v['default']):
                # Sequence option: accept one-or-more values of element type.
                v['nargs'] = '+'
                v['type'] = type(v['default'][0])
        parser.add_argument('--' + k, **v)
    args = parser.parse_args()
    dargs = vars(args)
    ops = {}
    db = {}
    if len(args.ops) > 0:
        # NOTE(review): allow_pickle can execute arbitrary code — only load
        # ops files from trusted sources.
        ops = np.load(args.ops, allow_pickle=True).item()
    # options defined in the cli take precedence over the ones in the ops file
    for k in ops0:
        v = ops0[k]
        n = dargs[k]
        if k in ['fast_disk', 'save_folder', 'save_path0']:
            if n is not None:
                ops[k] = n
                print('->> Setting {0} to {1}'.format(k, ops[k]))
        elif type(v) in [np.ndarray, list]:
            if len(n):
                n = np.array(n)
                if np.any(n != np.array(v)):
                    ops[k] = n.astype(type(v))
                    print('->> Setting {0} to {1}'.format(k, ops[k]))
        elif isinstance(v, bool):
            # bool('0') is True, must convert to int first.
            n = bool(int(n))
            if v != n:
                ops[k] = n
                print('->> Setting {0} to {1}'.format(k, ops[k]))
        else:
            # Coerce the CLI string to the default's type before comparing.
            if not v == type(v)(n):
                ops[k] = type(v)(n)
                print('->> Setting {0} to {1}'.format(k, ops[k]))
    if len(args.db) > 0:
        db = np.load(args.db, allow_pickle=True).item()
        suite2p.run_s2p(ops, db)
    else:
        suite2p.run_gui()
def add_args(parser: argparse.ArgumentParser):
    """Register every suite2p ops entry (plus --ops/--db/--single_plane)
    as a command-line flag on *parser* and return it."""
    parser.add_argument('--single_plane', action='store_true',
                        help='run single plane ops')
    parser.add_argument('--ops', default=[], type=str, help='options')
    parser.add_argument('--db', default=[], type=str, help='options')
    for key, value in default_ops().items():
        kwargs = dict(default=value, help='{0} : {1}'.format(key, value))
        if key in ('fast_disk', 'save_folder', 'save_path0'):
            # Path options start unset so callers can detect overrides.
            kwargs['default'] = None
            kwargs['type'] = str
        if (type(kwargs['default']) in (np.ndarray, list)) and len(kwargs['default']):
            # Non-empty sequence default: take one-or-more element-typed values.
            kwargs['nargs'] = '+'
            kwargs['type'] = type(kwargs['default'][0])
        parser.add_argument('--' + key, **kwargs)
    return parser
import numpy as np
import sys
import os
from suite2p import run_s2p, default_ops
import shutil
import glob

# Movie to process is the first CLI argument.
fnames = [os.path.normpath(sys.argv[1])]

# set your options for running
data_path = os.path.dirname(fnames[0])
# z-range and z-step are parsed out of the filename,
# e.g. '...range120_step10...' -> range2=120, step=10.
range2 = int(fnames[0].split('range')[-1].split('_')[0])
step = int(fnames[0].split('step')[-1].split('_')[0])
nplanes = int((range2 / step) + 1)

ops = default_ops()
ops['fs'] = 2
ops['tau'] = 1.6
ops['save_mat'] = 1
ops['1Preg'] = True
ops['denoise'] = True
ops['diameter'] = [3, 4, 5]
ops['threshold_scaling'] = 0.5
# Fix: 'max_iterations' was assigned 100 twice; keep a single assignment.
ops['max_iterations'] = 100
ops['smooth_sigma'] = 4
ops['high_pass'] = 30
ops['nchannels'] = 1
ops['spatial_taper'] = 5
ops['pre_smooth'] = 2
ops['inner_neuropil_radius'] = 1
ops['neuropil_extract'] = 0
from pathlib import Path
import suite2p
from tempfile import TemporaryDirectory

ops = suite2p.default_ops()
ops['nplanes'] = 2
ops['nchannels'] = 2
data_path = Path(__file__).joinpath('../../../data/test_data')
ops['data_path'] = [str(data_path)]
# Fix: hold a reference to the TemporaryDirectory object. The original
# `TemporaryDirectory().name` dropped the only reference, so its finalizer
# could delete the directory before (or while) suite2p wrote into it.
save_dir = TemporaryDirectory()
ops['save_path0'] = save_dir.name
suite2p.run_s2p(ops)
def test_ops(tmpdir, data_dir):
    """Return default suite2p ops pointed at *data_dir*, writing results
    into the unique per-test directory supplied by the tmpdir fixture."""
    ops = suite2p.default_ops()
    ops.update({
        'data_path': [data_dir],
        'save_path0': str(tmpdir),
    })
    return ops
'input_format': "h5", 'h5py_key': 'data', 'sparse_mode': True, 'threshold_scaling': 1., 'nchannels': 2, 'nframes': data_bin.n_frames, 'Ly': data_bin.shape[1], 'Lx': data_bin.shape[2], 'xrange': [50, 462], 'yrange': [50, 745], 'nbinned': 10000, 'meanImg': data_bin.sampled_mean(), 'meanImag_chan2': data2_bin.sampled_mean(), } ops = {**s2p.default_ops(), **combined_ops} np.save(ops['ops_path'], ops) ops = s2p.run_s2p(ops=ops)
def run(self):
    """Run Suite2P on the configured movie and copy retained outputs.

    Steps:
      1. merge Suite2P's default ops under ``self.args`` (``self.args`` wins),
      2. derive ``nbinned`` from ``bin_duration`` * ``movie_frame_rate_hz``
         when it was not given,
      3. run ``suite2p.run_s2p`` with output directed into a temp directory,
      4. copy the files listed in ``retain_files`` (optionally timestamped)
         into ``output_dir`` and report them via ``self.output``.

    Raises:
        utils.Suite2PWrapperException: if the requested bin size exceeds
            the number of frames in the movie.
    """
    self.logger.name = type(self).__name__
    self.logger.setLevel(self.args.pop('log_level'))
    # explicitly set default Suite2P args that are not
    # already specified in self.args
    self.args = {**copy.deepcopy(suite2p.default_ops()), **self.args}

    # Should always exist as either a valid SHA or "unknown build"
    # if running in docker container.
    ophys_etl_commit_sha = os.environ.get("OPHYS_ETL_COMMIT_SHA",
                                          "local build")
    self.logger.info(f"OPHYS_ETL_COMMIT_SHA: {ophys_etl_commit_sha}")

    # determine nbinned from bin_duration and movie_frame_rate_hz
    if self.args['nbinned'] is None:
        # NOTE(review): assumes the h5 file has a 'data' dataset with frames
        # on axis 0 — confirm against the input-movie schema.
        with h5py.File(self.args['h5py'], 'r') as f:
            nframes = f['data'].shape[0]
        bin_size = (self.args['bin_duration'] *
                    self.args['movie_frame_rate_hz'])

        if bin_size > nframes:
            raise utils.Suite2PWrapperException(
                f"The desired frame bin duration "
                f"({self.args['bin_duration']} "
                f"seconds) and movie frame rate "
                f"({self.args['movie_frame_rate_hz']} Hz) "
                "results in a bin "
                f"size ({bin_size} frames) larger than the number of "
                f"actual frames in the movie ({nframes})!")

        self.args['nbinned'] = int(nframes / bin_size)
        self.logger.info(f"Movie has {nframes} frames collected at "
                         f"{self.args['movie_frame_rate_hz']} Hz. "
                         "To get a bin duration of "
                         f"{self.args['bin_duration']} "
                         f"seconds, setting nbinned to "
                         f"{self.args['nbinned']}.")

    # make a tempdir for Suite2P's output
    with tempfile.TemporaryDirectory(dir=self.args['tmp_dir']) as tdir:
        self.args['save_path0'] = tdir
        self.logger.info(f"Running Suite2P with output going to {tdir}")
        if self.args['movie_frame_rate_hz'] is not None:
            self.args['fs'] = self.args['movie_frame_rate_hz']

        # Make a copy of the args to remove the NumpyArray, refImg, as
        # numpy.ndarray can't be serialized with json. Converting to list
        # and writing to the logger causes the output to be unreadable.
        copy_of_args = copy.deepcopy(self.args)
        copy_of_args.pop('refImg')

        msg = f'running Suite2P v{suite2p.version} with args\n'
        msg += f'{json.dumps(copy_of_args, indent=2, sort_keys=True)}\n'
        self.logger.info(msg)

        # If we are using a external reference image (including our own
        # produced by compute_referece) communicate this in the log.
        if self.args['force_refImg']:
            self.logger.info('\tUsing custom reference image: '
                             f'{self.args["refImg"]}')

        try:
            suite2p.run_s2p(self.args)
        except ValueError as e:
            if 'no ROIs were found' in str(e):
                # Save an empty list of ROIs instead of failing
                np.save(file=str(pathlib.Path(tdir) / 'stat.npy'),
                        arr=[])
            else:
                raise

        self.logger.info(f"Suite2P complete. Copying output from {tdir} "
                         f"to {self.args['output_dir']}")
        # copy over specified retained files to the output dir
        odir = pathlib.Path(self.args['output_dir'])
        odir.mkdir(parents=True, exist_ok=True)
        # Optional timestamp suffix uniquifies the copied filenames.
        self.now = None
        if self.args['timestamp']:
            self.now = datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')
        # use_mv=True: files are moved out of tdir before the context
        # manager deletes it.
        output_files = utils.copy_and_add_uid(
            srcdir=pathlib.Path(tdir),
            dstdir=odir,
            basenames=self.args['retain_files'],
            uid=self.now,
            use_mv=True)
        for k, v in output_files.items():
            self.logger.info(f"wrote {k} to {v}")

    outdict = {'output_files': output_files}
    self.output(outdict, indent=2)
def test_ops_suite2p_version_matches_cli_version_number(capfd):
    """The version reported by `suite2p --version` on the CLI should
    contain the `suite2p_version` stored in the default ops."""
    os.system('suite2p --version')
    cli_output = capfd.readouterr()
    expected_version = suite2p.default_ops()['suite2p_version']
    assert expected_version in cli_output.out