def main():
    # Parse the command-line arguments.
    args, kwargs = _parse_args(sys.argv[1:])

    # Optionally enable the profiler or line profiler.
    if args.profiler or args.line_profiler:
        from phy.utils.testing import _enable_profiler, _profile
        prof = _enable_profiler(args.line_profiler)
    else:
        prof = None

    import phy
    if args.debug:
        phy.debug()

    # Parse the comma-separated list of cluster ids, e.g. "2,3,5" -> [2, 3, 5].
    if args.cluster_ids:
        cluster_ids = list(map(int, args.cluster_ids.split(',')))
    else:
        cluster_ids = None

    # Build the command string to execute, depending on the subcommand.
    if args.command == 'cluster-manual':
        cmd = ('run_manual(args.file, clustering=args.clustering, '
               'interactive=args.ipython, cluster_ids=cluster_ids)')
    elif args.command == 'cluster-auto':
        cmd = ('run_auto(args.file, clustering=args.clustering, '
               'interactive=args.ipython, **kwargs)')
    elif args.command == 'describe':
        cmd = 'describe(args.file)'
    else:
        raise NotImplementedError()

    # Execute the command, with or without profiling.
    if not prof:
        exec_(cmd, globals(), locals())
    else:
        _profile(prof, cmd, globals(), locals())
def main(args=None):
    p = ParserCreator()
    if args is None:
        args = sys.argv[1:]
    elif isinstance(args, string_types):
        args = args.split(' ')
    args = p.parse(args)
    if args is None:
        return

    if args.profiler or args.line_profiler:
        from phy.utils.testing import _enable_profiler, _profile
        prof = _enable_profiler(args.line_profiler)
    else:
        prof = None

    import phy
    if args.debug:
        phy.debug()

    # Hide the traceback.
    if args.hide_traceback:
        def exception_handler(exception_type, exception, traceback):
            print("{}: {}".format(exception_type.__name__, exception))

        sys.excepthook = exception_handler

    # Activate IPython debugger.
    if args.pdb:
        from IPython.core import ultratb
        sys.excepthook = ultratb.FormattedTB(mode='Verbose',
                                             color_scheme='Linux',
                                             call_pdb=1,
                                             )

    func = args.func
    if func is None:
        p.parser.print_help()
        return

    out = func(args)
    if not out:
        return
    cmd, ns = out
    if not cmd:
        return
    requires_qt = ns.pop('requires_qt', False)
    requires_vispy = ns.pop('requires_vispy', False)

    # Default variables in namespace.
    ns.update(phy=phy, path=args.file)
    if 'session' in ns:
        ns['model'] = ns['session'].model

    # Interactive mode with IPython.
    if args.ipython:
        print("\nStarting IPython...")
        from IPython import start_ipython
        args_ipy = ["-i", "-c='{}'".format(cmd)]
        if requires_qt or requires_vispy:
            # Activate Qt event loop integration with Qt.
            args_ipy += ["--gui=qt"]
        start_ipython(args_ipy, user_ns=ns)
    else:
        if not prof:
            exec_(cmd, {}, ns)
        else:
            _profile(prof, cmd, {}, ns)

        if requires_qt:
            # Launch the Qt app.
            from phy.gui import run_qt_app
            run_qt_app()
        elif requires_vispy:
            # Launch the VisPy Qt app.
            from vispy.app import use_app, run
            use_app('pyqt4')
            run()
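# A minimal usage sketch (not part of the original module), assuming the
# standard entry-point idiom: main() accepts either an argument list or a
# plain string that it splits on spaces, so it can also be driven from tests.
# The 'describe' subcommand and the 'myfile.kwik' path below are illustrative
# assumptions.
if __name__ == '__main__':
    # Forward the real command-line arguments:
    main()
    # Or, equivalently for a quick manual test, pass the command as a string:
    # main('describe myfile.kwik')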
import os.path as op
import shutil
from pprint import pprint
from timeit import default_timer

import h5py
import numpy as np
from numpy.testing import assert_allclose as ac

import phy
from phy.cluster.manual.store import DiskStore
from phy.io.h5 import open_h5
from phy.cluster.manual._utils import _spikes_per_cluster
from phy.utils.array import _index_of

phy.debug()

_store_path = '_store'

n_spikes = 2000000
n_channels = 300
n_clusters = 500


# Generate the dataset.

def _gen_arr():
    arr = np.random.rand(n_spikes, n_channels).astype(np.float32)
    with open_h5('test', 'w') as f:
        f.write('/test', arr)


def _gen_spike_clusters():
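    # A plausible sketch of this generator's body, assuming it simply draws
    # one random cluster id per spike; the output file name and format are
    # assumptions.
    spike_clusters = np.random.randint(size=n_spikes, low=0, high=n_clusters)
    np.save('spike_clusters.npy', spike_clusters)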