def test_iter_morphology_files():
    """iter_morphology_files accepts Path or str and honors ``recursive``."""
    folder = DATA / 'folder'
    top_level = {folder / 'a.h5', folder / 'b.swc'}
    nested = top_level | {
        folder / 'subfolder' / 'g.SWC',
        folder / 'subfolder' / 'e.h5',
    }

    # Non-recursive: only files directly inside the folder are returned.
    assert set(tested.iter_morphology_files(folder)) == top_level
    assert set(tested.iter_morphology_files(str(folder))) == top_level

    # Recursive: files inside subfolders are returned as well.
    assert set(tested.iter_morphology_files(folder, recursive=True)) == nested
    assert set(tested.iter_morphology_files(str(folder), recursive=True)) == nested
def from_neurondb(cls,
                  neurondb: Union[Path, str],
                  label: str = 'default',
                  morphology_folder: Optional[Union[Path, str]] = None):
    '''Build a MorphologyDB from a neurondb.(xml|dat) file.

    Args:
        neurondb: path to a neurondb.(xml|dat) file
        label: a unique label to mark all morphologies coming from this neurondb
        morphology_folder: the location of the morphology files; if None it
            defaults to the folder containing the neurondb file

    Raises:
        ValueError if the neurondb does not abide by the specification
        https://bbpteam.epfl.ch/documentation/projects/morphology-repair-workflow/latest/input_files.html#specification

    ..note:: missing keys are filled with `True` values
    '''
    neurondb = Path(neurondb)
    morphology_folder = (Path(morphology_folder) if morphology_folder
                         else neurondb.parent.resolve())

    # Map morphology name (file stem) to its on-disk path.
    morph_paths = {p.stem: p for p in iter_morphology_files(morphology_folder)}

    # Dispatch on the neurondb flavour; anything that is not .dat is parsed as XML.
    loader = (cls._from_neurondb_dat if neurondb.suffix.lower() == '.dat'
              else cls._from_neurondb_xml)
    return loader(neurondb, morph_paths, label)
def folder(input_dir,  # pylint: disable=too-many-arguments
           output_dir, extension, quiet, recenter, nrn_order,
           single_point_soma, sanitize, ncores):
    """Convert all morphologies in the folder and its subfolders."""
    # dask is an optional dependency; import lazily and point the user to the extra.
    # pylint: disable=import-outside-toplevel
    try:
        import dask.bag as dask_bag
    except ImportError as e:
        raise ImportError(
            'morph-tool[parallel] is not installed. Run: pip install morph-tool[parallel]'
        ) from e

    if quiet:
        L.setLevel(logging.WARNING)

    # Fan the conversions out over ncores partitions; each item is either a
    # falsy value (success) or the path that failed.
    bag = dask_bag.from_sequence(iter_morphology_files(input_dir),
                                 npartitions=ncores)
    results = bag.map(_attempt_convert,
                      output_dir=output_dir,
                      extension=extension,
                      recenter=recenter,
                      nrn_order=nrn_order,
                      single_point_soma=single_point_soma,
                      sanitize=sanitize)

    failed_conversions = [item for item in results if item]
    if failed_conversions:
        L.warning('The following morphologies could not be converted: %s',
                  failed_conversions)
def unravel_all(raw_dir, unravelled_dir, raw_planes_dir, unravelled_planes_dir,
                window_half_length=DEFAULT_WINDOW_HALF_LENGTH):
    '''Unravel all morphologies in the input folder.

    Args:
        raw_dir: folder containing the input morphologies
        unravelled_dir: folder where the unravelled morphologies are written
        raw_planes_dir: folder containing one cut-plane JSON file per morphology
        unravelled_planes_dir: folder where the unravelled cut planes are
            written (created if missing)
        window_half_length: sliding-window half length forwarded to unravel()

    Raises:
        Exception: if raw_planes_dir does not exist
    '''
    if not os.path.exists(raw_planes_dir):
        raise Exception('{} does not exist'.format(raw_planes_dir))

    if not os.path.exists(unravelled_planes_dir):
        os.mkdir(unravelled_planes_dir)

    for inputfilename in iter_morphology_files(raw_dir):
        L.info('Unravelling: %s', inputfilename)
        outfilename = Path(unravelled_dir, inputfilename.name)
        raw_plane = CutPlane.from_json(
            Path(raw_planes_dir, inputfilename.name).with_suffix('.json'))
        unravelled_plane = Path(unravelled_planes_dir,
                                inputfilename.name).with_suffix('.json')

        try:
            neuron, mapping = unravel(str(inputfilename), window_half_length)
            neuron.write(str(outfilename))
            with open(str(unravelled_plane), 'w') as f:
                json.dump(unravel_plane(raw_plane, mapping).to_json(), f,
                          cls=RepairJSON)
        except Exception as e:  # noqa, pylint: disable=broad-except
            # FIX: previously logged ``f`` (the JSON file handle), which is
            # unbound when the failure happens before the ``open`` call; log
            # the morphology being processed instead.
            L.warning('Unravelling %s failed', inputfilename)
            L.warning(e, exc_info=True)
def repair_all(input_dir, output_dir, seed=0, axons=None, cut_points_dir=None,
               plots_dir=None):
    '''Repair every morphology found in the input folder.'''
    for morph_path in iter_morphology_files(input_dir):
        out_path = Path(output_dir, morph_path.name)

        # Optional per-morphology cut points, stored as <name>.csv.
        cut_points = None
        if cut_points_dir:
            cut_points = pd.read_csv(
                Path(cut_points_dir, morph_path.name).with_suffix('.csv'))

        # Optional per-morphology diagnostic plot.
        plot_file = None
        if plots_dir is not None:
            plot_name = 'neuron_{}.html'.format(
                Path(morph_path).stem.replace(' ', '_'))
            plot_file = str(Path(plots_dir, plot_name))

        try:
            repair(morph_path, out_path,
                   seed=seed,
                   axons=axons,
                   cut_leaves_coordinates=cut_points,
                   plot_file=plot_file)
        except Exception as e:  # noqa, pylint: disable=broad-except
            L.warning('%s failed', morph_path)
            L.warning(e, exc_info=True)
def test_mapping_NeuroM_section_to_NRN():
    """Smoke-test NeuroM_section_to_NRN_section over a large morphology set."""
    # NOTE(review): ``repo`` is immediately overwritten below, so this clone is
    # a (slow) side effect only — confirm whether it can be removed.
    repo = os.path.join('/tmp', 'MorphologyRepository')
    if not os.path.exists(repo):
        Repo.clone_from(
            'ssh://bbpcode.epfl.ch/experiment/MorphologyRepository',
            repo, depth=1)

    repo = '/gpfs/bbp.cscs.ch/project/proj68/tmp/NCX-83/one-column-proj64/20190308/synthesis/morphologies/hashed/06'

    # Concretize list to get tqdm counter
    files = list(
        iter_morphology_files(repo, recursive=True, extensions={'asc'}))
    for f in tqdm(files):
        try:
            NeuroM_section_to_NRN_section(str(f))
        except (
                RawDataError,
                RuntimeError  # raised by neuron if 2 somas
        ):
            pass
        # FIX: was a bare ``except:`` which also intercepted SystemExit and
        # KeyboardInterrupt before re-raising; narrow it to Exception.
        except Exception:
            print('Error for file: {}'.format(f))
            raise
def test_error_annotation_all():
    """annotate_neurolucida_all reports the expected summaries and markers."""
    input_dir = Path(PATH, 'test-error-detection')

    # Sort so morphologies are processed in a deterministic order.
    morph_paths = sorted(str(morph) for morph in iter_morphology_files(input_dir))

    annotations, summaries, markers = annotate_neurolucida_all(morph_paths)

    bad_morph = str(morph_paths[0])
    good_morph = str(morph_paths[1])

    expected_summaries = {
        bad_morph: {
            'fat end': 1,
            'zjump': 1,
            'narrow start': 1,
            'dangling': 1,
            'Multifurcation': 1,
        },
        good_morph: {},
    }
    assert_equal(summaries, expected_summaries)

    expected_markers = {
        bad_morph: [
            {'name': 'fat end',
             'label': 'Circle3',
             'color': 'Blue',
             'data': [(7, np.array([[-5., -4., 0., 20.]], dtype=np.float32))]},
            {'name': 'zjump',
             'label': 'Circle2',
             'color': 'Green',
             'data': [(2, [np.array([0., 5., 0., 1.], dtype=np.float32),
                           np.array([0., 5., 40., 1.], dtype=np.float32)])]},
            {'name': 'narrow start',
             'label': 'Circle1',
             'color': 'Blue',
             'data': [(0, np.array([[0., 5., 0., 1.]], dtype=np.float32))]},
            {'name': 'dangling',
             'label': 'Circle6',
             'color': 'Magenta',
             'data': [(5, [np.array([10., -20., -4., 1.], dtype=np.float32)])]},
            {'name': 'Multifurcation',
             'label': 'Circle8',
             'color': 'Yellow',
             'data': [(0, np.array([[0., 5., 0., 1.]], dtype=np.float32))]},
        ],
        good_morph: [],
    }
    assert_equal(markers, expected_markers)
def folder(input_dir, output_dir, error_summary_file, marker_file):
    '''Annotate errors on a morphologies in a folder.'''
    # pylint: disable=import-outside-toplevel
    from neuror.sanitize import annotate_neurolucida_all

    output_dir = Path(output_dir)
    morph_paths = list(iter_morphology_files(input_dir))
    annotations, summaries, markers = annotate_neurolucida_all(morph_paths)

    # Copy each morphology into output_dir, then append its annotation block.
    for morph_path, annotation in annotations.items():
        annotated_copy = output_dir / Path(morph_path).name
        shutil.copy(morph_path, annotated_copy)
        with open(annotated_copy, 'a') as morph_file:
            morph_file.write(annotation)

    # Dump the error summary and the markers as JSON side files.
    with open(error_summary_file, 'w') as summary_file:
        json.dump(summaries, summary_file, indent=4, cls=NeuromJSON)

    with open(marker_file, 'w') as m_file:
        json.dump(markers, m_file, cls=NeuromJSON)
def folder(input_dir, output_dir, width, display, plane):
    '''Compute cut planes for all morphology in INPUT_DIR and save them into OUTPUT_DIR

    See "cut-plane compute --help" for more information'''
    for inputfilename in iter_morphology_files(input_dir):
        L.info('Searching cut plane for file: %s', inputfilename)
        # FIX: join only the file *name* with output_dir. Joining the full
        # input path discarded output_dir when the input path was absolute
        # (os.path.join drops everything before an absolute component), or
        # nested the input tree under output_dir when it was relative. This
        # also matches how the sibling batch functions build output paths.
        outfilename = os.path.join(output_dir,
                                   inputfilename.with_suffix('.json').name)
        try:
            _export_cut_plane(inputfilename, outfilename, width,
                              display=display,
                              searched_axes=(plane or ('X', 'Y', 'Z')),
                              fix_position=None)
        except Exception as e:  # noqa, pylint: disable=broad-except
            L.warning('Cut plane computation for %s failed', inputfilename)
            L.warning(e, exc_info=True)
def folder(input_dir, output_dir, extension, quiet, recenter, nrn_order,
           single_point_soma, ncores):
    '''Convert all morphologies in the folder and its subfolders'''
    if quiet:
        L.setLevel(logging.WARNING)

    # Distribute the conversions over ncores partitions; each mapped item is
    # either falsy (success) or the path that failed to convert.
    bag = dask_bag.from_sequence(iter_morphology_files(input_dir),
                                 npartitions=ncores)
    results = bag.map(_attempt_convert,
                      output_dir=output_dir,
                      extension=extension,
                      recenter=recenter,
                      nrn_order=nrn_order,
                      single_point_soma=single_point_soma)

    failed_conversions = [item for item in results if item]
    if failed_conversions:
        L.warning('The following morphologies could not be converted: %s',
                  failed_conversions)
def from_folder(cls,
                morphology_folder: Union[Path, str],
                mtypes: Iterable[Tuple[str, str]],
                label: str = 'default',
                extension: Optional[str] = None):
    """Factory method to create a MorphDB object from a folder containing morphologies.

    Args:
        morphology_folder: a folder containing morphologies
        mtypes: a sequence of 2-tuples (morphology name, mtype)
        label: (optional) a group label to be used to identify the morphologies
            from this folder
        extension: Specify the morphology format to consider, if the folder
            contains multiple formats

    Raises:
        ValueError: if the folder contains multiple files with the same name but
            different extensions and the extension argument has not been provided

    Returns:
        MorphDB: an instance of MorphDB.
    """
    files = list(
        iter_morphology_files(
            Path(morphology_folder),
            extensions={extension} if extension else None))

    if not extension:
        # Detect name clashes across formats (e.g. foo.h5 and foo.swc): the
        # stem -> path mapping built below would silently drop one of them.
        duplicates = [
            stem for stem, count in collections.Counter(
                path.stem for path in files).items()
            if count > 1
        ]
        if duplicates:
            # FIX: corrected typos in the user-facing message
            # ("morphogies" -> "morphologies", "arguement" -> "argument").
            raise ValueError(
                f'Folder {morphology_folder} have multiple morphologies with the same '
                'name but different extensions. This is not supported.\n'
                f'Duplicate morphologies: {duplicates}\n\n'
                'Please provide the extension to use with the argument: extension'
            )

    paths = {path.stem: path for path in files}
    # NOTE(review): a name in ``mtypes`` absent from the folder raises KeyError
    # here — confirm whether that is the intended contract.
    return MorphDB(
        MorphInfo(name, mtype, label=label, path=paths[name])
        for name, mtype in mtypes)