def test_get_files_by_path():
    """Verify get_files_by_path on a single file, a directory, and a bad path."""
    # A single morphology file resolves to a one-element collection.
    found = utils.get_files_by_path(NO_SOMA_FILE)
    nt.eq_(len(found), 1)
    # A data directory resolves to all six morphology files inside it.
    found = utils.get_files_by_path(VALID_DATA_PATH)
    nt.eq_(len(found), 6)
    # A path that does not exist raises IOError.
    nt.assert_raises(IOError, utils.get_files_by_path, 'this/is/a/fake/path')
def test_get_files_by_path():
    """Verify get_files_by_path on a single file, a directory, and a bad path."""
    # A single morphology file resolves to a one-element collection.
    assert len(utils.get_files_by_path(NO_SOMA_FILE)) == 1
    # A data directory resolves to every morphology file it contains.
    assert len(utils.get_files_by_path(VALID_DATA_PATH)) == 4
    # A path that does not exist raises IOError.
    with pytest.raises(IOError):
        utils.get_files_by_path(Path('this/is/a/fake/path'))
def main(datapath, config, output_file, is_full_config, as_population,
         ignored_exceptions):
    """Get statistics for morphologies and write them to ``output_file``.

    Args:
        datapath (str|Path): path to a morphology file or folder
        config (str|Path): path to a statistics config file
        output_file (str|Path): path to output the resulted statistics file;
            a falsy value prints the JSON result to stdout instead
        is_full_config (bool): should be statistics made over all possible
            features, modes, neurites
        as_population (bool): treat ``datapath`` as directory of morphologies
            population
        ignored_exceptions (list|tuple|None): exceptions to ignore when
            loading a morphology

    Raises:
        ConfigError: if the statistics config fails to load or sanitize.
    """
    if is_full_config:
        config = full_config()
    else:
        try:
            config = get_config(config, EXAMPLE_CONFIG)
            config = sanitize_config(config)
        except ConfigError as e:
            # Log before re-raising so the CLI user sees the reason.
            L.error(e)
            raise

    if ignored_exceptions is None:
        ignored_exceptions = ()
    # Map symbolic exception names to the actual classes neurom should skip.
    ignored_exceptions = tuple(IGNORABLE_EXCEPTIONS[k] for k in ignored_exceptions)
    neurons = nm.load_neurons(get_files_by_path(datapath),
                              ignored_exceptions=ignored_exceptions)

    results = {}
    if as_population:
        # One entry keyed by the input path, statistics over the whole set.
        results[datapath] = extract_stats(neurons, config)
    else:
        # One entry per morphology, keyed by its name.
        for neuron in tqdm(neurons):
            results[neuron.name] = extract_stats(neuron, config)

    if not output_file:
        print(json.dumps(results, indent=2, separators=(',', ':'),
                         cls=NeuromJSON))
    # str() so a pathlib.Path output_file works too — the docstring allows
    # str|Path, but Path has no .endswith(); previously this raised
    # AttributeError for Path inputs.
    elif str(output_file).endswith('.json'):
        with open(output_file, 'w') as f:
            json.dump(results, f, cls=NeuromJSON)
    else:
        # newline='' is required by the csv module; without it extra blank
        # rows appear on Windows.
        with open(output_file, 'w', newline='') as f:
            csvwriter = csv.writer(f)
            header = get_header(results)
            csvwriter.writerow(header)
            for line in generate_flattened_dict(header, dict(results)):
                csvwriter.writerow(line)
def run(self, path):
    """Test a bunch of files and return a summary JSON report."""
    separator = '=' * 40
    summary = {}
    all_passed = True
    for checked_file in utils.get_files_by_path(path):
        L.info(separator)
        passed, file_summary = self._check_file(checked_file)
        all_passed &= passed
        if file_summary is not None:
            summary.update(file_summary)
    L.info(separator)
    # Overall verdict: FAIL if any single file failed its checks.
    verdict = 'PASS' if all_passed else 'FAIL'
    return {'files': summary, 'STATUS': verdict}
def run(self, path):
    """Test a bunch of files and return a summary JSON report."""
    banner = '=' * 40
    report = {}
    overall = True
    for candidate in utils.get_files_by_path(path):
        L.info(banner)
        ok, file_report = self._check_file(candidate)
        overall &= ok
        if file_report is not None:
            report.update(file_report)
    L.info(banner)
    # Overall verdict: FAIL if any single file failed its checks.
    return {'files': report, 'STATUS': 'PASS' if overall else 'FAIL'}