def test_translate_fst_neuron_h5():
    t = np.array([100., 100., 100.])
    nrn = load_neuron(H5_NRN_PATH)
    tnrn = gtr.translate(nrn, t)
    _check_fst_nrn_translate(nrn, tnrn, t)
def test_translate_fst_neurite_h5():
    t = np.array([100., 100., 100.])
    nrn = load_neuron(H5_NRN_PATH)
    nrt_a = nrn.neurites[0]
    nrt_b = gtr.translate(nrt_a, t)
    _check_fst_neurite_translate(nrt_a, nrt_b, t)
def view(input_file, plane, backend):
    '''A simple neuron viewer'''
    if backend == 'matplotlib':
        from neurom.viewer import draw
        kwargs = {
            'mode': '3d' if plane == '3d' else '2d',
        }
        if plane != '3d':
            kwargs['plane'] = plane
        draw(load_neuron(input_file), **kwargs)
    else:
        from neurom.view.plotly import draw
        draw(load_neuron(input_file), plane=plane)

    if backend == 'matplotlib':
        import matplotlib.pyplot as plt
        plt.show()
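# Hypothetical usage of the viewer above (the file path and argument values are
# assumptions for illustration, not taken from the original code):
#   view('test_data/swc/Neuron.swc', plane='xy', backend='matplotlib')
#   view('test_data/swc/Neuron.swc', plane='3d', backend='plotly')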
def test_get_nonmonotonic_neurites():
    n = load_neuron(os.path.join(SWC_PATH, 'Neuron.swc'))
    nt.assert_equal(len(mt.get_nonmonotonic_neurites(n)), 4)
    _make_monotonic(n)
    nt.assert_equal(len(mt.get_nonmonotonic_neurites(n)), 0)
def test_terminal_length_per_neurite():
    nrn = nm.load_neuron(os.path.join(SWC_PATH, 'simple.swc'))
    terminal_distances = np.array(_nf.terminal_path_lengths_per_neurite(nrn))
    np.testing.assert_allclose(terminal_distances,
                               np.array([5 + 5., 5 + 6., 4. + 6., 4. + 5.]))
    terminal_distances = np.array(_nf.terminal_path_lengths_per_neurite(
        nrn, neurite_type=nm.AXON))
    np.testing.assert_allclose(terminal_distances,
                               np.array([4. + 6., 4. + 5.]))
def setUp(self):
    self.ref_nrn = 'swc'
    self.sec_nrn = nm.load_neuron(os.path.join(SWC_DATA_PATH, SWC_MORPH_FILENAME))
    self.sec_nrn_trees = [n.root_node for n in self.sec_nrn.neurites]
    self.ref_types = [NeuriteType.axon,
                      NeuriteType.basal_dendrite,
                      NeuriteType.basal_dendrite,
                      NeuriteType.apical_dendrite,
                      ]
def test_extract_stats_single_neuron():
    nrn = nm.load_neuron(os.path.join(DATA_PATH, 'Neuron.swc'))
    res = ms.extract_stats(nrn, REF_CONFIG)
    nt.eq_(res.keys(), REF_OUT.keys())
    nt.assert_almost_equal(res['mean_soma_radius'], REF_OUT['mean_soma_radius'])
    for k in ('all', 'axon', 'basal_dendrite', 'apical_dendrite'):
        nt.eq_(res[k].keys(), REF_OUT[k].keys())
        for kk in res[k].keys():
            nt.assert_almost_equal(res[k][kk], REF_OUT[k][kk])
def test_principal_direction_extents():
    # test with a realistic neuron
    nrn = nm.load_neuron(os.path.join(H5_PATH, 'bio_neuron-000.h5'))

    p_ref = [1672.9694359427331, 142.43704397865031, 226.45895382204986,
             415.50612748523838, 429.83008974193206, 165.95410536922873,
             346.83281498399697]

    p = _nf.principal_direction_extents(nrn)
    _close(np.array(p), np.array(p_ref))
def test_get_flat_neurites():
    n = load_neuron(os.path.join(SWC_PATH, 'Neuron.swc'))
    nt.assert_equal(len(mt.get_flat_neurites(n, 1e-6, method='tolerance')), 0)
    nt.assert_equal(len(mt.get_flat_neurites(n, 0.1, method='ratio')), 0)

    n = _make_flat(n)
    nt.assert_equal(len(mt.get_flat_neurites(n, 1e-6, method='tolerance')), 4)
    nt.assert_equal(len(mt.get_flat_neurites(n, 0.1, method='ratio')), 4)
def test_extract_stats_single_neuron():
    nrn = nm.load_neuron(os.path.join(DATA_PATH, 'Neuron.swc'))
    res = ms.extract_stats(nrn, REF_CONFIG)
    nt.eq_(set(res.keys()), set(REF_OUT.keys()))
    # Note: soma radius is calculated from the sphere that gives the area
    # of the cylinders described in Neuron.swc
    nt.assert_almost_equal(res['mean_soma_radius'], REF_OUT['mean_soma_radius'])

    for k in ('all', 'axon', 'basal_dendrite', 'apical_dendrite'):
        nt.eq_(set(res[k].keys()), set(REF_OUT[k].keys()))
        for kk in res[k].keys():
            nt.assert_almost_equal(res[k][kk], REF_OUT[k][kk])
def test_section_radial_distances_endpoint():
    ref_sec_rad_dist = nf.section_radial_distances(NEURON)

    rad_dists = fst_get('section_radial_distances', NEURON)

    nt.eq_(len(rad_dists), 84)
    nt.ok_(np.all(rad_dists == ref_sec_rad_dist))

    nrns = [nm.load_neuron(os.path.join(SWC_PATH, f)) for
            f in ('point_soma_single_neurite.swc', 'point_soma_single_neurite2.swc')]
    pop = Population(nrns)
    rad_dist_nrns = [nm.get('section_radial_distances', nrn) for nrn in nrns]
    rad_dist_pop = nm.get('section_radial_distances', pop)
    assert_items_equal(rad_dist_pop, rad_dist_nrns)
def test_segment_radial_distances_displaced_neurite():
    nrns = [nm.load_neuron(os.path.join(SWC_PATH, f)) for
            f in ('point_soma_single_neurite.swc', 'point_soma_single_neurite2.swc')]
    pop = Population(nrns)

    rad_dist_nrns = []
    for nrn in nrns:
        rad_dist_nrns.extend(nm.get('segment_radial_distances', nrn))
    rad_dist_nrns = np.array(rad_dist_nrns)

    rad_dist_pop = nm.get('segment_radial_distances', pop)
    nt.ok_(np.alltrue(rad_dist_pop == rad_dist_nrns))
def test_read():
    with warnings.catch_warnings(record=True):
        rdw = io.load_data(StringIO(MORPH_ASC), reader='asc')
    raw_data = rdw.data_block

    eq_(raw_data.shape, (19, 7))
    ok_(np.allclose(raw_data[:, COLS.ID], np.arange(0, 19)))  # correct ID
    # 3 is ID of end of the soma, 2 sections attach to this
    ok_(np.count_nonzero(raw_data[:, COLS.P] == 3), 2)

    with warnings.catch_warnings(record=True):
        neuron = load_neuron(StringIO(MORPH_ASC), reader='asc')
    assert_array_equal(neuron.neurites[0].root_node.points[:, COLS.XYZ],
                       [[0., 5., 0.],
                        [2., 9., 0.],
                        [0., 13., 0.],
                        [2., 13., 0.],
                        [4., 13., 0.]])
def test_iter_segments_section():
    sec = load_neuron(StringIO(u"""
        ((CellBody) (0 0 0 2))

        ((Dendrite)
         (1 2 3 8)
         (5 6 7 16)
         (8 7 6 10)
         (4 3 2 2))
    """), reader='asc').sections[1]

    ref = [[p1[COLS.XYZR].tolist(), p2[COLS.XYZR].tolist()]
           for p1, p2 in core.iter_segments(sec)]
    assert_array_equal(ref,
                       [[[1, 2, 3, 4], [5, 6, 7, 8]],
                        [[5, 6, 7, 8], [8, 7, 6, 5]],
                        [[8, 7, 6, 5], [4, 3, 2, 1]]])
def test_annotate():
    correct_result = """

 (Circle1   ; MUK_ANNOTATION
    (Color Blue)   ; MUK_ANNOTATION
    (Name "narrow start")   ; MUK_ANNOTATION
    ( 0.00 0.00 2.00 0.50)   ; MUK_ANNOTATION
)  ; MUK_ANNOTATION
"""

    checkers = {has_no_narrow_start: {"name": "narrow start",
                                      "label": "Circle1",
                                      "color": "Blue"}}

    neuron = load_neuron(SWC_PATH / 'narrow_start.swc')
    results = [checker(neuron) for checker in checkers.keys()]
    assert annotate(results, checkers.values()) == correct_result
def test_layout_dendrogram():
    def assert_layout(dendrogram):
        for i, child in enumerate(dendrogram.children):
            # child is higher than parent in Y coordinate
            nt.assert_greater_equal(positions[child][1],
                                    positions[dendrogram][1] + dendrogram.height)
            if i < len(dendrogram.children) - 1:
                next_child = dendrogram.children[i + 1]
                # X space between children is enough for their widths
                nt.assert_greater(positions[next_child][0] - positions[child][0],
                                  .5 * (next_child.width + child.width))
            assert_layout(child)

    neuron = load_neuron(NEURON_PATH)
    dendrogram = dm.Dendrogram(neuron)
    positions = dm.layout_dendrogram(dendrogram, np.array([0, 0]))
    assert_layout(dendrogram)
def test_has_no_narrow_dendritic_section():
    swc_content = StringIO(u"""
# index, type, x, y, z, radius, parent
    1 1  0  0 0 10. -1
    2 2  0  0 0 10.  1
    3 2  0 50 0 10.  2
    4 2 -5 51 0 10.  3
    5 2  6 53 0 10.  3
    6 3  0  0 0  5.  1  # start of the narrow section
    7 3  0 -4 0  5.  6
    8 3  6 -4 0 10.  7
    9 3 -5 -4 0 10.  7
""")
    nrn = load_neuron(swc_content, reader='swc')
    res = nrn_chk.has_no_narrow_neurite_section(nrn, dendrite_filter,
                                                radius_threshold=5,
                                                considered_section_min_length=0)
    nt.ok_(res.status)

    res = nrn_chk.has_no_narrow_neurite_section(nrn, dendrite_filter,
                                                radius_threshold=7,
                                                considered_section_min_length=0)
    nt.ok_(not res.status)

    swc_content = StringIO(u"""
# index, type, x, y, z, radius, parent
    1 1  0  0 0 10. -1
    2 2  0  0 0  5   1  # narrow soma
    3 2  0 50 0  5   2
    4 2 -5 51 0  5   3
    5 2  6 53 0  5   3
    6 3  0  0 0  5   1  # narrow axon
    7 3  0 -4 0 10.  6
    8 3  6 -4 0 10.  7
    9 3 -5 -4 0 10.  7
""")
    nrn = load_neuron(swc_content, reader='swc')
    res = nrn_chk.has_no_narrow_neurite_section(nrn, dendrite_filter,
                                                radius_threshold=5,
                                                considered_section_min_length=0)
    nt.ok_(res.status,
           'Narrow soma or axons should not raise bad status when checking for narrow dendrites')
def test_color_section(_):
    neuron = load_neuron(os.path.join(PATH, '..', 'tests', 'data', 'neuron.h5'))

    # Colorize first section of the neurite
    builder = NeuronBuilder(neuron, '3d')
    builder.color_section(neuron.neurites[1].root_node)
    assert_equal(len(builder.properties.values()), 1)
    assert_dict_equal(next(iter(builder.properties.values())),
                      {'color': 'green', 'range': slice(0, 23, None)})
    builder.plot()

    # Colorize all sections of the neurite
    builder = NeuronBuilder(neuron, '3d')
    section = neuron.neurites[2].root_node
    builder.color_section(section, color='gray', recursive=True)
    assert_equal(len(builder.properties.values()), 27)
    builder.plot()

    # Colorize only a fraction of the section
    builder = NeuronBuilder(neuron, '3d')
    builder.color_section(neuron.sections[159], color='black',
                          start_point=20, end_point=120)
    assert_dict_equal(next(iter(builder.properties.values())),
                      {'color': 'black', 'range': slice(20, 120, None)})
    builder.plot()

    # 2d
    builder = NeuronBuilder(neuron, 'xy')
    builder.color_section(neuron.neurites[1].root_node)
    assert_equal(len(builder.properties.values()), 1)
    assert_dict_equal(next(iter(builder.properties.values())),
                      {'color': 'green', 'range': slice(0, 23, None)})
    builder.plot()
def treeToNeuroM(fullState: FullState, tree: Tree) -> Any:
    result = None
    tmpFileDir = os.path.basename(__file__)
    with tempfile.NamedTemporaryFile(mode='wt+', suffix='.swc', prefix=tmpFileDir) as tmpFile:
        path = os.path.join(tmpFileDir, tmpFile.name)
        print("Writing temporary SWC to %s" % path)
        # Step 1: Export to SWC
        files.exportToSWC(tmpFileDir, tmpFile.name, tree, fullState, forNeuroM=True)
        # Step 2: Load SWC into NeuroM data type
        result = nm.load_neuron(path)
    return result
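# Hypothetical usage sketch for treeToNeuroM (the attribute names below are
# assumptions for illustration, not part of the function above): once converted,
# the result can be handed straight to NeuroM feature queries.
#   neuron = treeToNeuroM(fullState, fullState.trees[0])
#   total_length = sum(nm.get('section_lengths', neuron))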
def test_extract_stats_scalar_feature():
    nrn = nm.load_neuron(DATA_PATH / 'neurolucida' / 'bio_neuron-000.asc')
    config = {
        'neurite_type': ['ALL'],
        'neurite': {
            'n_forking_points': ['max'],
        },
        'neuron': {
            'soma_volume': ['total'],
        }
    }
    res = ms.extract_stats(nrn, config)
    assert res == {
        'all': {'max_n_forking_point': 277},
        'neuron': {'total_soma_volume': 1424.4383771584492}
    }
def test_segment_meander_angles_single_section():
    feat = 'segment_meander_angles'

    nrn = nm.load_neuron(StringIO(u"""((CellBody) (0 0 0 0))
                                      ((Dendrite)
                                       (0 0 0 2)
                                       (1 0 0 2)
                                       (1 1 0 2)
                                       (2 1 0 2)
                                       (2 2 0 2)))"""), reader='asc')

    nrt = nrn.neurites[0]
    pop = core.Population([nrn])

    ref = [math.pi / 2, math.pi / 2, math.pi / 2]
    nt.eq_(ref, get_feature(feat, nrt).tolist())
    nt.eq_(ref, get_feature(feat, nrn).tolist())
    nt.eq_(ref, get_feature(feat, pop).tolist())
def test_tree():
    neuron = load_neuron(Path(SWC_PATH, 'simple-different-section-types.swc'))
    expected_colors = {
        'black': np.array([[0., 0., 0., 1.] for _ in range(3)]),
        None: [[1., 0., 0., 1.],
               [1., 0., 0., 1.],
               [0.501961, 0., 0.501961, 1.]]
    }
    for input_color, expected_colors in expected_colors.items():
        with get_fig_2d() as (fig, ax):
            tree = neuron.neurites[0]
            view.plot_tree(ax, tree, color=input_color,
                           diameter_scale=None, alpha=1., linewidth=1.2)
            collection = ax.collections[0]
            eq_(collection.get_linewidth()[0], 1.2)
            assert_array_almost_equal(collection.get_colors(), expected_colors)

        with get_fig_2d() as (fig, ax):
            tree = neuron.neurites[0]
            view.plot_tree(ax, tree, color=input_color,
                           alpha=1., linewidth=1.2, realistic_diameters=True)
            collection = ax.collections[0]
            eq_(collection.get_linewidth()[0], 1.0)
            assert_array_almost_equal(collection.get_facecolors(), expected_colors)

    with get_fig_2d() as (fig, ax):
        assert_raises(AssertionError, view.plot_tree, ax, tree, plane='wrong')

    with get_fig_2d() as (fig, ax):
        tree = simple_neuron.neurites[0]
        view.plot_tree(ax, tree)
        assert_allclose(ax.dataLim.bounds, (-5., 0., 11., 5.), atol=1e-10)
def _check_file(self, f):
    """Run tests on a morphology file."""
    L.info('File: %s', f)

    full_result = True
    full_summary = OrderedDict()
    try:
        nrn = load_neuron(f)
        result, summary = self._check_loop(nrn, 'neuron_checks')
        full_result &= result
        full_summary.update(summary)
    except Exception as e:  # pylint: disable=W0703
        L.error('Check failed: %s', str(type(e)) + str(e.args))
        full_result = False

    full_summary['ALL'] = full_result

    for m, s in full_summary.items():
        self._log_msg(m, s)

    return full_result, {str(f): full_summary}
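# Hypothetical illustration of the return value of _check_file (check names and
# values are assumptions, not taken from the code above): an overall boolean
# status plus a per-file summary keyed by check name, e.g.
#   (False, {'Neuron.swc': OrderedDict([('has_no_narrow_start', False), ('ALL', False)])})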
def test_segment_radial_distances_origin():
    origin = (-100, -200, -300)
    ref_segs = nf.segment_radial_distances(NEURON)
    ref_segs_origin = nf.segment_radial_distances(NEURON, origin=origin)

    rad_dists = fst_get('segment_radial_distances', NEURON)
    rad_dists_origin = fst_get('segment_radial_distances', NEURON, origin=origin)

    nt.ok_(np.all(rad_dists == ref_segs))
    nt.ok_(np.all(rad_dists_origin == ref_segs_origin))
    nt.ok_(np.all(rad_dists_origin != ref_segs))

    nrns = [nm.load_neuron(os.path.join(SWC_PATH, f)) for
            f in ('point_soma_single_neurite.swc', 'point_soma_single_neurite2.swc')]
    pop = Population(nrns)
    rad_dist_nrns = []
    for nrn in nrns:
        rad_dist_nrns.extend(nm.get('segment_radial_distances', nrn))
    rad_dist_nrns = np.array(rad_dist_nrns)

    rad_dist_pop = nm.get('segment_radial_distances', pop)
    assert_allclose(rad_dist_nrns, rad_dist_pop)
def test_legacy_compare_with_legacy_result():
    '''Comparing results with the old repair launched with the following command:

    repair --dounravel 0 --inputdir /gpfs/bbp.cscs.ch/project/proj83/home/bcoste/release/out-new/01_ConvertMorphologies --input rp120430_P-2_idA --overlap=true --incremental=false --restrict=true --distmethod=mirror

    The arguments are the ones used in the legacy morphology workflow.
    '''
    neuron = load_neuron(DATA_PATH / 'compare-bbpsdk/rp120430_P-2_idA.h5')
    points, sign = test_module.internal_cut_detection(neuron, 'z')
    assert_equal(sign, 1)
    cut_sections = {point_to_section_segment(neuron, point)[0] for point in points}

    legacy_cut_sections = {
        13, 14, 17, 18, 38, 39, 40, 45, 58, 67, 68, 69, 73, 75, 76, 93, 94, 101, 102, 103,
        105, 106, 109, 110, 111, 120, 124, 125, 148, 149, 150, 156, 157, 158, 162, 163, 164,
        166, 167, 168, 169, 192, 201, 202, 203, 205, 206, 208
    }
    assert_equal(cut_sections, legacy_cut_sections)
def test_skip_header():
    """Test that the header does not cause any issue"""
    str_neuron = """(FilledCircle
                     (Color RGB (64, 0, 128))
                     (Name "Marker 11")
                     (Set "axons")
                     ( -189.59 55.67 28.68 0.12)  ; 1
                     )  ;  End of markers

                     ((Color Yellow)
                     (Axon)
                     (Set "axons")
                     ( 1.2 2.7 1.0 13)
                     ( 1.2 3.7 2.0 13)
                     )"""

    n = nm.load_neuron(str_neuron, reader='asc')
    assert len(n.neurites) == 1
    assert_array_equal(n.neurites[0].points,
                       np.array([[1.2, 2.7, 1.0, 6.5],
                                 [1.2, 3.7, 2.0, 6.5]], dtype=np.float32))
def test_single_neurite_no_soma():
    string_section = u"""
                         ( (Color Yellow)
                         (Axon)
                         (Set "axons")
                         ;; An commented line and some empty lines

                         ( 1.2 2.7 1.0 13)  ;; Some comment
                         ( 1.2 3.7 2.0 13)

                         Generated
                         )  ; End of tree"""
    n = nm.load_neuron(string_section, reader='asc')

    assert_array_equal(n.soma.points, np.empty((0, 4)))
    assert len(n.neurites) == 1
    assert_array_equal(n.neurites[0].points,
                       np.array([[1.2, 2.7, 1.0, 6.5],
                                 [1.2, 3.7, 2.0, 6.5]], dtype=np.float32))
def test_empty_sibling():
    n = load_neuron("""
                    ((Dendrite)
                      (3 -4 0 2)
                      (3 -6 0 2)
                      (3 -8 0 2)
                      (3 -10 0 2)
                      (
                        (3 -10 0 2)
                        (0 -10 0 2)
                        (-3 -10 0 2)
                        |
                      )
                    )
                    """, reader='asc')

    assert_array_equal(n.neurites[0].points,
                       np.array([[3, -4, 0, 1],
                                 [3, -6, 0, 1],
                                 [3, -8, 0, 1],
                                 [3, -10, 0, 1],
                                 [0, -10, 0, 1],
                                 [-3, -10, 0, 1]],
                                dtype=np.float32))
def test_annotate():
    correct_result = """

 (Circle1   ; MUK_ANNOTATION
    (Color Blue)   ; MUK_ANNOTATION
    (Name "narrow start")   ; MUK_ANNOTATION
    (0.0 0.0 0.0 0.50)   ; MUK_ANNOTATION
    (0.0 0.0 0.0 0.50)   ; MUK_ANNOTATION
)  ; MUK_ANNOTATION
"""

    checkers = {has_no_narrow_start: {"name": "narrow start",
                                      "label": "Circle1",
                                      "color": "Blue"}}

    neuron = load_neuron('test_data/swc/Neuron_zero_radius.swc')
    results = [checker(neuron) for checker in checkers.keys()]
    nt.assert_equal(annotate(results, checkers.values()), correct_result)
def test_read_with_duplicates():
    """Section points are duplicated in the file"""
    # what I think the
    # https://developer.humanbrainproject.eu/docs/projects/morphology-documentation/0.0.2/h5v1.html
    # would look like
    n = load_neuron(StringIO(with_duplicate), reader='asc')

    assert len(n.neurites) == 1

    assert_array_equal(n.neurites[0].points,
                       # Duplicate points are not present
                       [[3, -4, 0, 1],
                        [3, -6, 0, 1],
                        [3, -8, 0, 1],
                        [3, -10, 0, 1],
                        [0, -10, 0, 1],
                        [-3, -10, 0, 1],
                        [6, -10, 0, 1],
                        [9, -10, 0, 1]])

    assert_array_equal(n.neurites[0].root_node.points,
                       [[3, -4, 0, 1],
                        [3, -6, 0, 1],
                        [3, -8, 0, 1],
                        [3, -10, 0, 1]])

    assert_array_equal(n.neurites[0].root_node.children[0].points,
                       [[3, -10, 0, 1],
                        [0, -10, 0, 1],
                        [-3, -10, 0, 1]])

    assert_array_equal(n.neurites[0].root_node.children[1].points,
                       [[3, -10, 0, 1],
                        [6, -10, 0, 1],
                        [9, -10, 0, 1]])
def test_has_nonzero_soma_radius_bad_data():
    nrn = load_neuron(os.path.join(SWC_PATH, 'Single_basal.swc'))
    nt.assert_false(nrn_chk.has_nonzero_soma_radius(nrn).status)
def setUp(self):
    self.ref_pop = Population([load_pt_neuron(f, mt.set_tree_type) for f in NRN_PATHS])
    self.fst_pop = Population([nm.load_neuron(f) for f in NRN_PATHS])
    self.ref_types = [t.type for t in self.ref_pop.neurites]
def setUp(self):
    super(TestH5V2, self).setUp()
    self.sec_nrn = nm.load_neuron(os.path.join(H5V2_DATA_PATH, MORPH_FILENAME))
    self.sec_nrn_trees = [n.root_node for n in self.sec_nrn.neurites]
def test_legacy_compare_with_legacy_result():
    '''Comparing results with the old repair launched with the following command:

    repair --dounravel 0 --inputdir /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/04_ZeroDiameterFix --input rp120430_P-2_idA --overlap=true --incremental=false --restrict=true --distmethod=mirror

    The arguments are the ones used in the legacy morphology workflow.
    '''
    neuron = load_neuron(DATA_PATH / 'compare-bbpsdk/rp120430_P-2_idA.h5')
    obj = test_module.Repair(inputfile=DATA_PATH / 'compare-bbpsdk/rp120430_P-2_idA.h5',
                             legacy_detection=True)
    cut_sections = {point_to_section_segment(neuron, point)[0] for point in obj.cut_leaves}

    legacy_cut_sections = {
        13, 14, 17, 18, 38, 39, 40, 45, 58, 67, 68, 69, 73, 75, 76, 93, 94, 101, 102, 103,
        105, 106, 109, 110, 111, 120, 124, 125, 148, 149, 150, 156, 157, 158, 162, 163, 164,
        166, 167, 168, 169, 192, 201, 202, 203, 205, 206, 208
    }
    assert_equal(cut_sections, legacy_cut_sections)

    obj._fill_repair_type_map()
    types = defaultdict(list)
    for k, v in obj.repair_type_map.items():
        types[v].append(k)

    # offset due to the first section id in the old soft being the soma
    offset = 1

    # These numbers come from the attribute 'apical' of the h5py group 'neuron1'
    section_id, segment_id = 134, 8
    assert_equal(obj.apical_section.id + offset, section_id)
    assert_equal(len(obj.apical_section.points) - 1, segment_id)

    assert_array_equal([section.id + offset for section in types[RepairType.basal]],
                       [90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105,
                        106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119,
                        120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132])

    assert_array_equal([0] + [section.id + offset for section in types[RepairType.axon]],
                       [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
                        20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37,
                        38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55,
                        56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73,
                        74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89])

    assert_array_equal([section.id + offset for section in types[RepairType.oblique]],
                       [217, 218, 219])

    assert_array_equal([section.id + offset for section in types[RepairType.trunk]],
                       [133, 134])

    expected_tufts = {135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148,
                      149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162,
                      163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176,
                      177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190,
                      191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204,
                      205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216}
    actual_tufts = {section.id + offset for section in types[RepairType.tuft]}
    assert_equal(actual_tufts, expected_tufts)

    expected_axons = {1, 2, 77, 3, 70, 4, 59, 5, 46, 6, 41, 7, 40, 8, 39, 9, 38, 10, 19, 11,
                      16, 12, 15, 13, 14, 17, 18, 20, 37, 21, 34, 22, 31, 23, 28, 24, 27, 25,
                      26, 29, 30, 32, 33, 35, 36, 42, 45, 43, 44, 47, 58, 48, 53, 49, 52, 50,
                      51, 54, 55, 56, 57, 60, 69, 61, 66, 62, 63, 64, 65, 67, 68, 71, 76, 72,
                      75, 73, 74, 78, 83, 79, 82, 80, 81, 84, 85, 86, 89, 87, 88}
    actual_axons = {section.id + offset for section in types[RepairType.axon]}
    assert_equal(actual_axons, expected_axons)

    intacts = defaultdict(list)
    for sec in obj._find_intact_sub_trees():
        intacts[obj.repair_type_map[sec]].append(sec)

    assert_equal([sec.id + offset for sec in intacts[RepairType.trunk]], [])
    assert_equal([sec.id + offset for sec in intacts[RepairType.oblique]], [217])
    assert_equal({sec.id + offset for sec in intacts[RepairType.tuft]},
                 {135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 151, 152,
                  153, 154, 155, 159, 160, 161, 165, 170, 171, 172, 173, 174, 175, 176, 177,
                  178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 193,
                  194, 195, 196, 197, 198, 199, 200, 204, 207, 209, 210, 211, 212, 213, 214,
                  215, 216})
def test_has_nonzero_soma_radius():
    nrn = load_neuron(os.path.join(SWC_PATH, 'Neuron.swc'))
    nt.assert_true(nrn_chk.has_nonzero_soma_radius(nrn))
def _load_neuron(name):
    if name.endswith('.swc'):
        path = os.path.join(SWC_PATH, name)
    elif name.endswith('.h5'):
        path = os.path.join(H5V1_PATH, name)
    return name, load_neuron(path)
def setUp(self):
    self.ref_nrn = load_pt_neuron(os.path.join(SWC_DATA_PATH, SWC_MORPH_FILENAME),
                                  mt.set_tree_type)
    self.sec_nrn = nm.load_neuron(os.path.join(SWC_DATA_PATH, SWC_MORPH_FILENAME))
    self.sec_nrn_trees = [n.root_node for n in self.sec_nrn.neurites]
    self.ref_types = [n.type for n in self.ref_nrn.neurites]
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

'''Compatibility between NL and H5 files'''
# pylint: disable=protected-access

import numpy as np

import neurom as nm
from neurom.fst import _neuritefunc as _nf

nrn_h5 = nm.load_neuron('test_data/h5/v1/bio_neuron-001.h5')
nrn_asc = nm.load_neuron('test_data/neurolucida/bio_neuron-001.asc')

print('h5 number of sections: %s' % nm.get('number_of_sections', nrn_h5)[0])
print('nl number of sections: %s\n' % nm.get('number_of_sections', nrn_asc)[0])
print('h5 number of segments: %s' % nm.get('number_of_segments', nrn_h5)[0])
print('nl number of segments: %s\n' % nm.get('number_of_segments', nrn_asc)[0])
print('h5 total neurite length: %s' % np.sum(nm.get('section_lengths', nrn_h5)))
print('nl total neurite length: %s\n' % np.sum(nm.get('section_lengths', nrn_asc)))
print('h5 principal direction extents: %s' % nm.get('principal_direction_extents', nrn_h5))
print('nl principal direction extents: %s' % nm.get('principal_direction_extents', nrn_asc))

print('\nNumber of neurites:')
for nt in nm.NeuriteType:
    print(nt,
          _nf.n_neurites(nrn_h5, neurite_type=nt),
          _nf.n_neurites(nrn_asc, neurite_type=nt))
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

'''Get sections and segments by ID'''

import neurom as nm
from neurom import morphmath as mm
from neurom.core.dataformat import COLS


def get_segment(neuron, section_id, segment_id):
    '''Get a segment given a section and segment id

    Returns:
        array of two [x, y, z, r] points defining segment
    '''
    sec = neuron.sections[section_id]
    return sec.points[segment_id:segment_id + 2][:, 0:4]


if __name__ == '__main__':
    nrn = nm.load_neuron('test_data/h5/v1/Neuron.h5')

    seg = get_segment(nrn, 3, 2)
    print('Segment:\n', seg)
    print('Mid-point (x, y, z):\n', mm.linear_interpolate(seg[0], seg[1], 0.5))
    print('Mid-point R:\n', mm.interpolate_radius(seg[0][COLS.R], seg[1][COLS.R], 0.5))
import numpy as np

_path = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = os.path.join(_path, '../test_data')
SWC_PATH = os.path.join(DATA_PATH, 'swc')


def random_color():
    '''Random color generation'''
    return np.random.rand(3, 1)


def plot_somas(somas):
    '''Plot set of somas on same figure as spheres, each with different color'''
    _, ax = common.get_figure(new_fig=True, subplot=111,
                              params={'projection': '3d', 'aspect': 'equal'})
    for s in somas:
        common.plot_sphere(ax, s.center, s.radius, color=random_color(), alpha=1)
    plt.show()


if __name__ == '__main__':
    # define set of files containing relevant neurons
    file_nms = [os.path.join(SWC_PATH, file_nm)
                for file_nm in ['Soma_origin.swc',
                                'Soma_translated_1.swc',
                                'Soma_translated_2.swc']]

    # load from file and plot
    sms = [load_neuron(file_nm).soma for file_nm in file_nms]
    plot_somas(sms)
import matplotlib
if 'DISPLAY' not in os.environ:  # noqa
    matplotlib.use('Agg')  # noqa

from neurom.view import common, plotly
import neurom
from neurom import load_neuron, viewer
from nose import tools as nt

_PWD = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = os.path.join(_PWD, '../../test_data/swc')
MORPH_FILENAME = os.path.join(DATA_PATH, 'Neuron.swc')

nrn = load_neuron(MORPH_FILENAME)


def _reload_module(module):
    '''Force module reload'''
    if sys.version_info >= (3, ):
        import importlib
        importlib.reload(module)
    else:
        reload(module)


def test_plotly_extra_not_installed():
    with mock.patch.dict(sys.modules, {'plotly': None}):
        try:
            _reload_module(neurom.view.plotly)
import os
from io import StringIO
from os.path import join as joinp

from nose import tools as nt
from numpy.testing import assert_array_equal

import neurom as nm
from neurom import COLS, core, load_neuron
from neurom._compat import filter
from neurom.core import NeuriteIter, Tree

_path = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = joinp(_path, '../../../test_data')

NRN1 = load_neuron(joinp(DATA_PATH, 'swc/Neuron.swc'))

NEURONS = [
    NRN1,
    load_neuron(joinp(DATA_PATH, 'swc/Single_basal.swc')),
    load_neuron(joinp(DATA_PATH, 'swc/Neuron_small_radius.swc')),
    load_neuron(joinp(DATA_PATH, 'swc/Neuron_3_random_walker_branches.swc')),
]
TOT_NEURITES = sum(len(N.neurites) for N in NEURONS)

REVERSED_NEURITES = load_neuron(joinp(DATA_PATH, 'swc/ordering/reversed_NRN_neurite_order.swc'))

POP = core.Population(NEURONS, name='foo')
    'TDA'
])

thicknesspath = pd.DataFrame(columns=['id', 'species', 'layer', 'pathlengths', 'thicknesses'])

for index, file in enumerate(files):
    print('Analyzing file #' + str(index + 1) + ' out of ' + str(len(files)))
    # if index>0:
    #     fig.add_subplot(7,10,index+1, sharex=ax, sharey=ax)
    # plt.axis('off')
    fnsplit = file[:-4].split('_')
    nrnid = int(fnsplit[2])
    species = fnsplit[0][:-2]
    layer = fnsplit[1]
    try:
        nrn = nm.load_neuron(os.path.join(morphdir, file))
        markerfn = [a for a in markerfiles if str(nrnid) in a]
        if markerfn:
            markerdata = pd.read_csv(os.path.join(morphdir, markerfn[0]))
            if 'name' in markerdata.columns:
                markerdata = markerdata.drop(markerdata[markerdata['name'] == 30].index)
        else:
            print('no markerfile found for neuron ' + str(nrnid))

        # get neuron stats
        axon = get_sections(nrn, 'axon')
        dendBasal = get_sections(nrn, 'basal_dendrite')
        dendApical = get_sections(nrn, 'apical_dendrite')
        dend = dendBasal + dendApical
"""Test neurom._neuronfunc functionality""" from nose import tools as nt import os import numpy as np from neurom import fst, load_neuron, NeuriteType from neurom.fst import _neuronfunc as _nf from neurom.core import make_soma, Neurite, Section from neurom.core.population import Population from utils import _close, _equal _PWD = os.path.dirname(os.path.abspath(__file__)) H5_PATH = os.path.join(_PWD, "../../../test_data/h5/v1/") NRN = load_neuron(os.path.join(H5_PATH, "Neuron.h5")) SWC_PATH = os.path.join(_PWD, "../../../test_data/swc") SIMPLE = load_neuron(os.path.join(SWC_PATH, "simple.swc")) def test_soma_surface_area(): ret = _nf.soma_surface_area(SIMPLE) nt.eq_(ret, 12.566370614359172) def test_soma_surface_areas(): ret = _nf.soma_surface_areas(SIMPLE) nt.eq_(ret, [12.566370614359172])
import numpy as np
from numpy.testing import assert_allclose

import neurom as nm
from neurom.geom import convex_hull
from neurom.fst import _neuritefunc as _nf
from neurom.fst.sectionfunc import section_volume
from neurom.core import tree as tr
from neurom.core import Section, Neurite, Population
from utils import _close, _equal

_PWD = os.path.dirname(os.path.abspath(__file__))
H5_PATH = os.path.join(_PWD, '../../../test_data/h5/v1/')
DATA_PATH = os.path.join(H5_PATH, 'Neuron.h5')
SWC_PATH = os.path.join(_PWD, '../../../test_data/swc')
SIMPLE = nm.load_neuron(os.path.join(SWC_PATH, 'simple.swc'))
NRN = nm.load_neuron(DATA_PATH)


def test_principal_direction_extents():
    # test with a realistic neuron
    nrn = nm.load_neuron(os.path.join(H5_PATH, 'bio_neuron-000.h5'))

    p_ref = [1672.9694359427331, 142.43704397865031, 226.45895382204986,
             415.50612748523838, 429.83008974193206, 165.95410536922873,
             346.83281498399697]

    p = _nf.principal_direction_extents(nrn)
    _close(np.array(p), np.array(p_ref))
def test_transform_translate_neuron_h5():
    t = np.array([100., 100., 100.])
    nrn = load_neuron(H5_NRN_PATH)
    tnrn = nrn.transform(gtr.Translation(t))
    _check_fst_nrn_translate(nrn, tnrn, t)
from morphio import SectionType
from neurom import COLS, NeuriteType, load_neuron
from nose.tools import assert_dict_equal, assert_equal, ok_
from numpy.testing import assert_array_almost_equal, assert_array_equal

import neuror.main as test_module
from neuror.main import Action, Repair
from neuror.utils import RepairType

from .expected_sholl_stats import SHOLL_STATS

DATA_PATH = Path(__file__).parent / 'data'

SIMPLE_PATH = DATA_PATH / 'simple.swc'
SLICE_PATH = DATA_PATH / 'neuron-slice.h5'
SIMPLE = load_neuron(SIMPLE_PATH)
SLICE = load_neuron(SLICE_PATH)


class DummySection:
    def __init__(self, points, children=None):
        self.points = np.array(points)
        self.children = children or []


def test_is_cut_section():
    section = SIMPLE.neurites[0].root_node
    assert_equal(test_module.is_cut_section(section, np.array([[2, 2, 2]])), False)
    assert_equal(test_module.is_cut_section(section, np.array([[0, 0, 0]])),
def test_rotate_neuron_swc():
    nrn_a = load_neuron(SWC_NRN_PATH)
    nrn_b = gtr.rotate(nrn_a, [0, 0, 1], math.pi / 2.0)

    rot = gtr._rodrigues_to_dcm([0, 0, 1], math.pi / 2.0)
    _check_fst_nrn_rotate(nrn_a, nrn_b, rot)
def test_legacy_compare_with_legacy_result2():
    '''Comparing results with the old repair launched with the following command:

    repair --dounravel 0 --inputdir /gpfs/bbp.cscs.ch/project/proj83/home/gevaert/morph-release/morph_release_old_code-2020-07-27/output/04_ZeroDiameterFix --input vd100714B_idB --overlap=true --incremental=false --restrict=true --distmethod=mirror

    The arguments are the ones used in the legacy morphology workflow.
    '''
    neuron = load_neuron(DATA_PATH / 'compare-bbpsdk/vd100714B_idB.h5')
    obj = test_module.Repair(inputfile=DATA_PATH / 'compare-bbpsdk/vd100714B_idB.h5',
                             legacy_detection=True)
    cut_sections = {point_to_section_segment(neuron, point)[0] for point in obj.cut_leaves}

    legacy_cut_sections = {
        62, 64, 65, 69, 73, 77, 78, 85, 87, 88, 89, 91, 93, 94, 115, 116, 119, 120, 125, 126,
        130, 133, 136, 137, 138, 140, 142, 144, 145, 147, 150, 151, 152, 159, 165, 171, 172,
        175, 177, 179, 180, 182, 184, 188, 191, 200, 202, 204, 205, 207, 208, 209, 211, 215,
        217, 218, 219, 220, 238, 239, 241, 247, 248, 250, 251, 252, 253, 256, 257, 258, 261,
        262, 264, 266, 267, 283, 288, 289, 290, 291, 293, 294, 295, 316, 318, 320, 322, 324,
        326, 328, 330, 331, 337, 338, 339, 340, 343, 344, 345, 351, 357, 359, 362, 363, 371,
        372, 375, 377, 378, 384, 385, 386, 387, 388, 390, 391, 394, 416, 426, 427, 429, 430,
        431, 438, 439, 440, 441, 453, 466, 468, 470, 471, 481, 486, 487, 488, 489, 527, 528,
        529, 533, 534, 538, 540, 541, 543, 545, 548, 549, 551, 572, 573, 574, 576, 577, 581,
        583, 584, 588, 595, 596, 598, 599, 602, 607, 608, 609, 610, 613, 614, 615, 617, 620,
        622, 623, 624, 626, 637, 639, 640, 645, 647, 648, 649, 650, 653, 654, 665, 666, 667,
        670, 677, 678, 679, 680, 689, 691, 693, 694, 703, 716, 717, 721, 723, 725, 726
    }
    assert_equal(cut_sections, legacy_cut_sections)

    obj._fill_repair_type_map()
    types = defaultdict(list)
    for k, v in obj.repair_type_map.items():
        types[v].append(k)

    # offset due to the first section id in the old soft being the soma
    offset = 1

    assert_equal(obj.apical_section, None)

    assert_equal({section.id + offset for section in types[RepairType.basal]},
                 {650, 651, 668, 671, 702, 719, 652, 655, 653, 654, 656, 663, 657, 662, 658,
                  661, 659, 660, 664, 667, 665, 666, 669, 670, 672, 681, 673, 680, 674, 679,
                  675, 678, 676, 677, 682, 695, 683, 684, 685, 686, 687, 690, 688, 689, 691,
                  692, 693, 694, 696, 699, 697, 698, 700, 701, 703, 704, 705, 706, 707, 710,
                  708, 709, 711, 718, 712, 713, 714, 715, 716, 717, 720, 727, 721, 722, 723,
                  724, 725, 726, 728, 735, 729, 730, 731, 734, 732, 733, 736, 737, 738})

    assert_array_equal([section.id + offset for section in types[RepairType.oblique]], [])
    assert_array_equal([section.id + offset for section in types[RepairType.trunk]], [])
    assert_equal({section.id + offset for section in types[RepairType.tuft]}, set())

    intacts = defaultdict(list)
    for sec in obj._find_intact_sub_trees():
        intacts[obj.repair_type_map[sec]].append(sec)

    # Since there is no apical dendrite, all of those are empty
    for extended_type in [RepairType.trunk, RepairType.oblique, RepairType.tuft]:
        assert_equal(intacts[extended_type], [])

    assert_equal({sec.id + offset for sec in intacts[RepairType.basal]},
                 {651, 668, 671, 702, 719, 652, 655, 656, 663, 657, 662, 658, 661, 659, 660,
                  664, 669, 672, 681, 673, 674, 675, 676, 682, 695, 683, 684, 685, 686, 687,
                  690, 688, 692, 696, 699, 697, 698, 700, 701, 704, 705, 706, 707, 710, 708,
                  709, 711, 718, 712, 713, 714, 715, 720, 727, 722, 724, 728, 735, 729, 730,
                  731, 734, 732, 733, 736, 737, 738})
def test_rotate_neurite_h5():
    nrn_a = load_neuron(H5_NRN_PATH)
    nrt_a = nrn_a.neurites[0]
    nrt_b = gtr.rotate(nrt_a, [0, 0, 1], math.pi / 2.0)

    rot = gtr._rodrigues_to_dcm([0, 0, 1], math.pi / 2.0)
    _check_fst_neurite_rotate(nrt_a, nrt_b, rot)
    n_volume_ref = [n.volume for n in iter_neurites(POP, filt=_is_type(NeuriteType.basal_dendrite))]
    n_volume = fst.get('bar', POP, neurite_type=NeuriteType.basal_dendrite)
    assert_items_equal(n_volume, n_volume_ref)


@nt.raises(NeuroMError)
def test_register_existing_feature_raises():
    fst.register_neurite_feature('total_length', lambda n: None)


_PWD = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = os.path.join(_PWD, '../../../test_data')
SWC_PATH = os.path.join(DATA_PATH, 'swc')
NEURON_PATH = os.path.join(SWC_PATH, 'Neuron.swc')
NEURON = load_neuron(NEURON_PATH)


def test_section_lengths():
    ref_seclen = [n.length for n in iter_sections(NEURON)]
    seclen = fst_get('section_lengths', NEURON)
    nt.eq_(len(seclen), 84)
    assert_allclose(seclen, ref_seclen)


def test_section_lengths_axon():
    s = fst_get('section_lengths', NEURON, neurite_type=NeuriteType.axon)
    nt.eq_(len(s), 21)


def test_total_lengths_basal():
def test_transform_rotate_neuron_h5():
    rot = gtr.Rotation(ROT_90)
    nrn_a = load_neuron(H5_NRN_PATH)
    nrn_b = nrn_a.transform(rot)
    _check_fst_nrn_rotate(nrn_a, nrn_b, ROT_90)
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os
from os.path import join as joinp

from nose import tools as nt

from neurom.core.population import Population
from neurom import load_neuron

_path = os.path.dirname(os.path.abspath(__file__))
DATA_PATH = joinp(_path, '../../../test_data')

NRN1 = load_neuron(joinp(DATA_PATH, 'swc/Neuron.swc'))
NRN2 = load_neuron(joinp(DATA_PATH, 'swc/Single_basal.swc'))
NRN3 = load_neuron(joinp(DATA_PATH, 'swc/Neuron_small_radius.swc'))

NEURONS = [NRN1, NRN2, NRN3]
TOT_NEURITES = sum(len(N.neurites) for N in NEURONS)
POP = Population(NEURONS, name='foo')


def test_population():
    nt.assert_equal(len(POP.neurons), 3)
    nt.ok_(POP.neurons[0].name, 'Neuron')
    nt.ok_(POP.neurons[1].name, 'Single_basal')
    nt.ok_(POP.neurons[2].name, 'Neuron_small_radius')

    nt.assert_equal(len(POP.somata), 3)
            'std': np.std(data),
            'min': np.min(data),
            'max': np.max(data)}


def pprint_stats(data):
    '''Pretty print summary stats for data'''
    pprint(stats(data))


if __name__ == '__main__':

    filename = 'test_data/swc/Neuron.swc'

    # load a neuron from an SWC file
    nrn = nm.load_neuron(filename)

    # Get some soma information
    # Soma radius and surface area
    print("Soma radius", nm.get('soma_radii', nrn)[0])
    print("Soma surface area", nm.get('soma_surface_areas', nrn)[0])

    # Get information about neurites
    # Most neurite data can be queried for a particular type of neurite.
    # The allowed types are members of the NeuriteType enumeration.
    # NEURITE_TYPES is a list of valid neurite types.

    # We start by calling methods for different neurite types separately
    # to warm up...

    # number of neurites
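    # A plausible continuation of the example above (not in the excerpt): query
    # the neurite count per type. 'number_of_neurites' and nm.NEURITE_TYPES are
    # standard NeuroM names, but treat this block as an illustrative sketch.
    for ttype in nm.NEURITE_TYPES:
        print(ttype, nm.get('number_of_neurites', nrn, neurite_type=ttype)[0])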