def test_load_trees_good_neuron():
    '''Check trees in good neuron are the same as trees from loaded neuron'''
    filepath = os.path.join(SWC_PATH, 'Neuron.swc')
    neuron = utils.load_neuron(filepath)
    trees = utils.load_trees(filepath)
    nt.eq_(len(neuron.neurites), 4)
    nt.eq_(len(neuron.neurites), len(trees))
    mock_neuron = MockNeuron(trees)

    @pts.point_function(as_tree=False)
    def elem(point):
        '''Identity point function: yields each point's raw data unchanged.'''
        return point

    # Point data must match between the neuron loaded directly and the
    # neuron assembled from the separately loaded trees.
    for pt_a, pt_b in izip(iter_neurites(neuron, elem),
                           iter_neurites(mock_neuron, elem)):
        nt.ok_(np.all(pt_a == pt_b))
def test_load_neuron_no_consecutive_ids_loads():
    '''A file with non-consecutive point IDs must still load.'''
    utils.load_neuron(NON_CONSECUTIVE_ID_FILE)
def test_load_neuron_invalid_id_sequence_raises():
    '''Loading a file with an invalid ID sequence should raise.'''
    # NOTE(review): no exception check is visible here despite the name —
    # presumably an @nt.raises decorator was lost; confirm against upstream.
    utils.load_neuron(INVALID_ID_SEQUENCE_FILE)
def test_load_neuron_no_soma_raises_SomaError():
    '''Loading a file without a soma should raise SomaError.'''
    # NOTE(review): no exception check is visible here despite the name —
    # presumably an @nt.raises decorator was lost; confirm against upstream.
    utils.load_neuron(NO_SOMA_FILE)
def test_load_neuron_missing_parents_raises():
    '''Loading a file with missing parent IDs should raise.'''
    # NOTE(review): no exception check is visible here despite the name —
    # presumably an @nt.raises decorator was lost; confirm against upstream.
    utils.load_neuron(MISSING_PARENTS_FILE)
def test_load_neuron_disconnected_points_raises():
    '''Loading a file with disconnected points should raise.'''
    # NOTE(review): no exception check is visible here despite the name —
    # presumably an @nt.raises decorator was lost; confirm against upstream.
    utils.load_neuron(DISCONNECTED_POINTS_FILE)
def test_load_neuron_deep_neuron():
    '''make sure that neurons with deep (ie: larger than the python
    recursion limit can be loaded)
    '''
    filepath = os.path.join(DATA_PATH, 'h5/v1/deep_neuron.h5')
    utils.load_neuron(filepath)
def test_load_neuron():
    '''Loaded neuron is named after its file's basename, sans extension.'''
    nrn = utils.load_neuron(FILES[0])
    # BUG FIX: str.strip('.swc') strips any of the *characters* '.', 's',
    # 'w', 'c' from both ends rather than removing the suffix, and splitting
    # on '/' is not portable.  Derive the expected name with path utilities.
    expected_name = os.path.splitext(os.path.basename(FILES[0]))[0]
    nt.ok_(nrn.name == expected_name)
from nose import tools as nt
import os
import numpy as np
from neurom import fst, load_neuron, NeuriteType
from neurom.fst import _neuronfunc as _nf
from neurom.point_neurite.io import utils as io_utils
from neurom.core import make_soma, Neurite, Section
from neurom.core.population import Population

_PWD = os.path.dirname(os.path.abspath(__file__))
H5_PATH = os.path.join(_PWD, '../../../test_data/h5/v1/')
DATA_PATH = os.path.join(H5_PATH, 'Neuron.h5')

# Reference neuron loaded once, via both the fst and the point-neurite APIs.
NRN = load_neuron(DATA_PATH)
NRN_OLD = io_utils.load_neuron(DATA_PATH)


def _equal(a, b, debug=False):
    '''Assert a and b have the same length and element-wise equal contents.'''
    if debug:
        print('\na.shape: %s\nb.shape: %s\n' % (a.shape, b.shape))
        print('\na: %s\nb:%s\n' % (a, b))
    nt.assert_equal(len(a), len(b))
    nt.assert_true(np.alltrue(a == b))


def _close(a, b, debug=False):
    '''Debug-print helper for comparing two arrays.

    NOTE(review): only the debug printing is visible in this chunk — the
    closeness assertions appear to be truncated; confirm against the full file.
    '''
    if debug:
        print('\na.shape: %s\nb.shape: %s\n' % (a.shape, b.shape))
        print('\na: %s\nb:%s\n' % (a, b))
        print('\na - b:%s\n' % (a - b))
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Compatibility between NL and H5 files'''
import numpy as np
from neurom.point_neurite.io.utils import load_neuron
from neurom.point_neurite.features import get
from neurom import log  # pylint: disable=unused-import

# Same morphology stored in two formats; each whole-cell measure is printed
# for both loads so the values can be eyeballed for agreement.
nrn_h5 = load_neuron('test_data/h5/v1/bio_neuron-001.h5')
nrn_asc = load_neuron('test_data/neurolucida/bio_neuron-001.asc')

print('h5 number of sections: %s' % get('number_of_sections', nrn_h5)[0])
print('nl number of sections: %s\n' % get('number_of_sections', nrn_asc)[0])

print('h5 number of segments: %s' % get('number_of_segments', nrn_h5)[0])
print('nl number of segments: %s\n' % get('number_of_segments', nrn_asc)[0])

print('h5 total neurite surface area: %s' % np.sum(get('section_areas', nrn_h5)))
print('nl total neurite surface area: %s\n' % np.sum(get('section_areas', nrn_asc)))

print('h5 total neurite volume: %s' % np.sum(get('section_volumes', nrn_h5)))
print('nl total neurite volume: %s\n' % np.sum(get('section_volumes', nrn_asc)))

print('h5 total neurite length: %s' % np.sum(get('section_lengths', nrn_h5)))
print('nl total neurite length: %s\n' % np.sum(get('section_lengths', nrn_asc)))