Example 1
    def test_surf_fs_asc(self, temp_fn):
        s = surf.generate_sphere(5) * 100

        surf_fs_asc.write(temp_fn, s, overwrite=True)
        t = surf_fs_asc.read(temp_fn)

        assert_array_almost_equal(s.vertices, t.vertices)
        assert_array_equal(s.faces, t.faces)

        theta = np.asarray([0, 0., 180.])

        r = s.rotate(theta, unit='deg')

        l2r = surf.get_sphere_left_right_mapping(s, r)
        l2r_expected = [
            0, 1, 2, 6, 5, 4, 3, 11, 10, 9, 8, 7, 15, 14, 13, 12, 16, 19, 18,
            17, 21, 20, 23, 22, 26, 25, 24
        ]

        assert_array_equal(l2r, np.asarray(l2r_expected))

        sides_facing = 'apism'
        for side_facing in sides_facing:
            l, r = surf.reposition_hemisphere_pairs(s + 10., t + (-10.),
                                                    side_facing)

            m = surf.merge(l, r)

            # not sure at the moment why medial rotation
            # messes up - but leave for now
            eps = 666 if side_facing == 'm' else .001
            assert_true((abs(m.center_of_mass) < eps).all())
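
For orientation, here is a condensed, self-contained sketch of the write/read round-trip this test exercises. The import path (mvpa2.support.nibabel) and the temporary-file handling are assumptions; the surf and surf_fs_asc calls mirror the ones above.

import os
import tempfile
import numpy as np
from mvpa2.support.nibabel import surf, surf_fs_asc  # assumed import path

# build a small sphere, scale it, and round-trip it through FreeSurfer ASCII
s = surf.generate_sphere(5) * 100

fd, fn = tempfile.mkstemp(suffix='.asc')
os.close(fd)
try:
    surf_fs_asc.write(fn, s, overwrite=True)
    t = surf_fs_asc.read(fn)
    # vertex coordinates should survive the round-trip up to float precision
    np.testing.assert_array_almost_equal(s.vertices, t.vertices)
finally:
    os.remove(fn)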
Example 2
    def test_surf_ring_queryengine(self):
        s = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)
        # add second layer
        s2 = surf.merge(s, (s + (.01, 0, 0)))
        ds = Dataset(samples=np.arange(20)[np.newaxis],
                     fa=dict(node_indices=np.arange(39, 0, -2)))
        # add more features (with shared node indices)
        ds3 = hstack((ds, ds, ds))
        radius = 2.5
        inner_radius = 1.0
        # make sure an error is raised if inner_radius >= radius
        assert_raises(ValueError,
                      lambda: queryengine.SurfaceRingQueryEngine(
                          surface=s2, inner_radius=2.5, radius=radius))
        distance_metrics = ('euclidean', 'dijkstra', 'euclidean', 'dijkstra')
        for distance_metric, include_center in zip(distance_metrics,
                                                   [True, False] * 2):
            qe = queryengine.SurfaceRingQueryEngine(
                surface=s2, radius=radius, inner_radius=inner_radius,
                distance_metric=distance_metric,
                include_center=include_center)
            # untrained qe should give errors
            assert_raises(ValueError, lambda: qe.ids)
            assert_raises(ValueError, lambda: qe.query_byid(0))

            # node index out of bounds should give error
            ds_ = ds.copy()
            ds_.fa.node_indices[0] = 100
            assert_raises(ValueError, lambda: qe.train(ds_))

            # lack of node indices should give error
            ds_.fa.pop('node_indices')
            assert_raises(ValueError, lambda: qe.train(ds_))
            # train the qe
            qe.train(ds3)

            for node in np.arange(-1, s2.nvertices + 1):
                if node < 0 or node >= s2.nvertices:
                    assert_raises(KeyError, lambda: qe.query_byid(node))
                    continue

                feature_ids = np.asarray(qe.query_byid(node))
                # node indices relative to ds
                base_ids = feature_ids[feature_ids < 20]
                # should have multiples of 20
                assert_equal(set(feature_ids),
                             set((base_ids[np.newaxis].T + \
                                  [0, 20, 40]).ravel()))

                node_indices = s2.circlearound_n2d(node,
                                    radius, distance_metric or 'dijkstra')

                fa_indices = [fa_index for fa_index, inode in
                              enumerate(ds3.fa.node_indices)
                              if inode in node_indices and node_indices[inode] > inner_radius]
                if include_center and node in ds3.fa.node_indices:
                    fa_indices += np.where(ds3.fa.node_indices == node)[0].tolist()
                assert_equal(set(feature_ids), set(fa_indices))
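
A minimal sketch of the ring query engine used on its own, assuming PyMVPA 2.x import paths (mvpa2.misc.surfing.queryengine, mvpa2.support.nibabel.surf, mvpa2.datasets.base); only calls that appear in the test above are used.

import numpy as np
from mvpa2.datasets.base import Dataset        # assumed import paths
from mvpa2.support.nibabel import surf
from mvpa2.misc.surfing import queryengine

# single-layer plane with 4 * 5 = 20 nodes; feature i sits on node i
plane = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)
ds = Dataset(samples=np.arange(20)[np.newaxis],
             fa=dict(node_indices=np.arange(20)))

# ring selection: features on nodes farther than inner_radius from the
# query node but no farther than radius away
qe = queryengine.SurfaceRingQueryEngine(surface=plane, radius=2.5,
                                        inner_radius=1.0)
qe.train(ds)                         # required before querying, as tested above
ring_feature_ids = qe.query_byid(0)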
Example 3
def run_makespec_bothhemis(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']
    icolds, hemis = _get_hemis_icolds(config)

    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    for icold in icolds:
        specs = []
        for hemi in hemis:
            #surfprefix = '%s%sh' % (config['mi_icopat'] % icold, hemi)
            specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                       config['alsuffix'])
            specpathfn = os.path.join(refdir, specfn)
            specs.append(afni_suma_spec.read(specpathfn))

        specs = afni_suma_spec.hemi_pairs_add_views(specs,
                                                    'inflated',
                                                    refdir,
                                                    overwrite=overwrite)
        specs = afni_suma_spec.hemi_pairs_add_views(specs,
                                                    'sphere.reg',
                                                    refdir,
                                                    overwrite=overwrite)

        spec_both = afni_suma_spec.combine_left_right(specs)

        # generate spec files for both hemispheres
        hemiboth = 'b'
        specfn = afni_suma_spec.canonical_filename(icold, hemiboth,
                                                   config['alsuffix'])
        specpathfn = os.path.join(refdir, specfn)
        spec_both.write(specpathfn, overwrite=overwrite)

        # merge left and right into one surface
        # and generate the spec files as well
        hemimerged = 'm'
        specfn = afni_suma_spec.canonical_filename(icold, hemimerged,
                                                   config['alsuffix'])
        specpathfn = os.path.join(refdir, specfn)

        if config['overwrite'] or not os.path.exists(specpathfn):
            spec_merged, surfs_to_join = afni_suma_spec.merge_left_right(
                spec_both)
            spec_merged.write(specpathfn, overwrite=overwrite)

            full_path = lambda x: os.path.join(refdir, x)
            for fn_out, fns_in in surfs_to_join.iteritems():
                surfs_in = [surf_fs_asc.read(full_path(fn)) for fn in fns_in]
                surf_merged = surf.merge(*surfs_in)
                if config['overwrite'] or not os.path.exists(
                        full_path(fn_out)):
                    surf_fs_asc.write(surf_merged,
                                      full_path(fn_out),
                                      overwrite=overwrite)
                    print "Merged surfaces written to %s" % fn_out
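
The essential call sequence of run_makespec_bothhemis, condensed into a short sketch. The directory, the spec file names, and the import layout are hypothetical; only functions that appear in the code above are called.

import os
from mvpa2.support.afni import afni_suma_spec       # assumed import path
from mvpa2.support.nibabel import surf, surf_fs_asc

refdir = '/path/to/refdir'                           # hypothetical directory

# one SUMA spec per hemisphere, combined into a single left+right spec
specs = [afni_suma_spec.read(os.path.join(refdir, fn))
         for fn in ('ico32_lh.spec', 'ico32_rh.spec')]  # hypothetical names
spec_both = afni_suma_spec.combine_left_right(specs)

# merging the spec also reports which surface files should be joined
spec_merged, surfs_to_join = afni_suma_spec.merge_left_right(spec_both)
for fn_out, fns_in in surfs_to_join.items():
    surfs_in = [surf_fs_asc.read(os.path.join(refdir, fn)) for fn in fns_in]
    surf_merged = surf.merge(*surfs_in)              # one surface, both hemispheres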
Example 4
    def test_surf_pairs(self):
        o, x, y = map(np.asarray, [(0, 0, 0), (0, 1, 0), (1, 0, 0)])
        d = np.asarray((0, 0, .1))
        n = 10
        s1 = surf.generate_plane(o, x, y, n, n)
        s2 = surf.generate_plane(o + d, x, y, n, n)
        s = surf.merge(s1, s2)

        # try for small surface
        eps = .0000001
        pw = s.pairwise_near_nodes(.5)
        for i in xrange(n ** 2):
            d = pw.pop((i, i + 100))
            assert_array_almost_equal(d, .1)

        assert_true(len(pw) == 0)

        pw = s.pairwise_near_nodes(.5)
        for i in xrange(n ** 2):
            d = pw.pop((i, i + 100))
            assert_array_almost_equal(d, .1)

        assert_true(len(pw) == 0)

        # bigger one
        pw = s.pairwise_near_nodes(1.4)
        for i in xrange(n ** 2):
            p, q = i // n, i % n
            offsets = sum(([] if q == 0 else [-1],
                         [] if q == n - 1 else [+1],
                         [] if p == 0 else [-n],
                         [] if p == n - 1 else [n],
                         [0]), [])
            for offset in offsets:
                ii = i + offset + n ** 2
                d = pw.pop((i, ii))

            # note: this check runs after the inner loop above, so it only
            # inspects the last popped pair, for which offset is 0
            assert_true((d < .5) ^ (offset > 0))

        assert_true(len(pw) == 0)
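
A minimal sketch of pairwise_near_nodes in isolation, under the same assumption about the import path; the expected distances follow directly from the geometry used in the test above.

from mvpa2.support.nibabel import surf   # assumed import path

# two stacked 3x3 planes, .1 apart along the third axis
o, x, y = (0, 0, 0), (0, 1, 0), (1, 0, 0)
s1 = surf.generate_plane(o, x, y, 3, 3)
s = surf.merge(s1, s1 + (0, 0, .1))

# pairwise_near_nodes maps (i, j) node pairs to their distance, for all pairs
# closer than the threshold; with a .5 threshold only the vertical
# across-layer pairs (distance .1) qualify, one per node of s1
pw = s.pairwise_near_nodes(.5)
assert len(pw) == s1.nvertices
for (i, j), d in pw.items():
    assert abs(d - .1) < 1e-6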
Example 5
    def test_surf_queryengine(self, qefn):
        s = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)

        # add second layer
        s2 = surf.merge(s, (s + (.01, 0, 0)))

        ds = Dataset(samples=np.arange(20)[np.newaxis],
                     fa=dict(node_indices=np.arange(39, 0, -2)))

        # add more features (with shared node indices)
        ds3 = hstack((ds, ds, ds))

        radius = 2.5

        # Note: sweepargs is not used to avoid re-generating the same
        #       surface and dataset multiple times.
        for distance_metric in ('euclidean', 'dijkstra', '<illegal>', None):
            builder = lambda: queryengine.SurfaceQueryEngine(
                s2, radius, distance_metric)
            if distance_metric in ('<illegal>', None):
                assert_raises(ValueError, builder)
                continue

            qe = builder()

            # test i/o and ensure that the untrained instance is not trained
            if externals.exists('h5py'):
                h5save(qefn, qe)
                qe = h5load(qefn)

            # untrained qe should give errors
            assert_raises(ValueError, lambda: qe.ids)
            assert_raises(ValueError, lambda: qe.query_byid(0))

            # node index out of bounds should give error
            ds_ = ds.copy()
            ds_.fa.node_indices[0] = 100
            assert_raises(ValueError, lambda: qe.train(ds_))

            # lack of node indices should give error
            ds_.fa.pop('node_indices')
            assert_raises(ValueError, lambda: qe.train(ds_))

            # train the qe
            qe.train(ds3)

            # test i/o and ensure that the loaded instance is trained
            if externals.exists('h5py'):
                h5save(qefn, qe)
                qe = h5load(qefn)

            for node in np.arange(-1, s2.nvertices + 1):
                if node < 0 or node >= s2.nvertices:
                    assert_raises(KeyError, lambda: qe.query_byid(node))
                    continue

                feature_ids = np.asarray(qe.query_byid(node))

                # node indices relative to ds
                base_ids = feature_ids[feature_ids < 20]

                # should have multiples of 20
                assert_equal(set(feature_ids),
                             set((base_ids[np.newaxis].T + \
                                            [0, 20, 40]).ravel()))

                node_indices = list(
                    s2.circlearound_n2d(node, radius, distance_metric
                                        or 'dijkstra'))

                fa_indices = [
                    fa_index
                    for fa_index, inode in enumerate(ds3.fa.node_indices)
                    if inode in node_indices
                ]

                assert_equal(set(feature_ids), set(fa_indices))

            # smoke tests
            assert_true('SurfaceQueryEngine' in '%s' % qe)
            assert_true('SurfaceQueryEngine' in '%r' % qe)
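
For comparison with the ring variant earlier, a minimal sketch of the plain SurfaceQueryEngine on a two-layer surface, again under assumed PyMVPA 2.x import paths.

import numpy as np
from mvpa2.datasets.base import Dataset        # assumed import paths
from mvpa2.support.nibabel import surf
from mvpa2.misc.surfing import queryengine

plane = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)
two_layers = surf.merge(plane, plane + (.01, 0, 0))   # 40 nodes in total

ds = Dataset(samples=np.arange(20)[np.newaxis],
             fa=dict(node_indices=np.arange(20)))

qe = queryengine.SurfaceQueryEngine(two_layers, 2.5, 'dijkstra')
qe.train(ds)                        # validates and indexes fa.node_indices
feature_ids = qe.query_byid(0)      # features within 2.5 of node 0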
Example 6
    def test_surf_queryengine(self, qefn):
        s = surf.generate_plane((0, 0, 0), (0, 1, 0), (0, 0, 1), 4, 5)

        # add second layer
        s2 = surf.merge(s, (s + (.01, 0, 0)))

        ds = Dataset(samples=np.arange(20)[np.newaxis],
                    fa=dict(node_indices=np.arange(39, 0, -2)))

        # add more features (with shared node indices)
        ds3 = hstack((ds, ds, ds))

        radius = 2.5

        # Note: sweepargs is not used to avoid re-generating the same
        #       surface and dataset multiple times.
        for distance_metric in ('euclidean', 'dijkstra', '<illegal>', None):
            builder = lambda: queryengine.SurfaceQueryEngine(s2, radius,
                                                             distance_metric)
            if distance_metric in ('<illegal>', None):
                assert_raises(ValueError, builder)
                continue

            qe = builder()

            # test i/o and ensure that the untrained instance is not trained
            if externals.exists('h5py'):
                fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
                os.close(fd)
                h5save(qefn, qe)
                qe = h5load(qefn)
                os.remove(qefn)


            # untrained qe should give errors
            assert_raises(ValueError, lambda: qe.ids)
            assert_raises(ValueError, lambda: qe.query_byid(0))

            # node index out of bounds should give error
            ds_ = ds.copy()
            ds_.fa.node_indices[0] = 100
            assert_raises(ValueError, lambda: qe.train(ds_))

            # lack of node indices should give error
            ds_.fa.pop('node_indices')
            assert_raises(ValueError, lambda: qe.train(ds_))


            # train the qe
            qe.train(ds3)

            # test i/o and ensure that the loaded instance is trained
            if externals.exists('h5py'):
                h5save(qefn, qe)
                qe = h5load(qefn)

            for node in np.arange(-1, s2.nvertices + 1):
                if node < 0 or node >= s2.nvertices:
                    assert_raises(KeyError, lambda: qe.query_byid(node))
                    continue

                feature_ids = np.asarray(qe.query_byid(node))

                # node indices relative to ds
                base_ids = feature_ids[feature_ids < 20]

                # should have multiples of 20
                assert_equal(set(feature_ids),
                             set((base_ids[np.newaxis].T + \
                                            [0, 20, 40]).ravel()))



                node_indices = list(s2.circlearound_n2d(node,
                                    radius, distance_metric or 'dijkstra'))

                fa_indices = [fa_index for fa_index, inode in
                              enumerate(ds3.fa.node_indices)
                              if inode in node_indices]


                assert_equal(set(feature_ids), set(fa_indices))

            # smoke tests
            assert_true('SurfaceQueryEngine' in '%s' % qe)
            assert_true('SurfaceQueryEngine' in '%r' % qe)
Example 7
def run_makespec_bothhemis(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']
    icolds, hemis = _get_hemis_icolds(config)

    ext = format2extension(config)

    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    for icold in icolds:
        specs = []
        for hemi in hemis:
            #surfprefix = '%s%sh' % (config['mi_icopat'] % icold, hemi)
            specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                       config['alsuffix'])
            specpathfn = os.path.join(refdir, specfn)
            specs.append(afni_suma_spec.read(specpathfn))

        add_states = ['inflated', 'full.patch.flat', 'sphere.reg']
        add_states_required = [True, False, True]  # flat surface is optional
        for add_state, is_req in zip(add_states, add_states_required):
            has_state = all([
                len(spec.find_surface_from_state(add_state)) == 1
                for spec in specs
            ])

            if not has_state:
                if is_req:
                    error('cannot find state %s' % add_state)
                else:
                    # skip this state
                    print "Optional state %s not found - skipping" % add_state
                    continue

            specs = afni_suma_spec.hemi_pairs_add_views(specs,
                                                        add_state,
                                                        ext,
                                                        refdir,
                                                        overwrite=overwrite)

        spec_both = afni_suma_spec.combine_left_right(specs)

        # generate spec files for both hemispheres
        hemiboth = 'b'
        specfn = afni_suma_spec.canonical_filename(icold, hemiboth,
                                                   config['alsuffix'])
        specpathfn = os.path.join(refdir, specfn)
        spec_both.write(specpathfn, overwrite=overwrite)

        # merge left and right into one surface
        # and generate the spec files as well
        hemimerged = 'm'
        specfn = afni_suma_spec.canonical_filename(icold, hemimerged,
                                                   config['alsuffix'])
        specpathfn = os.path.join(refdir, specfn)

        if config['overwrite'] or not os.path.exists(specpathfn):
            spec_merged, surfs_to_join = afni_suma_spec.merge_left_right(
                spec_both)
            spec_merged.write(specpathfn, overwrite=overwrite)

            full_path = lambda x: os.path.join(refdir, x)
            for fn_out, fns_in in surfs_to_join.iteritems():
                surfs_in = [surf.read(full_path(fn)) for fn in fns_in]

                if all(['full.patch.flat' in fn for fn in fns_in]):
                    # left hemi of flat; rotate 180 degrees, reposition again
                    surfs_in[0] = surfs_in[0] * [-1, -1, 1]
                    surfs_in = surf.reposition_hemisphere_pairs(
                        surfs_in[0], surfs_in[1], 'm')

                surf_merged = surf.merge(*surfs_in)

                if config['overwrite'] or not os.path.exists(
                        full_path(fn_out)):
                    surf.write(full_path(fn_out), surf_merged)
                    print "Merged surfaces written to %s" % fn_out
Example 8
def run_makespec_bothhemis(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']
    icolds, hemis = _get_hemis_icolds(config)

    ext = format2extension(config)

    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    for icold in icolds:
        specs = []
        for hemi in hemis:
            #surfprefix = '%s%sh' % (config['mi_icopat'] % icold, hemi)
            specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                       config['alsuffix'])
            specpathfn = pathjoin(refdir, specfn)
            specs.append(afni_suma_spec.read(specpathfn))

        add_states = ['inflated', 'full.patch.flat', 'sphere.reg']
        add_states_required = [True, False, True] # flat surface is optional
        for add_state, is_req in zip(add_states, add_states_required):
            has_state = all([len(spec.find_surface_from_state(add_state)) == 1
                                    for spec in specs])

            if not has_state:
                if is_req:
                    error('cannot find state %s' % add_state)
                else:
                    # skip this state
                    print "Optional state %s not found - skipping" % add_state
                    continue

            specs = afni_suma_spec.hemi_pairs_add_views(specs,
                            add_state, ext, refdir, overwrite=overwrite)


        spec_both = afni_suma_spec.combine_left_right(specs)


        # generate spec files for both hemispheres
        hemiboth = 'b'
        specfn = afni_suma_spec.canonical_filename(icold, hemiboth, config['alsuffix'])
        specpathfn = pathjoin(refdir, specfn)
        spec_both.write(specpathfn, overwrite=overwrite)

        # merge left and right into one surface
        # and generate the spec files as well
        hemimerged = 'm'
        specfn = afni_suma_spec.canonical_filename(icold, hemimerged, config['alsuffix'])
        specpathfn = pathjoin(refdir, specfn)

        if config['overwrite'] or not os.path.exists(specpathfn):
            spec_merged, surfs_to_join = afni_suma_spec.merge_left_right(spec_both)
            spec_merged.write(specpathfn, overwrite=overwrite)

            full_path = lambda x:pathjoin(refdir, x)
            for fn_out, fns_in in surfs_to_join.iteritems():
                surfs_in = [surf.read(full_path(fn)) for fn in fns_in]

                if all(['full.patch.flat' in fn for fn in fns_in]):
                    # left hemi of flat; rotate 180 degrees, reposition again
                    surfs_in[0] = surfs_in[0] * [-1, -1, 1]
                    surfs_in = surf.reposition_hemisphere_pairs(surfs_in[0], surfs_in[1], 'm')

                surf_merged = surf.merge(*surfs_in)

                if config['overwrite'] or not os.path.exists(full_path(fn_out)):
                    surf.write(full_path(fn_out), surf_merged)
                    print "Merged surfaces written to %s" % fn_out