Example 1
import os
import re

# NOTE: 'surf' is assumed to be a surface I/O module providing read(),
# e.g. mvpa2.support.nibabel.surf


def load_surface(input_fn,
                 freesurfer_dir,
                 which='pial',
                 fsaverage='fsaverage6'):
    """Load a surface (pial by default) matching the input file's hemisphere"""
    hemi = re.findall('hemi-([LR])', input_fn)[0]
    surf_fn = '{0}h.{1}.gii'.format(hemi.lower(), which)
    surf_fn = os.path.join(freesurfer_dir, fsaverage, 'SUMA', surf_fn)
    print("Loading {}".format(surf_fn))
    return surf.read(surf_fn)
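
A brief usage sketch: the filename below is hypothetical and only needs to carry a BIDS-style 'hemi-L'/'hemi-R' tag, while freesurfer_dir must contain a SUMA folder for the chosen fsaverage template.

# hypothetical paths, for illustration only
input_fn = 'sub-01_task-rest_hemi-L_bold.func.gii'
pial = load_surface(input_fn, '/data/freesurfer')
# -> reads /data/freesurfer/fsaverage6/SUMA/lh.pial.gii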
Example 2
    def test_surf_voxel_selection(self):
        vol_shape = (10, 10, 10)
        vol_affine = np.identity(4)
        vol_affine[0, 0] = vol_affine[1, 1] = vol_affine[2, 2] = 5

        vg = volgeom.VolGeom(vol_shape, vol_affine)

        density = 10

        outer = surf.generate_sphere(density) * 25. + 15
        inner = surf.generate_sphere(density) * 20. + 15

        vs = volsurf.VolSurfMaximalMapping(vg, outer, inner)

        nv = outer.nvertices

        # select under a variety of parameters
        # parameters are distance metric (dijkstra or euclidean),
        # radius, and number of searchlight centers
        params = [('d', 1., 10), ('d', 1., 50), ('d', 1., 100), ('d', 2., 100),
                  ('e', 2., 100), ('d', 2., 100), ('d', 20, 100),
                  ('euclidean', 5, None), ('dijkstra', 10, None)]

        # function that indicates for which parameters the full test is run
        test_full = lambda x: len(x[0]) > 1 or x[2] == 100

        expected_labs = ['grey_matter_position', 'center_distances']

        voxcount = []
        tested_double_features = False
        for param in params:
            distance_metric, radius, ncenters = param
            srcs = range(0, nv, nv // (ncenters or nv))
            sel = surf_voxel_selection.voxel_selection(
                vs,
                radius,
                source_surf_nodes=srcs,
                distance_metric=distance_metric)

            # see how many voxels were selected
            vg = sel.volgeom
            datalin = np.zeros((vg.nvoxels, 1))

            mp = sel
            for k, idxs in mp.iteritems():
                if idxs is not None:
                    datalin[idxs] = 1

            voxcount.append(np.sum(datalin))

            if test_full(param):
                assert_equal(np.sum(datalin), np.sum(sel.get_mask()))

                assert_true(len('%s%r' % (sel, sel)) > 0)

                # see if voxels containing inner and outer
                # nodes were selected
                for sf in [inner, outer]:
                    for k, idxs in mp.iteritems():
                        xyz = np.reshape(sf.vertices[k, :], (1, 3))
                        linidx = vg.xyz2lin(xyz)

                        # only required if xyz is actually within the volume
                        assert_equal(linidx in idxs, vg.contains_lin(linidx))

                # check that it has all the attributes
                labs = sel.aux_keys()

                assert_true(all([lab in labs for lab in expected_labs]))

                if externals.exists('h5py'):
                    # some I/O testing
                    fd, fn = tempfile.mkstemp('.h5py', 'test')
                    os.close(fd)
                    h5save(fn, sel)

                    sel2 = h5load(fn)
                    os.remove(fn)

                    assert_equal(sel, sel2)
                else:
                    sel2 = sel

                # check that mask is OK even after I/O
                assert_array_equal(sel.get_mask(), sel2.get_mask())

                # test I/O with surfaces
                # XXX the @tempfile decorator only supports a single filename
                #     hence this method does not use it
                fd, outerfn = tempfile.mkstemp('outer.asc', 'test')
                os.close(fd)
                fd, innerfn = tempfile.mkstemp('inner.asc', 'test')
                os.close(fd)
                fd, volfn = tempfile.mkstemp('vol.nii', 'test')
                os.close(fd)

                surf.write(outerfn, outer, overwrite=True)
                surf.write(innerfn, inner, overwrite=True)

                img = sel.volgeom.get_empty_nifti_image()
                img.to_filename(volfn)

                sel3 = surf_voxel_selection.run_voxel_selection(
                    radius,
                    volfn,
                    innerfn,
                    outerfn,
                    source_surf_nodes=srcs,
                    distance_metric=distance_metric)

                outer4 = surf.read(outerfn)
                inner4 = surf.read(innerfn)
                vsm4 = vs = volsurf.VolSurfMaximalMapping(vg, inner4, outer4)

                # check that two ways of voxel selection match
                sel4 = surf_voxel_selection.voxel_selection(
                    vsm4,
                    radius,
                    source_surf_nodes=srcs,
                    distance_metric=distance_metric)

                assert_equal(sel3, sel4)

                os.remove(outerfn)
                os.remove(innerfn)
                os.remove(volfn)

                # compare sel3 with other selection results
                # NOTE: which voxels are precisely selected by sel can be quite
                #       off from those in sel3, as writing the surfaces imposes
                #       rounding errors and the sphere is very symmetric, which
                #       means that different neighboring nodes are selected
                #       to select a certain number of voxels.
                sel3cmp_difference_ratio = [(sel, .2), (sel4, 0.)]
                for selcmp, ratio in sel3cmp_difference_ratio:
                    nunion = ndiff = 0

                    for k in selcmp.keys():
                        p = set(sel3.get(k))
                        q = set(selcmp.get(k))
                        nunion += len(p.union(q))
                        ndiff += len(p.symmetric_difference(q))

                    assert_true(float(ndiff) / float(nunion) <= ratio)

                # check searchlight call
                # as of late Aug 2012, this is with the fancy query engine
                # as implemented by Yarik

                mask = sel.get_mask()
                keys = None if ncenters is None else sel.keys()

                dset_data = np.reshape(np.arange(vg.nvoxels), vg.shape)
                dset_img = nb.Nifti1Image(dset_data, vg.affine)
                dset = fmri_dataset(samples=dset_img, mask=mask)

                qe = queryengine.SurfaceVerticesQueryEngine(
                    sel,
                    # optionally add additional information
                    # about each near-disk voxel
                    add_fa=['center_distances', 'grey_matter_position'])

                # test I/O, ensuring that the loaded instance is still trained
                if externals.exists('h5py'):
                    fd, qefn = tempfile.mkstemp('qe.hdf5', 'test')
                    os.close(fd)
                    h5save(qefn, qe)
                    qe = h5load(qefn)
                    os.remove(qefn)

                assert_false('ERROR' in repr(qe))  # check that repr works
                voxelcounter = _Voxel_Count_Measure()
                searchlight = Searchlight(
                    voxelcounter,
                    queryengine=qe,
                    roi_ids=keys,
                    nproc=1,
                    enable_ca=['roi_feature_ids', 'roi_center_ids'])
                sl_dset = searchlight(dset)

                selected_count = sl_dset.samples[0, :]
                mp = sel
                for i, k in enumerate(sel.keys()):
                    # check that number of selected voxels matches
                    assert_equal(selected_count[i], len(mp[k]))

                assert_equal(searchlight.ca.roi_center_ids, sel.keys())

                assert_array_equal(sl_dset.fa['center_ids'], qe.ids)

                # check nearest node is *really* the nearest node

                allvx = sel.get_targets()
                intermediate = outer * .5 + inner * .5

                for vx in allvx:
                    nearest = sel.target2nearest_source(vx)

                    xyz = intermediate.vertices[nearest, :]
                    sqsum = np.sum((xyz - intermediate.vertices)**2, 1)

                    idx = np.argmin(sqsum)
                    assert_equal(idx, nearest)

                if not tested_double_features:  # test only once
                    # if we have multiple features for the same voxel,
                    # we should get them all
                    dset1 = dset.copy()
                    dset1.fa['dset'] = [1]
                    dset2 = dset.copy()
                    dset2.fa['dset'] = [2]
                    dset_ = hstack((dset1, dset2), 'drop_nonunique')
                    dset_.sa = dset1.sa
                    # dset_.a.imghdr = dset1.a.imghdr
                    assert_true('imghdr' in dset_.a.keys())
                    assert_equal(dset_.a['imghdr'].value,
                                 dset1.a['imghdr'].value)
                    roi_feature_ids = searchlight.ca.roi_feature_ids
                    sl_dset_ = searchlight(dset_)
                    # and we should get twice the counts
                    assert_array_equal(sl_dset_.samples, sl_dset.samples * 2)

                    # compare old and new roi_feature_ids
                    assert (len(roi_feature_ids) == len(
                        searchlight.ca.roi_feature_ids))
                    nfeatures = dset.nfeatures
                    for old, new in zip(roi_feature_ids,
                                        searchlight.ca.roi_feature_ids):
                        # each new id should consist of the old ones + (old + nfeatures)
                        # since we hstack'ed two datasets
                        assert_array_equal(
                            np.hstack([(x, x + nfeatures) for x in old]), new)
                    tested_double_features = True

        # check that the number of voxels selected is as expected
        expected_voxcount = [22, 93, 183, 183, 183, 183, 183, 183, 183]

        assert_equal(voxcount, expected_voxcount)
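
For orientation, the user-facing flow this test exercises can be condensed as follows. This is a minimal sketch that assumes the PyMVPA 2.x module layout; the geometry is the same toy setup as in the test above.

import numpy as np
from mvpa2.misc.surfing import volgeom, volsurf, surf_voxel_selection
from mvpa2.support.nibabel import surf

# toy 10x10x10 volume with 5 mm isotropic voxels, as in the test
affine = np.identity(4)
affine[0, 0] = affine[1, 1] = affine[2, 2] = 5
vg = volgeom.VolGeom((10, 10, 10), affine)

# synthetic inner/outer surface pair enclosing the 'grey matter'
outer = surf.generate_sphere(10) * 25. + 15
inner = surf.generate_sphere(10) * 20. + 15
vs = volsurf.VolSurfMaximalMapping(vg, outer, inner)

# per-vertex discs of voxels within a geodesic radius of 10
sel = surf_voxel_selection.voxel_selection(vs, 10.,
                                           distance_metric='dijkstra')

center = list(sel.keys())[0]
disc_voxels = sel[center]   # linear voxel indices for this center vertex
mask = sel.get_mask()       # volumetric mask of all selected voxels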
Example 3
    def test_surf(self, temp_fn):
        """Some simple testing with surfaces
        """

        s = surf.generate_sphere(10)

        assert_true(s.nvertices == 102)
        assert_true(s.nfaces == 200)

        v = s.vertices
        f = s.faces

        assert_true(v.shape == (102, 3))
        assert_true(f.shape == (200, 3))

        # another surface
        t = s * 10 + 2
        assert_true(t.same_topology(s))
        assert_array_equal(f, t.faces)

        assert_array_equal(v * 10 + 2, t.vertices)

        # allow updating, but should not affect original array
        # CHECKME: maybe we want to throw an exception instead
        assert_true((v * 10 + 2 == t.vertices).all().all())
        assert_true((s.vertices * 10 + 2 == t.vertices).all().all())

        # a few checks on vertices and faces
        v_check = {
            40: (0.86511144, -0.28109175, -0.41541501),
            10: (0.08706015, -0.26794358, -0.95949297)
        }
        f_check = {10: (7, 8, 1), 40: (30, 31, 21)}

        vf_checks = [(v_check, lambda x: x.vertices),
                     (f_check, lambda x: x.faces)]

        eps = .0001
        for cmap, f in vf_checks:
            for k, v in cmap.iteritems():
                surfval = f(s)[k, :]
                assert_true((abs(surfval - v) < eps).all())

        # make sure same_topology returns False for a different topology
        u = surf.generate_cube()
        assert_false(u.same_topology(s))

        # check that neighbours are computed correctly
        # even if we nuke the topology afterwards
        for _ in [0, 1]:
            nbrs = s.neighbors
            n_check = [(0, 96, 0.284629), (40, 39, 0.56218349),
                       (100, 99, 0.1741202)]
            for i, j, k in n_check:
                assert_true(abs(nbrs[i][j] - k) < eps)

        def assign_zero(x):
            x.faces[:, :] = 0
            return None

        assert_raises((ValueError, RuntimeError), assign_zero, s)

        # see if mapping to high res works
        h = surf.generate_sphere(40)

        low2high = s.map_to_high_resolution_surf(h, .1)
        partmap = {7: 141, 8: 144, 9: 148, 10: 153, 11: 157, 12: 281}
        for k, v in partmap.iteritems():
            assert_true(low2high[k] == v)

        # ensure that slow implementation gives same results as fast one
        low2high_slow = s.map_to_high_resolution_surf(h, .1)
        for k, v in low2high.iteritems():
            assert_true(low2high_slow[k] == v)

        #  should fail if epsilon is too small
        assert_raises(ValueError,
                      lambda x: x.map_to_high_resolution_surf(h, .01), s)

        n2f = s.node2faces
        for i in xrange(s.nvertices):
            nf = [10] if i < 2 else [5, 6]  # number of faces expected

            assert_true(len(n2f[i]) in nf)

        # test dijkstra distances
        ds2 = s.dijkstra_distance(2)
        some_ds = {
            0: 3.613173280799,
            1: 0.2846296765,
            2: 0.,
            52: 1.87458018,
            53: 2.0487004817,
            54: 2.222820777,
            99: 3.32854360,
            100: 3.328543604,
            101: 3.3285436042
        }

        eps = np.finfo('f').eps
        for k, v in some_ds.iteritems():
            assert_true(abs(v - ds2[k]) < eps)

        # test I/O (through ascii files)
        surf.write(temp_fn, s, overwrite=True)
        s2 = surf.read(temp_fn)

        # test i/o and ensure that the loaded instance is trained
        if externals.exists('h5py'):
            h5save(temp_fn, s2)
            s2 = h5load(temp_fn)

        assert_array_almost_equal(s.vertices, s2.vertices, 4)
        assert_array_almost_equal(s.faces, s2.faces, 4)

        # test plane (new feature end of August 2012)
        s3 = surf.generate_plane((0, 0, 0), (2, 0, 0), (0, 1, 0), 10, 20)
        assert_equal(s3.nvertices, 200)
        assert_equal(s3.nfaces, 342)
        assert_array_almost_equal(s3.vertices[-1, :], np.array([18., 19, 0.]))
        assert_array_almost_equal(s3.faces[-1, :], np.array([199, 198, 179]))

        # test bar
        p, q = (0, 0, 0), (100, 0, 0)
        s4 = surf.generate_bar(p, q, 10, 12)
        assert_equal(s4.nvertices, 26)
        assert_equal(s4.nfaces, 48)
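
A compact recap of the surface arithmetic exercised above, assuming the same surf module: scaling and shifting act on vertex coordinates only, so topology is preserved.

from mvpa2.support.nibabel import surf  # assumed module path

s = surf.generate_sphere(10)  # 102 vertices, 200 faces
t = s * 10 + 2                # affine change of vertex coordinates
assert t.same_topology(s)     # faces are untouched
ds = s.dijkstra_distance(2)   # geodesic distances from vertex 2 (node -> distance)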
Example 4
    def test_surf_gifti(self, fn):
        # From section 14.4 in GIFTI Surface Data Format Version 1.0
        # (with some adaptations)

        test_data = '''<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE GIFTI SYSTEM "http://www.nitrc.org/frs/download.php/1594/gifti.dtd">
<GIFTI
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xsi:noNamespaceSchemaLocation="http://www.nitrc.org/frs/download.php/1303/GIFTI_Caret.xsd"
  Version="1.0"
  NumberOfDataArrays="2">
<MetaData>
  <MD>
    <Name><![CDATA[date]]></Name>
    <Value><![CDATA[Thu Nov 15 09:05:22 2007]]></Value>
  </MD>
</MetaData>
<LabelTable/>
<DataArray Intent="NIFTI_INTENT_POINTSET"
  DataType="NIFTI_TYPE_FLOAT32"
  ArrayIndexingOrder="RowMajorOrder"
  Dimensionality="2"
  Dim0="4"
  Dim1="3"
  Encoding="ASCII"
  Endian="LittleEndian"
  ExternalFileName=""
  ExternalFileOffset="">
<CoordinateSystemTransformMatrix>
  <DataSpace><![CDATA[NIFTI_XFORM_TALAIRACH]]></DataSpace>
  <TransformedSpace><![CDATA[NIFTI_XFORM_TALAIRACH]]></TransformedSpace>
  <MatrixData>
    1.000000 0.000000 0.000000 0.000000
    0.000000 1.000000 0.000000 0.000000
    0.000000 0.000000 1.000000 0.000000
    0.000000 0.000000 0.000000 1.000000
  </MatrixData>
</CoordinateSystemTransformMatrix>
<Data>
  10.5 0 0
  0 20.5 0
  0 0 30.5
  0 0 0
</Data>
</DataArray>
<DataArray Intent="NIFTI_INTENT_TRIANGLE"
  DataType="NIFTI_TYPE_INT32"
  ArrayIndexingOrder="RowMajorOrder"
  Dimensionality="2"
  Dim0="4"
  Dim1="3"
  Encoding="ASCII"
  Endian="LittleEndian"
  ExternalFileName="" ExternalFileOffset="">
<Data>
0 1 2
1 2 3
0 1 3
0 2 3
</Data>
</DataArray>
</GIFTI>'''

        with open(fn, 'w') as f:
            f.write(test_data)

        # test I/O
        s = surf.read(fn)
        surf.write(fn, s)
        s = surf.read(fn)

        v = np.zeros((4, 3))
        v[0, 0] = 10.5
        v[1, 1] = 20.5
        v[2, 2] = 30.5

        f = np.asarray([[0, 1, 2], [1, 2, 3], [0, 1, 3], [0, 2, 3]],
                       dtype=np.int32)

        assert_array_equal(s.vertices, v)
        assert_array_equal(s.faces, f)
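
The same file can also be inspected with nibabel's GIFTI support directly; a minimal sketch, assuming nibabel is available:

import nibabel as nib

g = nib.load(fn)               # fn: path of the GIFTI file written above
pointset = g.darrays[0].data   # NIFTI_INTENT_POINTSET -> (4, 3) float32
triangles = g.darrays[1].data  # NIFTI_INTENT_TRIANGLE -> (4, 3) int32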
Example 5
    print 'Loading and z-scoring %s hemisphere template took %s seconds' % (hemi, tttt)

# Run Hyperalignment per hemisphere:
hypsource = {}
finalproj = {}
for hemi in ['L','R']:
    # Load connectivity profile:
    t1 = time.time()
    ds = corr[:,ind[hemi]]
    source = Dataset(ds)
    zscore(source,chunks_attr=None)
    # Set 'node_indices' for hyperalignment function:
    source.fa['node_indices'] = range(len(ind[hemi]))
    source = source[:,maskimg[hemi]]
    # Read surface for searchlight vertices:
    s = surf.read('%s/resources/10k/S1200.%s.midthickness.10k_fs_LR.surf.gii' % (tempdir,hemi))
    surfsel = SurfaceQueryEngine(surface=s,radius=12,distance_metric='dijkstra',fa_node_key='node_indices')
    surfsel.train(source)
    nfeatures = source.shape[1]
    # Searchlight hyperalignment on connectomes:
    projmat = np.zeros([nfeatures,nfeatures])
    for node in maskimg[hemi]:
        hypalign(source, target[hemi], node, surfsel, projmat, maskimg[hemi])
    normproj = projmat.copy()
    # Normalise projection matrices to keep scale of values:
    for ii in range(len(projmat)):
        nnz = float(np.count_nonzero(normproj[:, ii]))
        normproj[:, ii] = normproj[:, ii] / nnz
    normproj = np.nan_to_num(normproj)
    dd = np.asmatrix(source.samples)
    dd = dd - np.mean(dd,axis=0)
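
Read in isolation, the normalisation step averages each column of the summed projection matrix by its number of nonzero contributors. A toy, vectorised restatement (numpy only; the data here is random):

import numpy as np

rng = np.random.RandomState(0)
projmat = rng.rand(5, 5) * (rng.rand(5, 5) > 0.5)  # toy summed projections

nnz = np.count_nonzero(projmat, axis=0).astype(float)  # contributors per column
with np.errstate(divide='ignore', invalid='ignore'):
    normproj = projmat / nnz
normproj = np.nan_to_num(normproj)  # zero out columns with no contributors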
Example 6
def run_makespec_bothhemis(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']
    icolds, hemis = _get_hemis_icolds(config)

    ext = format2extension(config)

    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    for icold in icolds:
        specs = []
        for hemi in hemis:
            #surfprefix = '%s%sh' % (config['mi_icopat'] % icold, hemi)
            specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                       config['alsuffix'])
            specpathfn = os.path.join(refdir, specfn)
            specs.append(afni_suma_spec.read(specpathfn))

        add_states = ['inflated', 'full.patch.flat', 'sphere.reg']
        add_states_required = [True, False, True]  # flat surface is optional
        for add_state, is_req in zip(add_states, add_states_required):
            has_state = all([
                len(spec.find_surface_from_state(add_state)) == 1
                for spec in specs
            ])

            if not has_state:
                if is_req:
                    error('cannot find state %s' % add_state)
                else:
                    # skip this state
                    print "Optional state %s not found - skipping" % add_state
                    continue

            specs = afni_suma_spec.hemi_pairs_add_views(specs,
                                                        add_state,
                                                        ext,
                                                        refdir,
                                                        overwrite=overwrite)

        spec_both = afni_suma_spec.combine_left_right(specs)

        # generate spec files for both hemispheres
        hemiboth = 'b'
        specfn = afni_suma_spec.canonical_filename(icold, hemiboth,
                                                   config['alsuffix'])
        specpathfn = os.path.join(refdir, specfn)
        spec_both.write(specpathfn, overwrite=overwrite)

        # merge left and right into one surface
        # and generate the spec files as well
        hemimerged = 'm'
        specfn = afni_suma_spec.canonical_filename(icold, hemimerged,
                                                   config['alsuffix'])
        specpathfn = os.path.join(refdir, specfn)

        if config['overwrite'] or not os.path.exists(specpathfn):
            spec_merged, surfs_to_join = afni_suma_spec.merge_left_right(
                spec_both)
            spec_merged.write(specpathfn, overwrite=overwrite)

            full_path = lambda x: os.path.join(refdir, x)
            for fn_out, fns_in in surfs_to_join.iteritems():
                surfs_in = [surf.read(full_path(fn)) for fn in fns_in]

                if all(['full.patch.flat' in fn for fn in fns_in]):
                    # left hemi of flat; rotate 180 degrees, reposition again
                    surfs_in[0] = surfs_in[0] * [-1, -1, 1]
                    surfs_in = surf.reposition_hemisphere_pairs(
                        surfs_in[0], surfs_in[1], 'm')

                surf_merged = surf.merge(*surfs_in)

                if config['overwrite'] or not os.path.exists(
                        full_path(fn_out)):
                    surf.write(full_path(fn_out), surf_merged)
                    print "Merged surfaces written to %s" % fn_out
Example 7
def run_alignment(config, env):
    '''Aligns anat (which is assumed to be aligned with EPI data) to FreeSurfer SurfVol

    This function strips the anatomicals (by default), then uses align_epi_anat.py
    to estimate the alignment, then applies this transformation to the non-skull-stripped
    SurfVol and also to the surfaces. Some alignment headers will be nuked'''
    overwrite = config['overwrite']
    alignsuffix = config['al2expsuffix']
    refdir = config['refdir']

    fullext = config['outvol_fullext']
    ext = config['outvol_ext']

    if config['sid'] is None:
        raise ValueError('Need sid')

    cmds = []
    if not os.path.exists(config['refdir']):
        cmds.append('mkdir %(refdir)s' % config)

    # two volumes may have to be stripped: the input anatomical, and the surfvol.
    # put them in a list here and process them similarly
    surfvol = '%(refdir)s/%(sid)s_SurfVol%(outvol_fullext)s' % config
    surfvol_ss = '%(refdir)s/%(sid)s_SurfVol%(sssuffix)s%(outvol_fullext)s' % config

    e_p, e_n, _, _ = utils.afni_fileparts(config['expvol'])
    if config['expvol_ss']:
        e_n = '%s%s' % (e_n, config['sssuffix'])
    expvol = '%s/%s%s' % (refdir, e_n, fullext)

    volsin = [surfvol_ss, expvol]
    for volin in volsin:
        if not os.path.exists(volin):
            raise ValueError('File %s does not exist' % volin)

    a_n = utils.afni_fileparts(volsin[0])[1]  # surfvol input root name
    ssalprefix = '%s%s' % (a_n, alignsuffix)

    unity = "1 0 0 0 0 1 0 0 0 0 1 0"  # we all like unity, don't we?

    fullmatrixfn = '%s_mat.aff12.1D' % ssalprefix
    aloutfns = ['%s%s' % (ssalprefix, fullext),
                fullmatrixfn]  # expected output files if alignment worked
    if config['overwrite'] or not all(
        [os.path.exists('%s/%s' % (refdir, f)) for f in aloutfns]):
        alignedfn = '%s/%s%s' % (refdir, ssalprefix, fullext)

        if config['identity']:
            fullmatrix_content = '"MATRIX(%s)"' % unity.replace(" ", ",")

            cmd = 'cd "%s"; cat_matvec %s > %s; 3dcopy -overwrite %s%s %s%s%s' % (
                refdir, fullmatrix_content, fullmatrixfn, a_n, ext, a_n,
                alignsuffix, ext)
        else:
            # use different inputs depending on whether expvol is EPI or ANAT
            twovolpat = (
                '-anat %s -epi %s -anat2epi -epi_base 0 -anat_has_skull no -epi_strip None'
                if config['isepi'] else
                '-dset1 %s -dset2 %s -dset1to2 -dset1_strip None -dset2_strip None'
            )
            # use this pattern to generate a suffix
            twovolsuffix = twovolpat % (volsin[0], volsin[1])

            aea_opts = config['aea_opts']
            if config['template']:
                aea_opts += " -Allineate_opts '-maxrot 10 -maxshf 10 -maxscl 1.5'"
            # align_epi_anat.py
            cmd = 'cd "%s"; align_epi_anat.py -overwrite -suffix %s %s %s' % (
                refdir, alignsuffix, twovolsuffix, aea_opts)

        cmds.append(cmd)

        cmds.append(_set_vol_space_cmd(alignedfn, config))

        utils.run_cmds(cmds, env)

    else:
        print "Alignment already done - skipping"

        # run these commands first, then check if everything worked properly

    cmds = []

    # see if the expected transformation file was found
    if not config['identity'] and not os.path.exists('%s/%s' %
                                                     (refdir, fullmatrixfn)):
        raise Exception("Could not find %s in %s" % (fullmatrixfn, refdir))

    # now make a 3x4 matrix
    matrixfn = '%s%s.A2E.1D' % (a_n, alignsuffix)
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn)):
        cmds.append('cd "%s"; cat_matvec %s > %s || exit 1' %
                    (refdir, fullmatrixfn, matrixfn))

    # make an aligned, non-skullstripped version of SurfVol in refdir
    alprefix = '%s_SurfVol%s' % (config['sid'], alignsuffix)
    svalignedfn = '%s/%s%s' % (refdir, alprefix, fullext)

    # size of the anatomical grid in mm. We'll have to resample, otherwise
    # 3dWarp does not respect the corners of the volume (as of April 2012)
    newgrid = 1

    if overwrite or not os.path.exists(svalignedfn):
        #if not config['fs_sid']:
        #    raise ValueError("Don't have a freesurfer subject id - cannot continue")

        #surfvolfn = '%s/%s_SurfVol+orig' % (config['sumadir'], config['fs_sid'])
        surfvolfn = '%s/T1.nii' % config['sumadir']
        cmds.append(
            'cd "%s";3dWarp -overwrite -newgrid %f -matvec_out2in `cat_matvec -MATRIX %s` -prefix ./%s %s'
            % (refdir, newgrid, matrixfn, alprefix, surfvolfn))
        cmds.append(
            _set_vol_space_cmd('%s/%s+orig' % (refdir, alprefix), config))

    else:
        print '%s already exists - skipping Warp' % svalignedfn

    utils.run_cmds(cmds, env)
    cmds = []

    # nuke afni headers
    headernukefns = ['%s%s' % (f, fullext) for f in [ssalprefix, alprefix]]
    headernukefields = [
        'ALLINEATE_MATVEC_B2S_000000', 'ALLINEATE_MATVEC_S2B_000000',
        'WARPDRIVE_MATVEC_FOR_000000', 'WARPDRIVE_MATVEC_INV_000000'
    ]

    for fn in headernukefns:
        for field in headernukefields:
            # nuke transformation - otherwise AFNI does this unwanted transformation for us
            fullfn = '%s/%s' % (refdir, fn)

            if not (os.path.exists(fullfn) or config['identity']):
                raise ValueError("File %r does not exist" % fullfn)

            refitcmd = "3drefit -atrfloat %s '%s' %s" % (field, unity, fn)

            # only refit if not already in AFNI history (which is stored in HEADfile)
            cmd = 'cd "%s"; m=`grep "%s" %s | wc -w`; if [ $m -eq 0 ]; then %s; else echo "File %s seems already 3drefitted"; fi' % (
                refdir, refitcmd, fn, refitcmd, fn)
            cmds.append(cmd)
    utils.run_cmds('; '.join(cmds), env)
    cmds = []

    # run AddEdge so that volumes can be inspected visually for alignment
    if config['AddEdge']:
        use_ss = config['expvol_ss']

        # ae_{e,s}_n are AddEdge names for expvol and surfvol
        ae_e_n = utils.afni_fileparts(config['expvol'])[1]
        if use_ss:
            ae_e_n += config['sssuffix']
        ae_s_n = ssalprefix  #if use_ss else alprefix

        # *_ne have the output extension as well
        ae_e_ne = ae_e_n + ext
        ae_s_ne = ae_s_n + ext

        addedge_fns = ['%s/_ae.ExamineList.log' % refdir]

        exts = ['HEAD', 'BRIK']
        orig_ext = '+orig'
        addedge_rootfns = [
            '%s_%s%%s' % (ae_e_n, postfix)
            for postfix in ['e3', 'ec', ae_s_n + '_ec']
        ]
        addedge_rootfns.extend(
            ['%s_%s%%s' % (ae_s_n, postfix) for postfix in ['e3', 'ec']])

        addedge_fns_pat = [
            '%s.%s' % (fn, e) for fn in addedge_rootfns for e in exts
        ]

        addedge_pathfns_orig = map(lambda x: os.path.join(refdir, x % '+orig'),
                                   addedge_fns_pat) + addedge_fns
        addedge_pathfns_ext = map(lambda x: os.path.join(refdir, x % ext),
                                  addedge_fns_pat)
        addedge_exists = map(os.path.exists, addedge_pathfns_ext)
        if overwrite or not all(addedge_exists):
            ae_ns = (ae_e_n, ae_s_n)

            cmds.extend(
                map(lambda fn: 'if [ -e "%s" ]; then rm "%s"; fi' % (fn, fn),
                    addedge_pathfns_orig + addedge_pathfns_ext))
            cmds.append(';'.join(['cd %s' % refdir] + [
                _convert_vol_space_to_orig_cmd('%s/%s%s' % (refdir, n, ext))
                for n in ae_ns
            ] + ['\@AddEdge %s+orig %s+orig' % ae_ns]))

            set_space_fns = addedge_pathfns_orig + [
                '%s/%s%s.%s' % (refdir, fn, orig_ext, exts[0]) for fn in ae_ns
            ]

            for fn in set_space_fns:  #['%s/%s' % (refdir, fn % orig_ext) for fn in addedge_fns_pat]:
                if fn.endswith('.log'):
                    continue
                cmds.append('if [ -e %s ]; then %s; fi' %
                            (fn, _set_vol_space_cmd(fn, config)))

            utils.run_cmds(cmds, env)
            cmds = []

        else:
            print "AddEdge seems to have been run already"

        sid = config['sid']
        plot_slice_fns = [
            (ae_e_n + '_e3', ae_s_n + '_e3', '%s_qa_e3.png' % sid),
            (None, ae_e_n + '_' + ae_s_n + '_ec', '%s_qa_ec.png' % sid)
        ]

        # check for the output image (last element of each tuple)
        plot_slice_imgfns = ['%s/%s' % (refdir, fn[-1]) for fn in plot_slice_fns]
        if overwrite or not all(map(os.path.exists, plot_slice_imgfns)):
            slice_dims = [0, 1, 2]
            slice_pos = [.35, .45, .55, .65]
            for fns in plot_slice_fns:
                input_fns = []
                for i, fn in enumerate(fns):
                    if fn is not None:
                        fn = '%s/%s' % (refdir, fn)
                        if i <= 1:
                            fn += ext
                    input_fns.append(fn)

                fn1, fn2, fnout = input_fns
                if not os.path.exists(fnout):
                    _make_slice_plot(fn1, fn2, fnout)
                    print "QA Image saved to %s" % fnout
                else:
                    print "Already exists: %s" % fnout
        else:
            print "QA images already exist"

    # because AFNI uses RAI orientation but FreeSurfer LPI, make a new
    # affine transformation matrix in which the signs of
    # x and y coordinates are negated before and after the transformation
    matrixfn_LPI2RAI = '%s.A2E_LPI.1D' % ssalprefix
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn_LPI2RAI)):
        lpirai = '"MATRIX(-1,0,0,0,0,-1,0,0,0,0,1,0)"'
        cmd = (
            'cd %s; cat_matvec -ONELINE %s `cat_matvec -MATRIX %s` %s > %s' %
            (refdir, lpirai, matrixfn, lpirai, matrixfn_LPI2RAI))
        cmds.append(cmd)

    # apply transformation to surfaces
    [icolds, hemis] = _get_hemis_icolds(config)
    sumadir = config['sumadir']
    sumafiles = os.listdir(sumadir)

    origext = '.asc'
    ext = format2extension(config)
    tp = format2type(config)
    # process all hemispheres and ld values
    for icold in icolds:
        for hemi in hemis:
            pat = '%s%sh.?*%s' % (config['mi_icopat'] % icold, hemi, origext)
            for sumafile in sumafiles:
                if fnmatch.fnmatch(sumafile, pat):
                    if not sumafile.endswith(origext):
                        raise ValueError("%s does not end with %s" %
                                         (sumafile, origext))
                    #s = sumafile.split(".")
                    #s[len(s) - 2] += config['alsuffix'] # insert '_al' just before last dot
                    #alsumafile = ".".join(s)
                    extsumafile = sumafile[:-len(origext)]
                    alsumafile = extsumafile + config['alsuffix'] + ext

                    if config['overwrite'] or not os.path.exists(
                            '%s/%s' % (refdir, alsumafile)):
                        # now apply transformation
                        cmd = 'cd "%s";ConvertSurface -overwrite -i_fs %s/%s -o_%s ./%s -ixmat_1D %s' % \
                              (refdir, sumadir, sumafile, tp, alsumafile, matrixfn_LPI2RAI)
                        cmds.append(cmd)

                    # as of June 2012 copy the original sphere.reg (not aligned) as well
                    if extsumafile.endswith('.sphere.reg'):
                        sumaout = '%s/%s' % (refdir, extsumafile + ext)
                        if config['overwrite'] or not os.path.exists(sumaout):
                            s = surf.read('%s/%s' % (sumadir, sumafile))
                            surf.write(sumaout, s)
                            #cmds.append('cp %s/%s %s/%s' % (sumadir, sumafile, refdir, sumafile))

        mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
        srcpathfn = os.path.join(sumadir, mapfn)

        if os.path.exists(srcpathfn):
            trgpathfn = os.path.join(refdir, mapfn)
            if not os.path.exists(trgpathfn) or config['overwrite']:
                cmds.append('cp %s %s' % (srcpathfn, trgpathfn))

    utils.run_cmds(cmds, env)
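
The config dictionary that run_alignment reads is not shown in this excerpt; the keys below are the ones the function body accesses, with purely illustrative values (paths, suffixes, and patterns are hypothetical):

config = {
    'sid': 'sub01',                      # subject id (required)
    'refdir': '/data/sub01/ref',         # output reference directory
    'overwrite': False,
    'identity': False,                   # True: skip estimation, use identity matrix
    'isepi': True,                       # True: expvol is an EPI volume
    'expvol': '/data/sub01/epi_mean+orig.HEAD',
    'expvol_ss': True,                   # expvol was skull-stripped
    'sssuffix': '_ss',                   # skull-strip suffix
    'al2expsuffix': '_al2exp',           # alignment suffix
    'alsuffix': '_al',
    'outvol_ext': '+orig',
    'outvol_fullext': '+orig.HEAD',
    'sumadir': '/data/sub01/fs/SUMA',
    'aea_opts': '',                      # extra align_epi_anat.py options
    'template': False,
    'AddEdge': True,                     # run AddEdge for visual QA
    'mi_icopat': 'ico%d_',               # MapIcosahedron prefix pattern
    'hemimappingsuffix': 'hemi_correspondence.1D',
}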
Example 8
def average_fs_asc_surfs(fn1, fn2, fnout):
    '''Averages two surfaces and writes the result to fnout'''
    surf1 = surf.read(fn1)
    surf2 = surf.read(fn2)
    surfavg = surf1 * .5 + surf2 * .5
    surf.write(fnout, surfavg)
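
A brief usage sketch (the FreeSurfer-style ASCII surface paths are hypothetical); averaging the white and pial surfaces yields an intermediate surface, like the one used for the nearest-node checks in Example 2:

# hypothetical input/output paths
average_fs_asc_surfs('lh.smoothwm.asc', 'lh.pial.asc', 'lh.intermediate.asc')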
Esempio n. 12
0
def run_makespec_bothhemis(config, env):
    refdir = config['refdir']
    overwrite = config['overwrite']
    icolds, hemis = _get_hemis_icolds(config)

    ext = format2extension(config)

    if hemis != ['l', 'r']:
        raise ValueError("Cannot run without left and right hemisphere")

    for icold in icolds:
        specs = []
        for hemi in hemis:
            #surfprefix = '%s%sh' % (config['mi_icopat'] % icold, hemi)
            specfn = afni_suma_spec.canonical_filename(icold, hemi,
                                                       config['alsuffix'])
            specpathfn = pathjoin(refdir, specfn)
            s = afni_suma_spec.read(specpathfn)

            specs.append(afni_suma_spec.read(specpathfn))

        add_states = ['inflated', 'full.patch.flat', 'sphere.reg']
        add_states_required = [True, False, True] # flat surface is optional
        for add_state, is_req in zip(add_states, add_states_required):
            has_state = all([len(spec.find_surface_from_state(add_state)) == 1
                                    for spec in specs])

            if not has_state:
                if is_req:
                    error('cannot find state %s' % add_state)
                else:
                    # skip this state
                    print "Optional state %s not found - skipping" % add_state
                    continue

            specs = afni_suma_spec.hemi_pairs_add_views(specs,
                            add_state, ext, refdir, overwrite=overwrite)


        spec_both = afni_suma_spec.combine_left_right(specs)


        # generate spec files for both hemispheres
        hemiboth = 'b'
        specfn = afni_suma_spec.canonical_filename(icold, hemiboth, config['alsuffix'])
        specpathfn = pathjoin(refdir, specfn)
        spec_both.write(specpathfn, overwrite=overwrite)

        # merge left and right into one surface
        # and generate the spec files as well
        hemimerged = 'm'
        specfn = afni_suma_spec.canonical_filename(icold, hemimerged, config['alsuffix'])
        specpathfn = pathjoin(refdir, specfn)

        if config['overwrite'] or not os.path.exists(specpathfn):
            spec_merged, surfs_to_join = afni_suma_spec.merge_left_right(spec_both)
            spec_merged.write(specpathfn, overwrite=overwrite)

            full_path = lambda x:pathjoin(refdir, x)
            for fn_out, fns_in in surfs_to_join.iteritems():
                surfs_in = [surf.read(full_path(fn)) for fn in fns_in]

                if all(['full.patch.flat' in fn for fn in fns_in]):
                    # left hemi of flat; rotate 180 degrees, reposition again
                    surfs_in[0] = surfs_in[0] * [-1, -1, 1]
                    surfs_in = surf.reposition_hemisphere_pairs(surfs_in[0], surfs_in[1], 'm')

                surf_merged = surf.merge(*surfs_in)

                if config['overwrite'] or not os.path.exists(full_path(fn_out)):
                    surf.write(full_path(fn_out), surf_merged)
                    print "Merged surfaces written to %s" % fn_out
Esempio n. 13
0
def run_alignment(config, env):
    '''Aligns anat (which is assumed to be aligned with EPI data) to FreeSurfer SurfVol

    This function strips the anatomicals (by default), then uses align_epi_anat.py
    to estimate the alignment, then applies this transformation to the non-skull-stripped
    SurfVol and also to the surfaces. Some alignment headers will be nuked'''
    overwrite = config['overwrite']
    alignsuffix = config['al2expsuffix']
    refdir = config['refdir']

    fullext = config['outvol_fullext']
    ext = config['outvol_ext']

    if config['sid'] is None:
        raise ValueError('Need sid')

    cmds = []
    if not os.path.exists(config['refdir']):
        cmds.append('mkdir %(refdir)s' % config)

    # two volumes may have to be stripped: the inpput anatomical, and the surfvol.
    # put them in a list here and process them similarly
    surfvol = '%(refdir)s/%(sid)s_SurfVol%(outvol_fullext)s' % config
    surfvol_ss = '%(refdir)s/%(sid)s_SurfVol%(sssuffix)s%(outvol_fullext)s' % config

    e_p, e_n, _, _ = utils.afni_fileparts(config['expvol'])
    if config['expvol_ss']:
        e_n = '%s%s' % (e_n, config['sssuffix'])
    expvol = '%s/%s%s' % (refdir, e_n, fullext)

    volsin = [surfvol_ss, expvol]
    for volin in volsin:
        if not os.path.exists(volin):
            raise ValueError('File %s does not exist' % volin)

    a_n = utils.afni_fileparts(volsin[0])[1] # surfvol input root name
    ssalprefix = '%s%s' % (a_n, alignsuffix)

    unity = "1 0 0 0 0 1 0 0 0 0 1 0" # we all like unity, don't we?

    fullmatrixfn = '%s_mat.aff12.1D' % ssalprefix
    aloutfns = ['%s%s' % (ssalprefix, fullext), fullmatrixfn] # expected output files if alignment worked
    if config['overwrite'] or not all([os.path.exists('%s/%s' % (refdir, f)) for f in aloutfns]):
        alignedfn = '%s/%s%s' % (refdir, ssalprefix, fullext)

        if config['identity']:
            fullmatrix_content = '"MATRIX(%s)"' % unity.replace(" ", ",")

            cmd = 'cd "%s"; cat_matvec %s > %s; 3dcopy -overwrite %s%s %s%s%s' % (refdir, fullmatrix_content, fullmatrixfn, a_n, ext, a_n, alignsuffix, ext)
        else:
            # use different inputs depending on whether expvol is EPI or ANAT
            twovolpat = ('-anat %s -epi %s -anat2epi -epi_base 0 -anat_has_skull no -epi_strip None' if config['isepi']
                       else '-dset1 %s -dset2 %s -dset1to2 -dset1_strip None -dset2_strip None')
            # use this pattern to generate a suffix
            twovolsuffix = twovolpat % (volsin[0], volsin[1])

            aea_opts = config['aea_opts']
            if config['template']:
                aea_opts += " -Allineate_opts '-maxrot 10 -maxshf 10 -maxscl 1.5'"
            # align_epi_anat.py
            cmd = 'cd "%s"; align_epi_anat.py -overwrite -suffix %s %s %s' % (refdir, alignsuffix, twovolsuffix, aea_opts)

        cmds.append(cmd)

        cmds.append(_set_vol_space_cmd(alignedfn, config))

        utils.run_cmds(cmds, env)

    else:
        print "Alignment already done - skipping"

        # run these commands first, then check if everything worked properly


    cmds = []

    # see if the expected transformation file was found
    if not config['identity'] and not os.path.exists('%s/%s' % (refdir, fullmatrixfn)):
        raise Exception("Could not find %s in %s" % (fullmatrixfn, refdir))

    # now make a 3x4 matrix
    matrixfn = '%s%s.A2E.1D' % (a_n, alignsuffix)
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn)):
        cmds.append('cd "%s"; cat_matvec %s > %s || exit 1' % (refdir, fullmatrixfn, matrixfn))


    # make an aligned, non-skullstripped version of SurfVol in refdir
    alprefix = '%s_SurfVol%s' % (config['sid'], alignsuffix)
    svalignedfn = '%s/%s%s' % (refdir, alprefix, fullext)

    newgrid = 1 # size of anatomical grid in mm. We'll have to resample, otherwise 3dWarp does
              # not respect the corners of the volume (as of April 2012)

    if overwrite or not os.path.exists(svalignedfn):
        #if not config['fs_sid']:
        #    raise ValueError("Don't have a freesurfer subject id - cannot continue")

        #surfvolfn = '%s/%s_SurfVol+orig' % (config['sumadir'], config['fs_sid'])
        surfvolfn = '%s/T1.nii' % config['sumadir']
        cmds.append('cd "%s";3dWarp -overwrite -newgrid %f -matvec_out2in `cat_matvec -MATRIX %s` -prefix ./%s %s' %
                    (refdir, newgrid, matrixfn, alprefix, surfvolfn))
        cmds.append(_set_vol_space_cmd('%s/%s+orig' % (refdir, alprefix), config))

    else:
        print '%s already exists - skipping Warp' % svalignedfn

    utils.run_cmds(cmds, env)
    cmds = []

    # nuke afni headers
    headernukefns = ['%s%s' % (f, fullext) for f in [ssalprefix, alprefix]]
    headernukefields = ['ALLINEATE_MATVEC_B2S_000000',
                        'ALLINEATE_MATVEC_S2B_000000',
                        'WARPDRIVE_MATVEC_FOR_000000',
                        'WARPDRIVE_MATVEC_INV_000000']

    for fn in headernukefns:
        for field in headernukefields:
            # nuke transformation - otherwise AFNI does this unwanted transformation for us
            fullfn = '%s/%s' % (refdir, fn)

            if not (os.path.exists(fullfn) or config['identity']):
                raise ValueError("File %r does not exist" % fullfn)

            refitcmd = "3drefit -atrfloat %s '%s' %s" % (field, unity, fn)

            # only refit if not already in AFNI history (which is stored in HEADfile)
            cmd = 'cd "%s"; m=`grep "%s" %s | wc -w`; if [ $m -eq 0 ]; then %s; else echo "File %s seems already 3drefitted"; fi' % (refdir, refitcmd, fn, refitcmd, fn)
            cmds.append(cmd)
    utils.run_cmds('; '.join(cmds), env)
    cmds = []

    # run AddEdge so that volumes can be inspected visually for alignment
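    # (@AddEdge writes edge-enhanced copies of both volumes - the *_e3 and
    # combined *_ec datasets referenced below - plus an _ae.ExamineList.log)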
    if config['AddEdge']:
        use_ss = config['expvol_ss']

        # ae_{e,s}_n are AddEdge names for expvol and surfvol
        ae_e_n = utils.afni_fileparts(config['expvol'])[1]
        if use_ss:
            ae_e_n += config['sssuffix']
        ae_s_n = ssalprefix #if use_ss else alprefix

        # *_ne have the output extension as well
        ae_e_ne = ae_e_n + ext
        ae_s_ne = ae_s_n + ext

        addedge_fns = ['%s/_ae.ExamineList.log' % refdir]

        exts = ['HEAD', 'BRIK']
        orig_ext = '+orig'
        addedge_rootfns = ['%s_%s%%s' % (ae_e_n, postfix)
                            for postfix in ['e3', 'ec', ae_s_n + '_ec']]
        addedge_rootfns.extend(['%s_%s%%s' % (ae_s_n, postfix)
                            for postfix in ['e3', 'ec']])

        addedge_fns_pat = ['%s.%s' % (fn, e) for fn in addedge_rootfns for e in exts]

        addedge_pathfns_orig = map(lambda x: pathjoin(refdir, x % '+orig'), addedge_fns_pat) + addedge_fns
        addedge_pathfns_ext = map(lambda x: pathjoin(refdir, x % ext), addedge_fns_pat)
        addedge_exists = map(os.path.exists, addedge_pathfns_ext)
        if overwrite or not all(addedge_exists):
            ae_ns = (ae_e_n, ae_s_n)

            cmds.extend(map(lambda fn: 'if [ -e "%s" ]; then rm "%s"; fi' % (fn, fn), addedge_pathfns_orig + addedge_pathfns_ext))
            cmds.append(';'.join(['cd %s' % refdir] +
                                 [_convert_vol_space_to_orig_cmd('%s/%s%s' % (refdir, n, ext))
                                            for n in ae_ns] +
                                 ['\@AddEdge %s+orig %s+orig' % ae_ns]))

            set_space_fns = addedge_pathfns_orig + ['%s/%s%s.%s' % (refdir, fn, orig_ext, exts[0]) for fn in ae_ns]

            for fn in set_space_fns: #['%s/%s' % (refdir, fn % orig_ext) for fn in addedge_fns_pat]:
                if fn.endswith('.log'):
                    continue
                cmds.append('if [ -e %s ]; then %s; fi' % (fn, _set_vol_space_cmd(fn, config)))

            utils.run_cmds(cmds, env)
            cmds = []

        else:
            print "AddEdge seems to have been run already"

        sid = config['sid']
        plot_slice_fns = [(ae_e_n + '_e3', ae_s_n + '_e3', '%s_qa_e3.png' % sid),
                          (None, ae_e_n + '_' + ae_s_n + '_ec', '%s_qa_ec.png' % sid)]


        # the existence check concerns the QA image, the third element of each tuple
        plot_slice_imgfns = ['%s/%s' % (refdir, fn[2]) for fn in plot_slice_fns]
        if overwrite or not all(map(os.path.exists, plot_slice_imgfns)):
            slice_dims = [0, 1, 2]
            slice_pos = [.35, .45, .55, .65]
            for fns in plot_slice_fns:
                input_fns = []
                for i, fn in enumerate(fns):
                    if fn is not None:
                        fn = '%s/%s' % (refdir, fn)
                        if i <= 1:
                            fn += ext
                    input_fns.append(fn)

                fn1, fn2, fnout = input_fns
                if not os.path.exists(fnout):
                    _make_slice_plot(fn1, fn2, fnout)
                    print "QA Image saved to %s" % fnout
                else:
                    print "Already exists: %s" % fnout
        else:
            print "QA images already exist"



    # because AFNI uses RAI orientation but FreeSurfer LPI, make a new
    # affine transformation matrix in which the signs of
    # x and y coordinates are negated before and after the transformation
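    # in matrix terms: with F = diag(-1, -1, 1) flipping x and y, the file
    # written below holds F . A2E . F - the cat_matvec call sandwiches the
    # alignment matrix between two sign-flip matrices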
    matrixfn_LPI2RAI = '%s.A2E_LPI.1D' % ssalprefix
    if overwrite or not os.path.exists('%s/%s' % (refdir, matrixfn_LPI2RAI)):
        lpirai = '"MATRIX(-1,0,0,0,0,-1,0,0,0,0,1,0)"'
        cmd = ('cd %s; cat_matvec -ONELINE %s `cat_matvec -MATRIX %s` %s > %s' %
             (refdir, lpirai, matrixfn, lpirai, matrixfn_LPI2RAI))
        cmds.append(cmd)

    # apply transformation to surfaces
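    # (SUMA's ConvertSurface applies the inverse of the matrix passed via
    # -ixmat_1D to every node coordinate while converting the file format)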
    icolds, hemis = _get_hemis_icolds(config)
    sumadir = config['sumadir']
    sumafiles = os.listdir(sumadir)


    origext = '.asc'
    ext = format2extension(config)
    tp = format2type(config)
    # process all hemispheres and ld values
    for icold in icolds:
        for hemi in hemis:
            pat = '%s%sh.?*%s' % (config['mi_icopat'] % icold, hemi, origext)
            for sumafile in sumafiles:
                if fnmatch.fnmatch(sumafile, pat):
                    if not sumafile.endswith(origext):
                        raise ValueError("%s does not end with %s" % (sumafile, origext))
                    #s = sumafile.split(".")
                    #s[len(s) - 2] += config['alsuffix'] # insert '_al' just before last dot
                    #alsumafile = ".".join(s)
                    extsumafile = sumafile[:-len(origext)]
                    alsumafile = extsumafile + config['alsuffix'] + ext

                    if config['overwrite'] or not os.path.exists('%s/%s' % (refdir, alsumafile)):
                        # now apply transformation
                        cmd = 'cd "%s";ConvertSurface -overwrite -i_fs %s/%s -o_%s ./%s -ixmat_1D %s' % \
                              (refdir, sumadir, sumafile, tp, alsumafile, matrixfn_LPI2RAI)
                        cmds.append(cmd)

                    # as of June 2012 copy the original sphere.reg (not aligned) as well
                    # (check the file name directly; comparing with the glob
                    # pattern 'pat' would never match because of its wildcards)
                    if sumafile.endswith('.sphere.reg%s' % origext):
                        sumaout = '%s/%s' % (refdir, extsumafile + ext)
                        if config['overwrite'] or not os.path.exists(sumaout):
                            s = surf.read('%s/%s' % (sumadir, sumafile))
                            surf.write(s, sumaout)
                            #cmds.append('cp %s/%s %s/%s' % (sumadir, sumafile, refdir, sumafile))


        mapfn = (config['mi_icopat'] % icold) + config['hemimappingsuffix']
        srcpathfn = pathjoin(sumadir, mapfn)

        if os.path.exists(srcpathfn):
            trgpathfn = pathjoin(refdir, mapfn)
            if not os.path.exists(trgpathfn) or config['overwrite']:
                cmds.append('cp %s %s' % (srcpathfn, trgpathfn))

    utils.run_cmds(cmds, env)
Example no. 14
def average_fs_asc_surfs(fn1, fn2, fnout):
    '''averages two surfaces'''
    surf1 = surf.read(fn1)
    surf2 = surf.read(fn2)
    surfavg = surf1 * .5 + surf2 * .5
    surf.write(fnout, surfavg)
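
# A minimal usage sketch (hypothetical file names): build an intermediate
# surface halfway between the white-matter and pial surfaces. This assumes
# the two inputs have node-to-node correspondence, as FreeSurfer surfaces
# of a single subject do.
average_fs_asc_surfs('lh.smoothwm.asc', 'lh.pial.asc', 'lh.intermediate.asc')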