Example #1
def project_fmri_from_kernels(input_mesh, kernels_file, fmri_data_file,
                              output_tex, bin_threshold=None, ):

    pyhrf.verbose(2,'Project data onto mesh using kernels ...')

    pyhrf.verbose(3, 'Projecting ...')
    pyhrf.verbose(3, 'func data: %s' % fmri_data_file)
    pyhrf.verbose(3, 'Mesh file: %s' % input_mesh)
    pyhrf.verbose(3, 'Save as: %s' % output_tex)

    pyhrf.verbose(2,'Call AimsFunctionProjection -op 1 ...')    

    data_files = []
    output_texs = []
    p_ids = None
    if bin_threshold is not None:
        d,h = read_volume(fmri_data_file)
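        # If the volume is integer-valued, treat it as a label image: write one
        # binary mask per non-zero label and project each mask separately, then
        # recombine the projected textures after projection.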
        if np.allclose(d.astype(int), d):
            tmp_dir = pyhrf.get_tmp_path()
            p_ids = np.unique(d)
            pyhrf.verbose(2, 'bin threshold: %f' %bin_threshold)
            pyhrf.verbose(2, 'pids(n=%d): %d...%d' \
                              %(len(p_ids),min(p_ids),max(p_ids)))
            for i,p_id in enumerate(p_ids):
                if p_id != 0:
                    new_p = np.zeros_like(d)
                    new_p[np.where(d==p_id)] = i + 1 #0 is background
                    ifn = op.join(tmp_dir,'pmask_%d.nii'%p_id)
                    write_volume(new_p, ifn, h)
                    data_files.append(ifn)
                    ofn = op.join(tmp_dir,'ptex_%d.gii'%p_id)
                    output_texs.append(ofn)
        else:
            data_files.append(fmri_data_file)
            output_texs.append(output_tex)
    else:
        data_files.append(fmri_data_file)
        output_texs.append(output_tex)

    pyhrf.verbose(3, 'input data files: %s' %str(data_files))
    pyhrf.verbose(3, 'output data files: %s' %str(output_texs))

    for data_file, o_tex in zip(data_files, output_texs):
        projection = [ 
            'AimsFunctionProjection', 
            '-op', '1',
            '-d', kernels_file,
            '-d1', data_file,
            '-m', input_mesh,
            '-o', o_tex
            ]

        cmd = ' '.join(map(str,projection))
        pyhrf.verbose(3, 'cmd: %s' %cmd)
        os.system(cmd)

    if bin_threshold is not None:
        pyhrf.verbose(2, 'Binary threshold of texture at %f' %bin_threshold)
        data, data_gii = read_texture(output_texs[0])
        data = (data > bin_threshold).astype(np.int32)
        pyhrf.verbose(3, 'data type: %s' % data.dtype)
        if p_ids is not None:
            # Restore the original label values: output_texs[k] corresponds to
            # p_ids[k+1], since label 0 (background) was skipped above.
            data *= int(p_ids[1])
            for pid, o_tex in zip(p_ids[2:], output_texs[1:]):
                pdata, pdata_gii = read_texture(o_tex)
                data += (pdata > bin_threshold).astype(np.int32) * int(pid)

        #assert (np.unique(data) == p_ids).all()
        write_texture(data, output_tex, intent='NIFTI_INTENT_LABEL')
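
A minimal usage sketch, assuming pyhrf and the BrainVISA/AIMS tools that provide the AimsFunctionProjection command are installed; all file names below are hypothetical placeholders:

# Hypothetical inputs: a white-matter mesh, precomputed projection kernels
# and a 4D BOLD volume to project onto the surface.
project_fmri_from_kernels('lh_white.gii', 'proj_kernels.ima',
                          'bold_session1.nii', 'bold_session1_surf.gii')

# Projecting an integer label volume, re-thresholding each label on the surface:
project_fmri_from_kernels('lh_white.gii', 'proj_kernels.ima',
                          'parcel_mask.nii', 'parcel_mask_surf.gii',
                          bin_threshold=0.5)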
Example #2
def parcellation_for_jde(fmri_data, avg_parcel_size=250, output_dir=None,
                         method='gkm', glm_drift='Cosine', glm_hfcut=128):
    """
    method: gkm, ward, ward_and_gkm
    """

    if output_dir is None:
        output_dir = tempfile.mkdtemp(prefix='pyhrf_JDE_parcellation_GLM',
                                      dir=pyhrf.cfg['global']['tmp_path'])
    glm_output_dir = op.join(output_dir, 'GLM_for_parcellation')
    if not op.exists(glm_output_dir): os.makedirs(glm_output_dir)

    pyhrf.verbose(1, 'GLM for parcellation')

    # if fmri_data.data_type == 'volume':
    #     paradigm_file, bold_file, mask_file = fmri_data.save(glm_output_dir)
    #     beta_files = glm_nipy_from_files(bold_file, fmri_data.tr, paradigm_file,
    #                                      glm_output_dir, mask_file,
    #                                      drift_model=glm_drift, hfcut=glm_hfcut)
    # elif fmri_data.data_type == 'surface':
    #     beta_files = glm_nipy(fmri_data, glm_output_dir,
    #                           drift_model=glm_drift, hfcut=glm_hfcut)

    g, dm, cons = glm_nipy(fmri_data, drift_model=glm_drift, hfcut=glm_hfcut)

    pval_files = []
    if cons is not None:
        func_data = [('con_pval_%s' %cname, con.pvalue()) \
                         for cname, con in cons.iteritems()]
    else:
        reg_cst_drift = re.compile(".*constant.*|.*drift.*")
        func_data = [('beta_%s' %reg_name, g.beta[ir]) \
                         for ir,reg_name in enumerate(dm.names) \
                         if not reg_cst_drift.match(reg_name)]

    for name, data in func_data:
        val_vol = expand_array_in_mask(data, fmri_data.roiMask>0)
        val_fn = op.join(glm_output_dir, '%s.nii' %name)
        write_volume(val_vol, val_fn, fmri_data.meta_obj)
        pval_files.append(val_fn)

    mask_file = op.join(glm_output_dir,'mask.nii')
    write_volume(fmri_data.roiMask>0, mask_file, fmri_data.meta_obj)

    nvox = fmri_data.get_nb_vox_in_mask()
    nparcels = round_nb_parcels(nvox * 1. / avg_parcel_size)

    pyhrf.verbose(1, 'Parcellation from GLM outputs, method: %s, ' \
                      'nb parcels: %d' %(method, nparcels))

    if fmri_data.data_type == 'volume':
        parcellation_file = op.join(output_dir, 'parcellation_%s_np%d.nii'
                                    %(method, nparcels))

        make_parcellation_from_files(pval_files, mask_file, parcellation_file,
                                     nparcels, method)
        parcellation,_ = read_volume(parcellation_file)
    else:
        mesh_file = fmri_data.data_files[-1]
        parcellation_file = op.join(output_dir, 'parcellation_%s_np%d.gii'
                                    %(method, nparcels))
        make_parcellation_surf_from_files(pval_files, mesh_file,
                                          parcellation_file, nparcels, method,
                                          verbose=1)
        parcellation,_ = read_texture(parcellation_file)
    #print parcellation_file


    pyhrf.verbose(1, parcellation_report(parcellation))

    return parcellation, parcellation_file
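
A minimal usage sketch, assuming fdata is an already-built pyhrf FmriData object (volumic or surfacic); the parameter values are hypothetical:

# fdata: a pyhrf.core.FmriData instance holding BOLD, mask and paradigm.
parcellation, parcellation_file = parcellation_for_jde(fdata,
                                                       avg_parcel_size=200,
                                                       method='ward_and_gkm')
# parcellation is the label array, parcellation_file the .nii/.gii written above.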
Example #3
File: core.py  Project: Solvi/pyhrf
def load_surf_bold_mask(bold_files, mesh_file, mask_file=None):

    pyhrf.verbose(1, 'Load mesh: ' + mesh_file)
    coords,triangles,coord_sys = read_mesh(mesh_file)
    pyhrf.verbose(2, 'Build graph ... ')
    fgraph = graph_from_mesh(triangles)
    assert graph_is_sane(fgraph)

    pyhrf.verbose(1, 'Mesh has %d nodes' %len(fgraph))

    pyhrf.verbose(2, 'Compute length of edges ... ')
    edges_l = np.array([np.array([distance(coords[i],
                                           coords[n],
                                           coord_sys.xform) \
                                      for n in nl]) \
                            for i,nl in enumerate(fgraph)],dtype=object)

    if mask_file is None or not op.exists(mask_file):
        pyhrf.verbose(1,'Mask file %s does not exist. Taking '\
                          'all nodes ...' %mask_file)
        mask = np.ones(len(fgraph))
        mask_meta_obj = None
        mask_loaded_from_file = False
    else:
        mask, mask_meta_obj = read_texture(mask_file)
        mask_loaded_from_file = True

    if not (np.round(mask) == mask).all():
        raise Exception("Mask is not n-ary")


    if len(mask) != len(fgraph):
        raise Exception('Size of mask (%d) is different from size '\
                        'of graph (%d)' %(len(mask),len(fgraph)))

    mask = mask.astype(np.int32)
    if mask.min() == -1:
        mask += 1

    #Split graph into rois:
    graphs = {}
    edge_lengths = {}
    for roiId in np.unique(mask):
        mroi = np.where(mask==roiId)
        g, nm = sub_graph(fgraph, mroi[0])
        edge_lengths[roiId] = edges_l[mroi]
        graphs[roiId] = g


    #Load BOLD:
    lastScan = 0
    sessionScans = []
    bolds = []
    for boldFile in bold_files:
        pyhrf.verbose(1, 'load bold: ' + boldFile)
        b,_ = read_texture(boldFile)
        pyhrf.verbose(1, 'bold shape: ' + str(b.shape))
        bolds.append(b)
        sessionScans.append(np.arange(lastScan, lastScan+b.shape[0],
                                      dtype=int))
        lastScan += b.shape[0]

    bold = np.concatenate(tuple(bolds))
    if len(fgraph) != bold.shape[1]:
        raise Exception('Nb positions not consistent between BOLD (%d) '\
                        'and mesh (%d)' %(bold.shape[1],len(fgraph)))

    # discard bad data (bold with var=0 and nan values):
    discard_bad_data(bold, mask, time_axis=0)

    return mask, mask_meta_obj, mask_loaded_from_file, bold, sessionScans, \
        graphs, edge_lengths
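
A minimal usage sketch with hypothetical GIFTI inputs (two sessions of surface-projected BOLD and an optional ROI mask texture):

bold_files = ['bold_run1_surf.gii', 'bold_run2_surf.gii']  # hypothetical paths
mask, mask_meta, from_file, bold, scans, graphs, edge_lengths = \
    load_surf_bold_mask(bold_files, 'lh_white.gii', mask_file='roi_mask.gii')
# bold has shape (nb_scans_total, nb_nodes); graphs maps each ROI label to its
# node-adjacency sub-graph and scans holds the per-session scan indices.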
Example #4
def make_parcellation_surf_from_files(beta_files, mesh_file, parcellation_file,
                                      nbparcel, method, mu=10., verbose=0):

    if method not in ['ward', 'gkm', 'ward_and_gkm', 'kmeans']:
        raise ValueError('unknown method: %s' % method)


    # step 1: load the data ----------------------------
    # 1.1 the domain
    pyhrf.verbose(3, 'domain from mesh: %s' %mesh_file)
    domain = domain_from_mesh(mesh_file)

    coord = domain.coord

    # 1.3 read the functional data
    beta = np.array([read_texture(b)[0] for b in beta_files]).T

    pyhrf.verbose(3, 'beta: %s' %str(beta.shape))
    pyhrf.verbose(3, 'mu * coord / np.std(coord): %s' \
                      %str((mu * coord / np.std(coord)).shape))
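    # Functional betas are concatenated with the node coordinates scaled by mu,
    # so the clustering trades off functional homogeneity against the spatial
    # compactness of the parcels.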
    feature = np.hstack((beta, mu * coord / np.std(coord)))

    if method != 'kmeans':
        g = field_from_coo_matrix_and_data(domain.topology, feature)


    if method == 'kmeans':
        _, u, _ = kmeans(feature, nbparcel)

    if method == 'ward':
        u, _ = g.ward(nbparcel)

    if method == 'gkm':
        seeds = np.argsort(np.random.rand(g.V))[:nbparcel]
        _, u, _ = g.geodesic_kmeans(seeds)

    if method == 'ward_and_gkm':
        w, _ = g.ward(nbparcel)
        _, u, _ = g.geodesic_kmeans(label=w)

    # print 'u:'
    # print u

    lpa = SubDomains(domain, u, 'parcellation')

    if verbose:
        var_beta = np.array(
            [np.var(beta[lpa.label == k], 0).sum() for k in range(lpa.k)])
        var_coord = np.array(
            [np.var(coord[lpa.label == k], 0).sum() for k in range(lpa.k)])
        size = lpa.get_size()
        vf = np.dot(var_beta, size) / size.sum()
        va = np.dot(var_coord, size) / size.sum()
        print nbparcel, "functional variance", vf, "anatomical variance", va

    # step 3: write the resulting label image
    if parcellation_file is not None:
        label_image = parcellation_file
    # elif write_dir is not None:
    #     label_image = os.path.join(write_dir, "parcel_%s.nii" % method)
    else:
        label_image = None

    if label_image is not None:
        #lpa.to_image(label_image, descrip='Intra-subject parcellation image')
        write_texture(u.astype(np.int32), label_image)
        if verbose:
            print "Wrote the parcellation images as %s" % label_image

    return u, label_image
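
A minimal usage sketch, assuming contrast p-value (or beta) textures have already been written as GIFTI files; paths and parameters are hypothetical:

beta_files = ['con_pval_audio.gii', 'con_pval_video.gii']  # hypothetical
labels, label_image = make_parcellation_surf_from_files(beta_files,
                                                        'lh_white.gii',
                                                        'parcellation.gii',
                                                        nbparcel=100,
                                                        method='ward')
# labels holds one parcel index per mesh node; label_image is the file written.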