Example #1
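A minimal smoke test for distributed HTFA: two synthetic subjects are dealt out across MPI ranks, the model is fit on each rank's share of the data, and the shapes of the fitted attributes are checked on the root and worker ranks. Meant to be launched under MPI, e.g. with mpiexec and at least two processes.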
def test_can_run():
    import numpy as np
    from brainiak.factoranalysis.htfa import HTFA
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    n_voxel = 100
    n_tr = 20
    K = 5
    max_global_iter = 3
    max_local_iter = 3
    max_voxel = n_voxel
    max_tr = n_tr
    R = []
    n_subj = 2
    for s in np.arange(n_subj):
        R.append(np.random.randint(2, high=102, size=(n_voxel, 3)))
    my_R = []
    for idx in np.arange(n_subj):
        if idx % size == rank:
            my_R.append(R[idx])

    htfa = HTFA(
        K,
        n_subj=n_subj,
        max_global_iter=max_global_iter,
        max_local_iter=max_local_iter,
        max_voxel=max_voxel,
        max_tr=max_tr,
        verbose=True)
    assert htfa, "Invalid HTFA instance!"

    X = []
    for s in np.arange(n_subj):
        X.append(np.random.rand(n_voxel, n_tr))
    my_data = []
    for idx in np.arange(n_subj):
        if idx % size == rank:
            my_data.append(X[idx])

    if rank == 0:
        htfa.fit(my_data, R=my_R)
        # fit() returning without raising is itself part of the test; the asserts
        # below check the shapes of the aggregated global results on the root rank
        assert htfa.global_prior_.shape[0] == htfa.prior_bcast_size,\
            "Invalid result of HTFA! (wrong # element in global_prior)"
        assert htfa.global_posterior_.shape[0] == htfa.prior_bcast_size,\
            "Invalid result of HTFA! (wrong # element in global_posterior)"

    else:
        htfa.fit(my_data, R=my_R)
        # worker ranks check the shapes of their local results
        assert htfa.local_weights_.shape[0] == n_tr * K,\
            "Invalid result of HTFA! (wrong # element in local_weights)"
        assert htfa.local_posterior_.shape[0] == htfa.prior_size,\
            "Invalid result of HTFA! (wrong # element in local_posterior)"
        R.append(all_data['R'])  # tail of the (omitted) per-subject loading loop

n_voxel, n_tr = data[0].shape

# Run HTFA with downloaded data
from brainiak.factoranalysis.htfa import HTFA
# uncomment the line below to see HTFA's help message
# help(HTFA)

K = 5
htfa = HTFA(K=K,
            n_subj=n_subj,
            max_global_iter=5,
            max_local_iter=2,
            voxel_ratio=0.5,
            tr_ratio=0.5,
            max_voxel=n_voxel,
            max_tr=n_tr,
            verbose=True)
htfa.fit(data, R)

if rank == 0:
    print("\n centers of global latent factors are:")
    print(htfa.get_centers(htfa.global_posterior_))
    print("\n widths of global latent factors are:")
    widths = htfa.get_widths(htfa.global_posterior_)
    print(widths)
    print("\n stds of global latent RBF factors are:")
    rbf_std = np.sqrt(widths / 2.0)
    print(rbf_std)
Example #3
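Input-validation tests for HTFA.fit: each malformed X or R argument should raise a TypeError or ValueError whose message pinpoints the problem.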
def test_X():
    from brainiak.factoranalysis.htfa import HTFA
    import numpy as np
    import pytest  # used by the pytest.raises checks below

    n_voxel = 100
    n_tr = 20
    K = 5
    max_global_iter = 3
    max_local_iter = 3
    max_voxel = n_voxel
    max_tr = n_tr

    R = []
    n_subj = 2
    for s in np.arange(n_subj):
        R.append(np.random.randint(2, high=102, size=(n_voxel, 3)))

    htfa = HTFA(
        K,
        n_subj=n_subj,
        max_global_iter=max_global_iter,
        max_local_iter=max_local_iter,
        max_voxel=max_voxel,
        max_tr=max_tr)

    X = np.random.rand(n_voxel, n_tr)
    # Check that it does NOT run with the wrong data type
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert "Input data should be a list" in str(excinfo.value)

    X = []
    # Check that it does NOT run with an empty subject list
    with pytest.raises(ValueError) as excinfo:
        htfa.fit(X, R=R)
    # "leat" (sic) matches the typo in the library's error message
    assert "Need at leat one subject to train the model" in str(excinfo.value)

    X = []
    X.append([1, 2, 3])
    # Check that it does NOT run when a subject's data is not an ndarray
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert "data should be an array" in str(excinfo.value)

    X = []
    X.append(np.random.rand(n_voxel))
    # Check that it does NOT run when a subject's data is not a 2D array
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert "subject data should be 2D array" in str(excinfo.value)

    X = []
    for s in np.arange(n_subj):
        X.append(np.random.rand(n_voxel, n_tr))
    R = np.random.randint(2, high=102, size=(n_voxel, 3))

    # Check that it does NOT run when the coordinates are not a list
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert "Coordinates should be a list" in str(excinfo.value)

    R = []
    R.append([1, 2, 3])
    # Check that it does NOT run when a coordinate matrix is not an ndarray
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert ("Each scanner coordinate matrix should be an array"
            in str(excinfo.value))

    R = []
    R.append(np.random.rand(n_voxel))
    # Check that it does NOT run when a coordinate matrix is not a 2D array
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert ("Each scanner coordinate matrix should be 2D array"
            in str(excinfo.value))

    R = []
    for s in np.arange(n_subj):
        R.append(np.random.rand(n_voxel - 1, 3))
    # Check that it does NOT run when X[idx] and R[idx] disagree on n_voxel
    with pytest.raises(TypeError) as excinfo:
        htfa.fit(X, R=R)
    assert ("n_voxel should be the same in X[idx] and R[idx]"
            in str(excinfo.value))
Example #4
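A complete analysis script: for each pair of stimulus clusters it picks one image per cluster, looks up the matching fMRI runs, optionally plots voxel locations and single-subject TFA results, then fits a two-subject HTFA and plots global and per-subject network hubs plus a static ISFC connectome. The helpers nii2cmu, cmu2nii, and dynamic_ISFC used below are defined elsewhere in the original script.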
# Imports reconstructed from usage in the function body; nii2cmu, cmu2nii and
# dynamic_ISFC are helpers defined elsewhere in the original script.
import argparse
import os
import random
from collections import defaultdict
from itertools import combinations
from os import listdir
from os.path import isfile, join

import numpy as np
import scipy.spatial.distance as sd
import seaborn as sns
import yaml
import hypertools as hyp
from nilearn import image
from nilearn import plotting as niplot
from brainiak.factoranalysis.htfa import HTFA
from brainiak.factoranalysis.tfa import TFA


def main():
    parser = argparse.ArgumentParser(
        description='Input: cluster info, Output: brain images')
    parser.add_argument(
        "json_file",
        help='cluster information: key=cluster_number, value=name of images')
    parser.add_argument("--K",
                        type=int,
                        default=5,
                        help='number of points on the brain')
    parser.add_argument("--n",
                        type=int,
                        default=0,
                        help='number of combinations')
    parser.add_argument("--voxel")
    parser.add_argument("--tfa")
    args = parser.parse_args()

    json_file = args.json_file
    # str.strip('.yml') removes characters, not the suffix; use splitext instead
    out_dir = os.path.splitext(json_file)[0]
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    K = args.K
    n = args.n
    voxel = args.voxel

    # read in the image-name -> fMRI-data mapping info
    order_dir = '/Users/hyundonglee/Desktop/Order'
    onlyfiles = [
        join(order_dir, f) for f in listdir(order_dir)
        if isfile(join(order_dir, f)) and '.txt' in f
    ]

    fmri = defaultdict(list)

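    # Each order file lists one stimulus image name per line; map each image to
    # its BOLD and brainmask NIfTI files plus a 3-TR slice window, starting at
    # TR 3 and advancing 5 TRs per stimulus.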
    for f in onlyfiles:
        with open(f, 'rb') as fh:
            name = f.split('.')[0]
            i = 3
            for line in fh:
                line = line.strip()
                # rewrite '...sessXrunY...' as '...ses-Xrun-Y...'
                base = name.split('/')[-1]
                fn = (base.split('sess')[0] + 'ses-' +
                      base.split('sess')[1].split('run')[0] + 'run-' +
                      base.split('sess')[1].split('run')[1])
                if 'CSI_' in fn:
                    fn = fn.split('CSI_')[0] + 'CSI1_' + fn.split('CSI_')[1]
                fmri[line].append(('sub-' + fn + '_bold_stnd.nii', i, i + 3,
                                   'sub-' + fn + '_brainmask.nii'))
                i += 5

    with open(json_file, 'r') as fh:
        cluster = yaml.safe_load(fh)  # despite the argument name, the file is YAML

    n_pairs = 0
    for cluster_x, cluster_y in combinations(cluster.keys(), 2):

        # pick one image from each cluster at random and
        # retrieve the relevant fMRI data for x and y
        random.shuffle(cluster[cluster_x])
        random.shuffle(cluster[cluster_y])
        for x in cluster[cluster_x]:
            # fmri values: (*_bold_stnd.nii, slice_start, slice_end, *_brainmask.nii)
            fmri_x = fmri[bytes(x, encoding='utf-8')]
            if len(fmri_x) > 0:
                x_n = x.split('.')[0]
                break

        for y in cluster[cluster_y]:
            fmri_y = fmri[bytes(y, encoding='utf-8')]
            if len(fmri_y) > 0:
                # skip a candidate that comes from the same scan as x
                if fmri_y[0][0] == fmri_x[0][0]:
                    continue
                y_n = y.split('.')[0]
                break

        fmri_x = random.choice(fmri_x)
        fmri_y = random.choice(fmri_y)
        # keep everything after the 'sub-' prefix; lstrip('sub-') would strip
        # matching characters, not the prefix
        subj_x = fmri_x[0].split('_bold')[0][len('sub-'):] + '_'
        subj_y = fmri_y[0].split('_bold')[0][len('sub-'):] + '_'

        nifti_dir = '/Users/hyundonglee/Desktop/Nifti/'
        x_nii = image.smooth_img(nifti_dir + fmri_x[0], fwhm=7)
        y_nii = image.smooth_img(nifti_dir + fmri_y[0], fwhm=7)

        # convert each subject's 3-TR NIfTI slab to CMU format
        cmu_data = [
            nii2cmu(x_nii.slicer[:, :, :, fmri_x[1]:fmri_x[2]], fmri_x[3]),
            nii2cmu(y_nii.slicer[:, :, :, fmri_y[1]:fmri_y[2]], fmri_y[3]),
        ]

        if args.voxel:
            print("saving voxel_locations")
            hyp.plot(cmu_data[0]['R'],
                     'k.',
                     save_path=out_dir + '/voxel_locations_' + subj_x + x_n +
                     '.png')
            hyp.plot(cmu_data[1]['R'],
                     'k.',
                     save_path=out_dir + '/voxel_locations_' + subj_y + y_n +
                     '.png')
            print("save voxel_locations complete")

        # transpose each (timepoints x voxels) matrix to (voxels x timepoints)
        htfa_data = [{'R': d['R'], 'Z': d['Y'].T} for d in cmu_data]
        nvoxels, ntimepoints = htfa_data[0]['Z'].shape

        if args.tfa:
            # Use TFA to find network hubs in one subject's data
            print("running TFA")
            tfa_x = TFA(K=K,
                        max_num_voxel=int(nvoxels * 0.05),
                        max_num_tr=int(ntimepoints),
                        verbose=False)
            tfa_x.fit(htfa_data[0]['Z'], htfa_data[0]['R'])

            # plot the hubs on a glass brain!
            niplot.plot_connectome(np.eye(K),
                                   tfa_x.get_centers(tfa_x.local_posterior_),
                                   node_color='k',
                                   output_file=out_dir + '/network_hubs_' +
                                   subj_x + x_n + '.png')

            # Visualizing how the brain images are simplified using TFA
            original_image = cmu2nii(htfa_data[0]['Z'][:, 0].T,
                                     htfa_data[0]['R'], x_nii)
            niplot.plot_glass_brain(original_image,
                                    plot_abs=False,
                                    output_file=out_dir +
                                    '/simplified_by_TFA_' + subj_x + x_n +
                                    '.png')

            # correlation distance -> correlation; the metric belongs in pdist,
            # not in squareform, where it would be silently ignored
            connectome = 1 - sd.squareform(sd.pdist(tfa_x.W_, 'correlation'))
            niplot.plot_connectome(connectome,
                                   tfa_x.get_centers(tfa_x.local_posterior_),
                                   node_color='k',
                                   edge_threshold='75%',
                                   output_file=out_dir + '/connectome_' +
                                   subj_x + x_n + '.png')

            tfa_y = TFA(K=K,
                        max_num_voxel=int(nvoxels * 0.05),
                        max_num_tr=int(ntimepoints),
                        verbose=False)
            tfa_y.fit(htfa_data[1]['Z'], htfa_data[1]['R'])

            # plot the hubs on a glass brain!
            niplot.plot_connectome(np.eye(K),
                                   tfa_y.get_centers(tfa_y.local_posterior_),
                                   node_color='k',
                                   output_file=out_dir + '/network_hubs_' +
                                   subj_y + y_n + '.png')

            # Visualizing how the brain images are simplified using TFA
            original_image = cmu2nii(htfa_data[1]['Z'][:, 0].T,
                                     htfa_data[1]['R'], y_nii)
            niplot.plot_glass_brain(original_image,
                                    plot_abs=False,
                                    output_file=out_dir +
                                    '/simplified_by_TFA_' + subj_y + y_n +
                                    '.png')

            connectome = 1 - sd.squareform(sd.pdist(tfa_y.W_, 'correlation'))
            niplot.plot_connectome(connectome,
                                   tfa_y.get_centers(tfa_y.local_posterior_),
                                   node_color='k',
                                   edge_threshold='75%',
                                   output_file=out_dir + '/connectome_' +
                                   subj_y + y_n + '.png')

            print("TFA complete")

        print("running HTFA")
        htfa = HTFA(K=K,
                    n_subj=len(htfa_data),
                    max_global_iter=5,
                    max_local_iter=2,
                    voxel_ratio=0.5,
                    tr_ratio=0.5,
                    max_voxel=int(nvoxels * 0.05),
                    max_tr=int(ntimepoints))

        htfa.fit(list(map(lambda x: x['Z'], htfa_data)),
                 list(map(lambda x: x['R'], htfa_data)))

        # set the node display properties: global hubs black and larger,
        # one palette color per subject
        colors = np.repeat(np.vstack([[0, 0, 0],
                                      sns.color_palette(
                                          "Spectral", htfa.n_subj)]),
                           K,
                           axis=0)
        colors = list(colors)  # plot_connectome expects a sequence of per-node colors
        sizes = np.repeat(
            np.hstack([np.array(50),
                       np.array(htfa.n_subj * [20])]), K)

        # extract the node locations from the fitted HTFA model
        global_centers = htfa.get_centers(htfa.global_posterior_)
        local_centers = list(
            map(htfa.get_centers,
                np.array_split(htfa.local_posterior_, htfa.n_subj)))
        centers = np.vstack([global_centers, np.vstack(local_centers)])

        # make the plot
        niplot.plot_connectome(np.eye(K * (1 + htfa.n_subj)),
                               centers,
                               node_color=colors,
                               node_size=sizes,
                               output_file=out_dir + '/htfa_' + subj_x + x_n +
                               '_' + subj_y + y_n + '.png')

        # number of timepoints for each person
        n_timepoints = [d['Z'].shape[1] for d in htfa_data]
        inds = np.hstack([0, np.cumsum(np.multiply(K, n_timepoints))])
        # unpack each subject's factor weights into a (timepoints x K) matrix
        W = [htfa.local_weights_[inds[i]:inds[i + 1]].reshape(
                [K, n_timepoints[i]]).T for i in range(htfa.n_subj)]

        static_isfc = dynamic_ISFC(W)
        niplot.plot_connectome(sd.squareform(static_isfc[0, :]),
                               global_centers,
                               node_color='k',
                               edge_threshold='75%',
                               output_file=out_dir + '/static_isfc_' + subj_x +
                               x_n + '_' + subj_y + y_n + '.png')

        print("HTFA complete")

        n_pairs += 1
        if n != 0 and n_pairs == n:
            break
Example #5
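A fragment of a cross-validation sweep (truncated): HTFA is fit on the training data for one candidate K, then each local subject's posterior is unpacked into factor centers and widths. Undefined names here (Ks, idx, train_data, mapping, htfa_comm, n_dim, test_voxel_indices, the nlss_* settings, and so on) are defined earlier in the original script.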
                    # tail of the (omitted) train/test voxel-split loop
                    test_recon_R.append(R[s][test_voxel_indices])

                htfa = HTFA(K=Ks[idx],
                            max_global_iter=5,
                            max_local_iter=2,
                            n_subj=n_subj,
                            nlss_method=nlss_method,
                            nlss_loss=nlss_loss,
                            tr_solver=tr_solver,
                            upper_ratio=upper_ratio,
                            lower_ratio=lower_ratio,
                            max_tr=max_sample_tr,
                            max_voxel=max_sample_voxel,
                            comm=htfa_comm,
                            verbose=True)
                htfa.fit(train_data, R)

                for s in range(n_local_subj):
                    # get the posterior for each local subject
                    subj_idx = mapping[str(s)]
                    start_idx = s * htfa.prior_size
                    end_idx = (s + 1) * htfa.prior_size
                    local_posterior = htfa.local_posterior_[start_idx:end_idx]
                    local_centers = htfa.get_centers(local_posterior)
                    local_widths = htfa.get_widths(local_posterior)

                    htfa.n_dim = n_dim
                    htfa.cov_vec_size = np.sum(np.arange(htfa.n_dim) + 1)
                    htfa.map_offset = htfa.get_map_offset()
                    # training uses all voxels but only a subset of the TRs
                    unique_R_all, inds_all = htfa.get_unique_R(R[s])