import os

# exp_type and the project helpers (load_responses, load_movies,
# train_test_split, smooth_responses, fit_NN) are assumed defined earlier.
movie_type = 'movie'
spatial_downsample_factor = 4
# time_downsample_factor = 5
n_lag = 6
split_type = 'even'
saved_nns_dir = './temp/nets'
if not os.path.isdir(saved_nns_dir):
    os.makedirs(saved_nns_dir)

responses = load_responses(exp_type)
movies = load_movies(exp_type, movie_type,
                     downsample_factor=spatial_downsample_factor)
train_test_splits = [train_test_split(r, movies, split_type, train_frac=0.7,
                                      to_leave_out=0)
                     for r in responses]

nns = []
for i, response in enumerate(responses):
    name = response.name
    print 'Mouse %s' % name
    
    print 'Splitting out training and test data...'
    tr_rsp, val_rsp, tr_mov, val_mov = train_test_splits[i]
    
    tr_rsp = smooth_responses(tr_rsp)
    val_rsp = smooth_responses(val_rsp)

    network = fit_NN(tr_mov, tr_rsp, val_mov, val_rsp, n_lag)
    nns.append(network)

    network.save(os.path.join(saved_nns_dir, '%d.net' % i))
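
# The n_lag parameter above controls how many past movie frames are used to
# predict each response time point. A minimal sketch of that idea (an
# illustration only, not the project's fit_NN / window_matrices code):
import numpy as np

def lagged_design_matrix(movie, n_lag):
    """Stack the previous n_lag frames of `movie` for every usable time step.

    movie: array of shape (T, H, W). Returns (T - n_lag + 1, n_lag * H * W).
    """
    T = movie.shape[0]
    flat = movie.reshape(T, -1)
    rows = [flat[t - n_lag + 1:t + 1].ravel() for t in range(n_lag - 1, T)]
    return np.vstack(rows)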
Example #2
import os
import pickle
import shutil

import numpy as np

# load_responses, load_movies, train_test_split, smooth_responses,
# window_matrices and RegressionModel are assumed to come from the
# project's own modules.
def fit_mlp_models(n_hidden=25,
                   learning_rate=0.15,
                   n_epochs=100,
                   batch_size=600,
                   L1_reg=0.0e-9,
                   L2_reg=0.0e-9):
    exp_type = 'natural'
    movie_type = 'movie'
    spatial_downsample_factor = 4
    n_lag = 13
    saved_models_dir = './temp/mlp-models-%d' % n_hidden
    predicted_responses_dir = './temp/mlp-predicted-responses-%d' % n_hidden
    # Recreate the output directories from scratch on each run.
    for out_dir in (saved_models_dir, predicted_responses_dir):
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

    print '%d hidden layer neurons, %d epochs to train for' % (n_hidden,
                                                               n_epochs)
    responses = load_responses(exp_type)
    movies = load_movies(exp_type,
                         movie_type,
                         downsample_factor=spatial_downsample_factor)
    mlp_training_errors = []

    for i, response in enumerate(responses):
        name = response.name
        print 'Mouse %s' % name

        print 'Splitting out training and test data...'
        tr_rsp, tst_rsp, tr_mov, tst_mov = train_test_split(response,
                                                            movies,
                                                            'even',
                                                            train_frac=0.8)

        print 'Splitting out training and validation data...'
        tr_rsp, val_rsp, tr_mov, val_mov = train_test_split(tr_rsp,
                                                            tr_mov,
                                                            'even',
                                                            train_frac=0.9)

        tr_rsp = smooth_responses(tr_rsp)
        val_rsp = smooth_responses(val_rsp)
        tst_rsp = smooth_responses(tst_rsp)

        train_set_x, train_set_y = window_matrices(tr_rsp, tr_mov, n_lag)
        valid_set_x, valid_set_y = window_matrices(val_rsp, val_mov, n_lag)
        test_set_x, test_set_y = window_matrices(tst_rsp, tst_mov, n_lag)

        model = RegressionModel(model_name=name,
                                n_hidden=n_hidden,
                                learning_rate=learning_rate,
                                n_epochs=n_epochs,
                                batch_size=batch_size,
                                L1_reg=L1_reg,
                                L2_reg=L2_reg)

        model.setup_with_data([(train_set_x, train_set_y),
                               (valid_set_x, valid_set_y),
                               (test_set_x, test_set_y)])
        test_error = model.train()
        mlp_training_errors.append(test_error)

        predicted = model.y_pred()
        np.save(os.path.join(predicted_responses_dir, 'pred_%s' % name),
                predicted)
        with open(os.path.join(saved_models_dir, 'mlp_%s' % name), 'wb') as f:
            pickle.dump(model.regression.params, f)

    with open(os.path.join(saved_models_dir, 'train_errors'), 'wb') as f:
        pickle.dump(mlp_training_errors, f)
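
# A minimal way to drive the function above; the hidden-layer sizes here are
# illustrative, not values prescribed by the original code.
if __name__ == '__main__':
    for n_hidden in (10, 25, 50):
        # Each call writes its own ./temp/mlp-models-<n_hidden> and
        # ./temp/mlp-predicted-responses-<n_hidden> directories.
        fit_mlp_models(n_hidden=n_hidden)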
Example #5
# Preprocessing
import os
import numpy as np

from src.data_manip_utils import smooth_responses
# DATA_DIR, MICE_NAMES, DIRECTIONS and Response are assumed to be imported
# from the project's params and response modules.

locs_dirn = [os.path.join(DATA_DIR, '%s_dir.npy' % c) for c in MICE_NAMES]
data_dirn = [Response(n, loc) for n, loc in zip(MICE_NAMES, locs_dirn)]
locs_ori = [os.path.join(DATA_DIR, '%s_ori.npy' % c) for c in MICE_NAMES]
data_ori = [Response(n, loc) for n, loc in zip(MICE_NAMES, locs_ori)]
dirs_rad = np.radians(DIRECTIONS)
sigma0 = 2 * np.pi / len(DIRECTIONS)  # initial tuning curve width

for index, (m_dir, m_ori) in enumerate(zip(data_dirn, data_ori)):
    name = MICE_NAMES[index]
    print 'Mouse %s' % name

    # Smooth the responses first.
    m_dir = smooth_responses(m_dir)
    m_ori = smooth_responses(m_ori)

    N = m_dir.data.shape[1]  # number of neurons

    # Average response over all trials and time points.
    m_dir.avg = np.mean(m_dir.data, axis=(2, 3))
    m_ori.avg = np.mean(m_ori.data, axis=(2, 3))

    # Initial guesses for the tuning-curve fit: preferred orientation,
    # baseline, amplitude and width for each neuron.
    init_thetas = [
        dirs_rad[np.argmax(m_dir.avg[:, i])] % np.pi for i in range(N)
    ]
    init_cs = np.min(m_dir.avg, axis=0)
    init_ws = np.max(m_dir.avg, axis=0) - np.min(m_dir.avg, axis=0)
    init_sigmas = np.ones(N) * sigma0
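
# The initial values above are consistent with fitting a circular-Gaussian
# tuning curve per neuron (baseline c, amplitude w, preferred angle, width
# sigma). The actual fitting code is not shown in this snippet; a sketch of
# one way to do it with SciPy, under that assumption:
from scipy.optimize import curve_fit

def circ_gauss(theta, c, w, pref, sigma):
    # Baseline plus a Gaussian bump centred on the preferred orientation,
    # with angular differences wrapped to (-pi/2, pi/2] (180-degree period).
    d = (theta - pref + np.pi / 2) % np.pi - np.pi / 2
    return c + w * np.exp(-0.5 * (d / sigma) ** 2)

# Illustrative per-neuron fit against the trial-averaged responses:
# for i in range(N):
#     p0 = [init_cs[i], init_ws[i], init_thetas[i], init_sigmas[i]]
#     params, _ = curve_fit(circ_gauss, dirs_rad, m_dir.avg[:, i], p0=p0)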
Example #6
from src.data_manip_utils import smooth_responses
# load_responses, NeuronClustering, signal_correlation and noise_correlation
# are assumed to be imported from the project's other modules.

n_clusters = 3
exp_type = 'natural'
if exp_type == 'natural':
    from src.params.naturalmovies.datafile_params import PLOTS_DIR
elif exp_type == 'grating':
    from src.params.grating.datafile_params import PLOTS_DIR

data = load_responses(exp_type)
for index, m in enumerate(data):
    print 'Mouse %s' % m.name
    if exp_type == 'natural':
        # Keep only the first five stimuli (first axis of m.data).
        m.data = m.data[:5, :, :, :]

    m = smooth_responses(m)
    data[index] = m
    
    m.clustering = NeuronClustering(n_clusters, signal_correlation)
    m.clustering.fit(m.data)
    m.data = m.clustering.divnorm(m.data)

    S, N, L, R = m.data.shape  # presumably (stimuli, neurons, time bins, trials)

    # Computing signal correlation.
    m.signal_corr = signal_correlation(m.data)

    # Computing noise correlation.
    m.noise_corr = noise_correlation(m.data)

    # Hierarchical clustering to rearrange the correlation matrix.
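
# The comment above marks where hierarchical clustering would reorder the
# correlation matrix. One common way to do this with SciPy (a sketch, not
# necessarily the original implementation):
import numpy as np
from scipy.cluster.hierarchy import leaves_list, linkage
from scipy.spatial.distance import squareform

def reorder_by_clustering(corr):
    """Reorder an (N, N) correlation matrix so that correlated units sit
    next to each other, using average-linkage hierarchical clustering."""
    dist = 1.0 - corr                      # correlation -> dissimilarity
    np.fill_diagonal(dist, 0.0)
    order = leaves_list(linkage(squareform(dist, checks=False),
                                method='average'))
    return corr[np.ix_(order, order)], order

# Illustrative use: reordered, order = reorder_by_clustering(m.signal_corr)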