Example #1
import os
import pickle
import shutil

import numpy as np

from src.io import load_responses, load_movies
from src.data_manip_utils import train_test_split, smooth_responses
# window_matrices and RegressionModel are project helpers assumed to be
# importable from the surrounding codebase; their modules are not shown in
# this excerpt.


def fit_mlp_models(n_hidden=25,
                   learning_rate=0.15,
                   n_epochs=100,
                   batch_size=600,
                   L1_reg=0.0e-9,
                   L2_reg=0.0e-9):
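    """Fit one multilayer perceptron per mouse on natural-movie data,
    saving each mouse's trained parameters and predicted responses
    under ./temp/."""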
    exp_type = 'natural'
    movie_type = 'movie'
    spatial_downsample_factor = 4
    n_lag = 13
    saved_models_dir = './temp/mlp-models-%d' % n_hidden
    predicted_responses_dir = './temp/mlp-predicted-responses-%d' % n_hidden
    if not os.path.isdir(saved_models_dir):
        os.makedirs(saved_models_dir)
    else:
        shutil.rmtree(saved_models_dir)
        os.makedirs(saved_models_dir)
    if not os.path.isdir(predicted_responses_dir):
        os.makedirs(predicted_responses_dir)
    else:
        shutil.rmtree(predicted_responses_dir)
        os.makedirs(predicted_responses_dir)

    print '%d hidden layer neurons, %d epochs to train for' % (n_hidden,
                                                               n_epochs)
    responses = load_responses(exp_type)
    movies = load_movies(exp_type,
                         movie_type,
                         downsample_factor=spatial_downsample_factor)
    mlp_training_errors = []

    for i, response in enumerate(responses):
        name = response.name
        print 'Mouse %s' % name

        print 'Splitting out training and test data...'
        tr_rsp, tst_rsp, tr_mov, tst_mov = train_test_split(response,
                                                            movies,
                                                            'even',
                                                            train_frac=0.8)

        print 'Splitting out training and validation data...'
        tr_rsp, val_rsp, tr_mov, val_mov = train_test_split(tr_rsp,
                                                            tr_mov,
                                                            'even',
                                                            train_frac=0.9)

        tr_rsp = smooth_responses(tr_rsp)
        val_rsp = smooth_responses(val_rsp)
        tst_rsp = smooth_responses(tst_rsp)

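        # window_matrices (project helper, not shown here) presumably pairs
        # each response sample with a window of the preceding n_lag movie
        # frames to form the design matrices.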
        train_set_x, train_set_y = window_matrices(tr_rsp, tr_mov, n_lag)
        valid_set_x, valid_set_y = window_matrices(val_rsp, val_mov, n_lag)
        test_set_x, test_set_y = window_matrices(tst_rsp, tst_mov, n_lag)

        model = RegressionModel(model_name=name,
                                n_hidden=n_hidden,
                                learning_rate=learning_rate,
                                n_epochs=n_epochs,
                                batch_size=batch_size,
                                L1_reg=L1_reg,
                                L2_reg=L2_reg)

        model.setup_with_data([(train_set_x, train_set_y),
                               (valid_set_x, valid_set_y),
                               (test_set_x, test_set_y)])
        test_error = model.train()
        mlp_training_errors.append(test_error)

        predicted = model.y_pred()
        np.save(os.path.join(predicted_responses_dir, 'pred_%s' % name),
                predicted)
        with open(os.path.join(saved_models_dir, 'mlp_%s' % name), 'wb') as f:
            pickle.dump(model.regression.params, f)

    with open(os.path.join(saved_models_dir, 'train_errors'), 'wb') as f:
        pickle.dump(mlp_training_errors, f)
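
# Hypothetical driver (not part of the original excerpt): each call trains the
# per-mouse MLPs for one hidden-layer size and writes its own output
# directories under ./temp/.
# fit_mlp_models(n_hidden=25)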
    
from src.io import load_responses, load_movies
from src.data_manip_utils import train_test_split, train_test_split_grating
from src.data_manip_utils import confusion_matrix, confusion_matrix_grating
from src.data_manip_utils import cm_goodness
from src.data_manip_utils import smooth_responses

################################################################################
################################################################################
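# Script fragment: smooth each mouse's responses and build explicit per-mouse
# movie-index lists for a 'movie_index_list' train/test split; the
# neuron-clustering step below is left commented out.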
exp_type = 'natural'
movie_type = 'movie'
downsample_factor = 4
split_type = 'movie_index_list'
n_clusters = 5

responses = map(smooth_responses, load_responses(exp_type))
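# Per-mouse index lists: train on the first n movie indices and test on the
# k-th following block of the same size (k = 1 selects the next block).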
n_movies = [m.data.shape[0]/5 for m in responses]
train_idxs = [range(n) for n in n_movies]
k = 1
test_idxs = [range(k * n, (k+1) * n) for n in n_movies]

"""
for r in responses:
    r.clustering = NeuronClustering(n_clusters, signal_correlation)
    r.clustering.fit(r.data)
    r.data = r.clustering.cluster_response(r.data)
"""

# movies = load_movies(exp_type, movie_type, downsample_factor=downsample_factor)
movies = [None] * 25
if exp_type == 'natural':
    from src.params.naturalmovies.datafile_params import PLOTS_DIR
    from src.params.naturalmovies.stimulus_params import CA_SAMPLING_RATE
elif exp_type == 'grating':
    from src.params.grating.datafile_params import PLOTS_DIR
    from src.params.grating.stimulus_params import CA_SAMPLING_RATE

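# Configuration for a 'reverse' linear-regression fit on smoothed responses,
# using leave-one-out ('loo') train/test splits and no regularisation.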
movie_type = 'movie'
downsample_factor = 8
n_lag = 13
n_clusters = 4
n_components = 16
split_type = 'loo'
model_name = 'linear-regression'
model_type = 'reverse'
regularisation = None

responses = map(smooth_responses, load_responses(exp_type))
movies = load_movies(exp_type, movie_type, downsample_factor=downsample_factor)

for i, response in enumerate(responses):
    if i > 0:
        break

    name = response.name
    print 'Mouse %s' % name
    
    print 'Splitting out training and test data...'
    tr_rsp, te_rsp, tr_mov, te_mov = train_test_split(response, movies,
                                                      split_type,
                                                      train_frac=0.7,
                                                      to_leave_out=0)
Example #6
        # Tail of the per-mouse network-training function: `net`, `X_val`
        # and `Y_val` are built earlier in the original source.
        pred[i,:] = net.run(X_val[i])
    print mean_absolute_error(np.array(Y_val), pred)

    return net


################################################################################
################################################################################
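# Script fragment: load natural-movie responses and stimuli and build 'even'
# 70/30 train/validation splits, one per mouse, ahead of per-mouse network
# training (trained nets are collected in `nns`).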
exp_type = 'natural'
movie_type = 'movie'
spatial_downsample_factor = 4
# time_downsample_factor = 5
n_lag = 6
split_type = 'even'
saved_nns_dir = './temp/nets'

responses = load_responses(exp_type)
movies = load_movies(exp_type,
                     movie_type,
                     downsample_factor=spatial_downsample_factor)
train_test_splits = map(
    lambda r: train_test_split(
        r, movies, split_type, train_frac=0.7, to_leave_out=0), responses)

nns = []
for i, response in enumerate(responses):
    name = response.name
    print 'Mouse %s' % name

    print 'Splitting out training and test data...'
    tr_rsp, val_rsp, tr_mov, val_mov = train_test_splits[i]
from src.io import load_responses
from src.response import Response
from src.reliability import reliability
from src.correlation import signal_correlation, noise_correlation
from src.clustering import NeuronClustering
from src.data_manip_utils import smooth_responses

n_clusters = 3
exp_type = 'natural'
if exp_type == 'natural':
    from src.params.naturalmovies.datafile_params import PLOTS_DIR
elif exp_type == 'grating':
    from src.params.grating.datafile_params import PLOTS_DIR

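# For each mouse: keep the first five stimuli (natural-movie experiment only),
# smooth the responses, cluster neurons by signal correlation, and replace the
# data with the clustering's divnorm transform.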
data = load_responses(exp_type)
for index, m in enumerate(data):
    print 'Mouse %s' % m.name
    if exp_type == 'natural':
        m.data = m.data[:5,:,:,:]

    data[index] = smooth_responses(m)
    m = data[index]
    
    m.clustering = NeuronClustering(n_clusters, signal_correlation)
    m.clustering.fit(m.data)
    m.data = m.clustering.divnorm(m.data)

    S, N, L, R = m.data.shape

    # Computing signal correlation.