################################################################################
################################################################################
# Fit one neural network per mouse on (spatially downsampled) natural movies.
exp_type = 'natural'
movie_type = 'movie'
spatial_downsample_factor = 4
# time_downsample_factor = 5
n_lag = 6
split_type = 'even'
saved_nns_dir = './temp/nets'

responses = load_responses(exp_type)
movies = load_movies(exp_type, movie_type,
                     downsample_factor=spatial_downsample_factor)
# One train/validation split per mouse, precomputed up front.
train_test_splits = [train_test_split(rsp, movies, split_type,
                                      train_frac=0.7,
                                      to_leave_out=0)
                     for rsp in responses]

nns = []
for idx, response in enumerate(responses):
    name = response.name
    print('Mouse %s' % name)

    print('Splitting out training and test data...')
    tr_rsp, val_rsp, tr_mov, val_mov = train_test_splits[idx]

    # Smooth both halves before fitting.
    tr_rsp, val_rsp = smooth_responses(tr_rsp), smooth_responses(val_rsp)

    network = fit_NN(tr_mov, tr_rsp, val_mov, val_rsp, n_lag)
# 예제 #2 ("Example #2") -- non-code marker left by the source scrape.
def fit_mlp_models(n_hidden=25,
                   learning_rate=0.15,
                   n_epochs=100,
                   batch_size=600,
                   L1_reg=0.0e-9,
                   L2_reg=0.0e-9):
    """Fit one MLP regression model per mouse and save models/predictions.

    For each loaded response set: split into train/validation/test,
    smooth the responses, build lagged window matrices, train a
    RegressionModel, then persist the learned parameters and the
    predicted responses under ./temp/.

    Parameters
    ----------
    n_hidden : int
        Number of hidden-layer neurons (also keyed into the output paths).
    learning_rate : float
        Learning rate passed to RegressionModel.
    n_epochs : int
        Number of training epochs.
    batch_size : int
        Minibatch size.
    L1_reg, L2_reg : float
        L1/L2 regularisation weights.
    """
    exp_type = 'natural'
    movie_type = 'movie'
    spatial_downsample_factor = 4
    n_lag = 13
    saved_models_dir = './temp/mlp-models-%d' % n_hidden
    # Use %d here too, for consistency with saved_models_dir above
    # (the old %s produced the same text for an int but was inconsistent).
    predicted_responses_dir = './temp/mlp-predicted-responses-%d' % n_hidden
    # Recreate both output directories from scratch so stale results
    # from a previous run cannot linger.
    for out_dir in (saved_models_dir, predicted_responses_dir):
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

    print('%d hidden layer neurons, %d epochs to train for' % (n_hidden,
                                                               n_epochs))
    responses = load_responses(exp_type)
    movies = load_movies(exp_type,
                         movie_type,
                         downsample_factor=spatial_downsample_factor)
    mlp_training_errors = []

    for i, response in enumerate(responses):
        name = response.name
        print('Mouse %s' % name)

        print('Splitting out training and test data...')
        tr_rsp, tst_rsp, tr_mov, tst_mov = train_test_split(response,
                                                            movies,
                                                            'even',
                                                            train_frac=0.8)

        print('Splitting out training and validation data...')
        tr_rsp, val_rsp, tr_mov, val_mov = train_test_split(tr_rsp,
                                                            tr_mov,
                                                            'even',
                                                            train_frac=0.9)

        # Smooth after splitting, so each partition is smoothed on its own.
        tr_rsp = smooth_responses(tr_rsp)
        val_rsp = smooth_responses(val_rsp)
        tst_rsp = smooth_responses(tst_rsp)

        # presumably builds lagged design/target matrices over n_lag frames
        # -- TODO confirm against window_matrices' definition.
        train_set_x, train_set_y = window_matrices(tr_rsp, tr_mov, n_lag)
        valid_set_x, valid_set_y = window_matrices(val_rsp, val_mov, n_lag)
        test_set_x, test_set_y = window_matrices(tst_rsp, tst_mov, n_lag)

        model = RegressionModel(model_name=name,
                                n_hidden=n_hidden,
                                learning_rate=learning_rate,
                                n_epochs=n_epochs,
                                batch_size=batch_size,
                                L1_reg=L1_reg,
                                L2_reg=L2_reg)

        model.setup_with_data([(train_set_x, train_set_y),
                               (valid_set_x, valid_set_y),
                               (test_set_x, test_set_y)])
        test_error = model.train()
        mlp_training_errors.append(test_error)

        predicted = model.y_pred()
        np.save(os.path.join(predicted_responses_dir, 'pred_%s' % name),
                predicted)
        with open(os.path.join(saved_models_dir, 'mlp_%s' % name), 'wb') as f:
            pickle.dump(model.regression.params, f)

    with open(os.path.join(saved_models_dir, 'train_errors'), 'wb') as f:
        pickle.dump(mlp_training_errors, f)
k = 1
# One range of test indices per movie block.
# NOTE(review): `count` is used both as the block length and as the offset
# multiplier (k * count) -- confirm this indexing is intended.
test_idxs = [range(k * count, (k + 1) * count) for count in n_movies]

# Clustering pass, currently disabled:
# for r in responses:
#     r.clustering = NeuronClustering(n_clusters, signal_correlation)
#     r.clustering.fit(r.data)
#     r.data = r.clustering.cluster_response(r.data)

# movies = load_movies(exp_type, movie_type, downsample_factor=downsample_factor)
# NOTE(review): hard-coded 25 placeholders -- elsewhere the file uses
# [None] * len(responses); confirm they agree.
movies = [None] * 25

train_test_splits = []
for i, r in enumerate(responses):
    train_test_splits.append(train_test_split(r, movies, split_type,
                                              train_frac=0.7,
                                              to_leave_out=0,
                                              train_idxs=train_idxs[i],
                                              test_idxs=test_idxs[i]))

for split, response in zip(train_test_splits, responses):
    name = response.name
    print('Mouse %s' % name)

    print('Splitting out training and test data...')
    tr_rsp, te_rsp, tr_mov, te_mov = split
    # tr_rsp, te_rsp = train_test_splits[i]

    print('Fitting template-matching model...')
    model = AverageTemplate()
    model.fit(tr_rsp)
model_type = 'reverse'
regularisation = None

# Smooth every response set as it is loaded.
responses = [smooth_responses(r) for r in load_responses(exp_type)]
movies = load_movies(exp_type, movie_type, downsample_factor=downsample_factor)

for i, response in enumerate(responses):
    # Only the first mouse is processed for now.
    if i:
        break

    name = response.name
    print('Mouse %s' % name)

    print('Splitting out training and test data...')
    tr_rsp, te_rsp, tr_mov, te_mov = train_test_split(response, movies,
                                                      split_type,
                                                      train_frac=0.7,
                                                      to_leave_out=0)

    print('Fitting model parameters for %s %s...' % (model_type, model_name))
    if model_name == 'cca':
        model = LinearReconstruction('cca', model_type,
                                     n_lag=n_lag,
                                     n_clusters=n_clusters,
                                     n_components=n_components)
    elif model_name == 'linear-regression':
        model = LinearReconstruction('linear-regression', model_type,
                                     n_lag=n_lag,
                                     n_clusters=n_clusters,
                                     regularisation=regularisation)

    model.fit(tr_rsp, tr_mov)

    print('Reconstructing stimulus movies...')
# 예제 #5 ("Example #5") -- non-code marker left by the source scrape.
downsample_factor = 4
split_type = 'even'
n_clusters = 5

# Smooth every response set as it is loaded.
responses = [smooth_responses(r) for r in load_responses(exp_type)]

# Cluster neurons, then replace each raw response with its clustered form.
for rsp in responses:
    rsp.clustering = NeuronClustering(n_clusters, signal_correlation)
    rsp.clustering.fit(rsp.data)
    rsp.data = rsp.clustering.cluster_response(rsp.data)

# movies = load_movies(exp_type, movie_type, downsample_factor=downsample_factor)
movies = [None] * len(responses)

train_test_splits = [train_test_split(rsp, movies, split_type,
                                      train_frac=0.7, to_leave_out=0)
                     for rsp in responses]

for split, response in zip(train_test_splits, responses):
    name = response.name
    print('Mouse %s' % name)

    print('Splitting out training and test data...')
    tr_rsp, te_rsp, tr_mov, te_mov = split
    # tr_rsp, te_rsp = train_test_splits[i]

    print('Fitting template-matching model...')
    model = AverageTemplate()
    model.fit(tr_rsp)
    print('Decoding movie indices from test responses...')
    pred_movies = model.predict(te_rsp)
    cm = confusion_matrix(pred_movies)
def fit_mlp_models(n_hidden=25, learning_rate=0.15, n_epochs=100,
                   batch_size=600, L1_reg=0.0e-9, L2_reg=0.0e-9):
    """Fit one MLP regression model per mouse and save models/predictions.

    Splits each response set into train/validation/test, smooths the
    responses, builds lagged window matrices, trains a RegressionModel,
    and persists learned parameters plus predicted responses under ./temp/.

    Parameters
    ----------
    n_hidden : int
        Number of hidden-layer neurons (also keyed into the output paths).
    learning_rate : float
        Learning rate passed to RegressionModel.
    n_epochs : int
        Number of training epochs.
    batch_size : int
        Minibatch size.
    L1_reg, L2_reg : float
        L1/L2 regularisation weights.
    """
    exp_type = 'natural'
    movie_type = 'movie'
    spatial_downsample_factor = 4
    n_lag = 13
    saved_models_dir = './temp/mlp-models-%d' % n_hidden
    # %d instead of %s, matching saved_models_dir (same output for an int).
    predicted_responses_dir = './temp/mlp-predicted-responses-%d' % n_hidden
    # Wipe and recreate both output directories so no stale files remain.
    for out_dir in (saved_models_dir, predicted_responses_dir):
        if os.path.isdir(out_dir):
            shutil.rmtree(out_dir)
        os.makedirs(out_dir)

    print('%d hidden layer neurons, %d epochs to train for' % (n_hidden,
                                                               n_epochs))
    responses = load_responses(exp_type)
    movies = load_movies(exp_type, movie_type,
                         downsample_factor=spatial_downsample_factor)
    mlp_training_errors = []

    for i, response in enumerate(responses):
        name = response.name
        print('Mouse %s' % name)

        print('Splitting out training and test data...')
        tr_rsp, tst_rsp, tr_mov, tst_mov = train_test_split(response, movies,
                                                            'even',
                                                            train_frac=0.8)

        print('Splitting out training and validation data...')
        tr_rsp, val_rsp, tr_mov, val_mov = train_test_split(tr_rsp, tr_mov,
                                                            'even',
                                                            train_frac=0.9)

        # Smooth each partition separately, after splitting.
        tr_rsp = smooth_responses(tr_rsp)
        val_rsp = smooth_responses(val_rsp)
        tst_rsp = smooth_responses(tst_rsp)

        # presumably builds lagged design/target matrices over n_lag frames
        # -- TODO confirm against window_matrices' definition.
        train_set_x, train_set_y = window_matrices(tr_rsp, tr_mov, n_lag)
        valid_set_x, valid_set_y = window_matrices(val_rsp, val_mov, n_lag)
        test_set_x, test_set_y = window_matrices(tst_rsp, tst_mov, n_lag)

        model = RegressionModel(model_name=name, n_hidden=n_hidden,
                                learning_rate=learning_rate,
                                n_epochs=n_epochs, batch_size=batch_size,
                                L1_reg=L1_reg, L2_reg=L2_reg)

        model.setup_with_data([(train_set_x, train_set_y),
                               (valid_set_x, valid_set_y),
                               (test_set_x, test_set_y)])
        test_error = model.train()
        mlp_training_errors.append(test_error)

        predicted = model.y_pred()
        np.save(os.path.join(predicted_responses_dir, 'pred_%s' % name),
                predicted)
        with open(os.path.join(saved_models_dir, 'mlp_%s' % name), 'wb') as f:
            pickle.dump(model.regression.params, f)

    with open(os.path.join(saved_models_dir, 'train_errors'), 'wb') as f:
        pickle.dump(mlp_training_errors, f)