def window_matrices(rsp, movs, n_lag):
    """Build paired (stimulus, response) design matrices for lagged regression.

    Parameters
    ----------
    rsp : object with a ``data`` ndarray of shape (S, N, T, R)
        S stimuli, N cells/channels, T time steps, R repeated trials.
        (Shape inferred from the indexing below — confirm against caller.)
    movs : sequence of ndarray, each of shape (LY, LX, T)
        One movie per stimulus: LY x LX pixels over T frames.
    n_lag : int
        Number of movie frames (current plus history) per temporal window.

    Returns
    -------
    Whatever ``shared_dataset`` produces for
    (stacked movie-window matrix, response matrix); rows of both are
    ordered stimulus-major, then trial, then time.
    """

    def movie_window_matrix(mov, lag):
        # Row t is the flattened (LY, LX, lag) window ending at frame t.
        # Front zero-padding supplies the missing history for the first
        # lag - 1 frames.
        ly, lx, t_len = mov.shape
        padded = np.pad(mov, ((0, 0), (0, 0), (lag - 1, 0)),
                        mode='constant')
        mat = np.empty((t_len, ly * lx * lag))
        for t in range(t_len):
            mat[t, :] = padded[:, :, t:t + lag].flatten()
        return mat

    def response_window_matrix(resp):
        # Rows ordered (stimulus, trial, time); equivalent to the nested
        # s/r/t loop filling resp.data[s, :, t, r] row by row, done as a
        # single transpose + reshape instead of a Python loop.
        s_len, n_cells, t_len, r_len = resp.data.shape
        return resp.data.transpose(0, 3, 2, 1).reshape(
            s_len * r_len * t_len, n_cells)

    mov_mats = [movie_window_matrix(m, n_lag) for m in movs]
    rsp_mat = response_window_matrix(rsp)
    n_trials = rsp.data.shape[3]
    # Repeat each movie's matrix once per trial so its rows line up with
    # the (stimulus, trial, time) ordering of the response matrix.
    mov_mat = np.vstack([np.tile(m, (n_trials, 1)) for m in mov_mats])
    return shared_dataset((mov_mat, rsp_mat))
def window_matrices(rsp, movs, n_lag):
    """Assemble lagged stimulus windows and matching responses and hand the
    pair to ``shared_dataset``.

    ``rsp.data`` is indexed as (stimulus, cell, time, trial); each entry of
    ``movs`` is a (LY, LX, T) movie for one stimulus.
    """

    def stim_matrix(movie, lag):
        # Row f holds the flattened lag-frame history ending at frame f of
        # the front-padded movie.
        height, width, frames = movie.shape
        padded = np.pad(movie, ((0, 0), (0, 0), (lag - 1, 0)),
                        mode='constant')
        out = np.empty((frames, height * width * lag))
        for f in range(frames):
            out[f] = padded[:, :, f:f + lag].ravel()
        return out

    def resp_matrix(resp):
        # Flatten responses row by row, stimulus-major, then trial, then time.
        n_stim, n_cells, n_time, n_reps = resp.data.shape
        out = np.empty((n_time * n_reps * n_stim, n_cells))
        row = 0
        for stim in range(n_stim):
            for rep in range(n_reps):
                for t in range(n_time):
                    out[row] = resp.data[stim, :, t, rep]
                    row += 1
        return out

    stim_mats = [stim_matrix(movie, n_lag) for movie in movs]
    responses = resp_matrix(rsp)
    reps = rsp.data.shape[3]
    # Duplicate each stimulus matrix once per trial to mirror the response
    # row ordering.
    stacked = np.vstack([np.vstack(reps * [sm]) for sm in stim_mats])
    return shared_dataset((stacked, responses))
def load_data(n_ex, n_features_in, n_features_out, split_fracs=(0.6, 0.8)):
    """Create a synthetic linear-regression dataset as shared splits.

    Inputs are uniform random; targets are the input mean per example
    (uniform averaging weights) plus small uniform noise and a +1.0 offset.

    Parameters
    ----------
    n_ex : int
        Total number of examples to generate.
    n_features_in, n_features_out : int
        Input and output dimensionality.
    split_fracs : (float, float), optional
        Cumulative cut points for the train/validation boundary and the
        validation/test boundary. Default (0.6, 0.8) reproduces the
        original fixed 60/20/20 split.

    Returns
    -------
    list of (x, y) pairs
        [(train_x, train_y), (val_x, val_y), (test_x, test_y)], each pair
        wrapped by ``shared_dataset``.
    """
    print("Creating data...")
    data_input = np.random.random((n_ex, n_features_in))
    # Uniform averaging weights: every output is the mean of the inputs.
    weights = (1.0 / n_features_in) * np.ones((n_features_in, n_features_out))
    print("*\n")
    print(weights)
    print("\n*")
    data_output = (np.dot(data_input, weights)
                   + 0.1 * np.random.random((n_ex, n_features_out)) + 1.0)
    t1 = int(split_fracs[0] * n_ex)
    t2 = int(split_fracs[1] * n_ex)
    train_input, train_output = data_input[:t1, :], data_output[:t1, :]
    val_input, val_output = data_input[t1:t2, :], data_output[t1:t2, :]
    test_input, test_output = data_input[t2:, :], data_output[t2:, :]
    print("Data created...")
    test_x, test_y = shared_dataset((test_input, test_output))
    train_x, train_y = shared_dataset((train_input, train_output))
    val_x, val_y = shared_dataset((val_input, val_output))
    rval = [(train_x, train_y), (val_x, val_y), (test_x, test_y)]
    return rval
def load_data(n_ex, n_features_in, n_features_out):
    """Generate a random linear-regression dataset and return shared
    train/validation/test splits (60/20/20)."""
    print('Creating data...')
    inputs = np.random.random((n_ex, n_features_in))
    # Uniform averaging weights: each output is the mean of the inputs.
    mixing = (1.0 / n_features_in) * np.ones((n_features_in, n_features_out))
    print('*\n')
    print(mixing)
    print('\n*')
    # Linear map plus small uniform noise and a constant +1.0 offset.
    targets = (inputs.dot(mixing)
               + 0.1 * np.random.random((n_ex, n_features_out)) + 1.0)
    cut_a, cut_b = int(0.6 * n_ex), int(0.8 * n_ex)
    print('Data created...')
    test_x, test_y = shared_dataset((inputs[cut_b:, :], targets[cut_b:, :]))
    train_x, train_y = shared_dataset((inputs[:cut_a, :], targets[:cut_a, :]))
    val_x, val_y = shared_dataset((inputs[cut_a:cut_b, :],
                                   targets[cut_a:cut_b, :]))
    return [(train_x, train_y), (val_x, val_y), (test_x, test_y)]