# ---- Example 1 (scraped snippet header; original marker: "Exemple #1", score 0) ----
## Fill the preallocated LFP array, one simulation per row; the first
## 150 time samples are dropped (startup transient — TODO confirm).
for idx, sim_id in enumerate(ids):
    h5_path = os.path.join(DATA_DIR, 'nest_output', sim_id,
                           'LFP_firing_rate.h5')
    with h5py.File(h5_path) as h5:
        lfps[idx] = h5['data'][:, 150:]

lfps = np.array(lfps)
labels = np.array(labels)

## Compute power spectral densities, then free the raw LFPs
fs, psds = calc_psds(lfps)
del lfps

## Remove cases of simulations where no neurons spiked
psds, labels = remove_zero_lfps(psds, labels)

## Hold out the first 10 000 samples for testing; train on the rest
test_psds, training_psds = psds[:10000], psds[10000:]
test_labels, training_labels = labels[:10000], labels[10000:]

## Hyperparameters
batch_size = 100   # samples per gradient step
epochs = 400       # full passes over the training set
lr = 1e-3          # learning rate (optimizer not visible in this excerpt)

## Assemble the model inputs/targets. Labels are copied so that any
## later in-place rescaling does not clobber the originals.
x_train, x_test = training_psds, test_psds
y_train = training_labels.copy()
y_test = test_labels.copy()
## Load the large-parameterspace LFPs (first 10 000 simulations).
## FIX: `lfps` was deleted above (`del lfps`) and never reallocated, so the
## fill loop below raised NameError. Preallocate as the other loaders in
## this file do: 6 channels x 2851 samples after dropping the first 150,
## float32 to halve memory. TODO confirm shape against the HDF5 files.
lfps = np.zeros((len(ids[:10000]), 6, 2851), dtype=np.float32)
for j, i in enumerate(ids[:10000]):
    with h5py.File(
            os.path.join(DATA_DIR_LARGE, 'nest_output', i,
                         'LFP_firing_rate.h5')) as f:
        lfp = f['data'][:, 150:]
    lfps[j] = lfp

lfps = np.array(lfps)
labels_large = np.array(labels_large)

## Get PSDs, then free the raw LFP array
fs, psds_large = calc_psds(lfps)
del lfps

## Remove cases of simulations where no neurons spiked
psds_large, labels_large = remove_zero_lfps(psds_large, labels_large)

## Move the channel axis last — presumably (sim, freq, channel); verify
## against what calc_psds returns.
psds_large = np.swapaxes(psds_large, 1, 2)

## Rescale labels to roughly [0, 1]: subtract each parameter's lower
## bound, divide by its range (bounds hard-coded for this sampled space)
labels_large_rescaled = labels_large - np.array([[0.8, 3.5, 0.05]])
labels_large_rescaled /= np.array([[3.2, 4.5, 0.35]])


## CNN model
def set_up_model(x_train, lr, n_dense=128, output=3):
    # Build a 1-D CNN whose input shape is taken from the training data
    # (x_train assumed 3-D: samples x dim1 x dim2 — TODO confirm axis order).
    # NOTE(review): this definition is truncated in this excerpt — the
    # Conv1D call (and the rest of the model) is cut off mid-statement.
    keras.backend.clear_session()
    input_shape = (x_train.shape[1], x_train.shape[2])
    inputs = keras.layers.Input(shape=input_shape)
    x = keras.layers.Conv1D(20,
                            kernel_size=(12),
# ---- Example 3 (scraped snippet header; original marker: "Exemple #3", score 0) ----
## Load the first 10 000 large-parameterspace simulations, dropping the
## first 150 time samples of each LFP (presumably a startup transient).
for idx, sim_id in enumerate(ids[:10000]):
    h5_path = os.path.join(DATA_DIR_LARGE, 'nest_output', sim_id,
                           'LFP_firing_rate.h5')
    with h5py.File(h5_path) as h5:
        lfps[idx] = h5['data'][:, 150:]

lfps = np.array(lfps)
labels_large = np.array(labels_large)

## Power spectral densities; the raw LFPs are no longer needed afterwards
fs, psds_large = calc_psds(lfps)
del lfps

## Discard simulations in which no neurons spiked
psds_large, labels_large = remove_zero_lfps(psds_large, labels_large)

## Swap the last two axes — TODO confirm calc_psds output axis order
psds_large = np.swapaxes(psds_large, 1, 2)

#### Load small parameterspace LFPs
# Read simulation ids and their parameter labels from the index file.
ids, labels_small = read_ids_parameters(
    os.path.join(DATA_DIR_SMALL, 'id_parameters.txt'))

## Test sims
# Keep only the first 10 000 simulations.
ids = ids[:10000]
labels_small = labels_small[:10000]

# Preallocate one (6 channels x 2851 samples) row per simulation;
# float32 halves the memory footprint versus the default float64.
lfps = np.zeros((len(ids), 6, 2851), dtype=np.float32)
# NOTE(review): this loop is truncated in the excerpt — the h5py.File(...)
# call below is cut off mid-statement.
for j, i in enumerate(ids[:10000]):
    with h5py.File(
            os.path.join(DATA_DIR_SMALL, 'nest_output', i,
## Grid-sampled dataset: read the id/parameter index, then load each
## simulation's LFP (first 150 time samples dropped).
print('loading grid sampled data')
ids, grid_labels = read_ids_parameters(os.path.join(DATA_DIR_GRID, 'id_parameters.txt'))

## Preallocate (n_sims, 6 channels, 2851 samples), float32 for memory
lfps = np.zeros((len(ids), 6, 2851), dtype=np.float32)
for idx, sim_id in enumerate(ids):
    h5_path = os.path.join(DATA_DIR_GRID, 'nest_output', sim_id, 'LFP_firing_rate.h5')
    with h5py.File(h5_path) as h5:
        lfps[idx] = h5['data'][:, 150:]

grid_labels = np.array(grid_labels)

## Power spectral densities; raw LFPs freed immediately afterwards
fs, grid_psds = calc_psds(lfps)
del lfps

## Drop simulations in which no neurons spiked — note the results are
## stored under the generic names `psds`/`labels` (kept as-is; downstream
## code presumably reads these names).
psds, labels = remove_zero_lfps(grid_psds, grid_labels)


### RANDOMLY SAMPLED DATA
## Read the id/parameter index, then load each simulation's LFP with a
## lightweight carriage-return progress counter.
print('loading randomly sampled data')
ids, random_labels = read_ids_parameters(os.path.join(DATA_DIR_RANDOM, 'id_parameters.txt'))

## Preallocate (n_sims, 6 channels, 2851 samples), float32 for memory
lfps = np.zeros((len(ids), 6, 2851), dtype=np.float32)
for idx, sim_id in enumerate(ids):
    print('%d'%idx, end='\r')  # overwrite the same console line each iteration
    h5_path = os.path.join(DATA_DIR_RANDOM, 'nest_output', sim_id, 'LFP_firing_rate.h5')
    with h5py.File(h5_path) as h5:
        lfps[idx] = h5['data'][:, 150:]

print('converting')
random_labels = np.array(random_labels)