# Exemplo n.º 1
# --- SSAE experiment setup: imports, output directory, feature choice, data load ---
import os

import numpy as np

# FIX(review): my_functions is called below (load_data) but was never imported,
# which raises NameError at runtime. Project-local module — import added.
import my_functions

l1 = 2  # passed to my_functions.load_data — presumably context widths; confirm in my_functions
l2 = 2
modelpath = './your_models_SSAE/'

# Ensure the model output directory exists before anything tries to write to it.
if not os.path.exists(modelpath):
    os.makedirs(modelpath)

# Input feature type handed to the loader ('LPS' or 'LogMFE').
feature = 'LPS'
#feature = 'LogMFE'
print('Feature used is {}'.format(feature))

print('Loading data...')
data = my_functions.load_data(l1, l2, feature)
# Unpack train/dev/test inputs, regression targets, and the feature dimensions.
inp_train, inp_dev, inp_test, op_reg_train, op_reg_dev, op_reg_test, feat_dim_X, feat_dim_Y = data
print('Data loaded')

###############################################
# Configurations
#pDrop = 0.2; BN = 0 # Fraction of the input units to drop.
#pDrop = 0; BN = 'a'
pDrop = 0   # fraction of input units to drop (0 = no dropout)
BN = 'b'    # batch-norm mode flag — semantics defined by the training code; confirm
#pDrop = 0; BN = 0
#HL = [512,256,10,256,512]
HL = [1024, 512, 10, 512, 1024]  # hidden-layer widths (symmetric, bottleneck of 10)

# One activation per layer: 5 tanh (hidden) + a final linear layer.
activations = ['tanh', 'tanh', 'tanh', 'tanh', 'tanh', 'linear']
act = 'tttttl'  # compact string encoding of the activations list
# Exemplo n.º 2
# Model hyper-parameters
n_phi = 100  # 100 (b) in the manuscript
dropout = 0.0  # let's apply dropout in the second layer

# Training Parameters
learning_rate = 0.05
batch_size = 64
seed = 10003  # RNG seed for reproducibility
early_stopping = True
n_epochs = 100
display = 5  #100 — progress-display interval (units not shown here; confirm)

# ----- prepare data
k = 1  # work on 1st k fold for testing purpose

# NOTE(review): create_k_splits, load_data, SummaryMeasure and prepare_masker
# are not defined in this fragment — presumably imported elsewhere; confirm.
train_split, test_split = create_k_splits()
X_train, y_train = load_data('train', train_split[k], test_split[k],
                             SummaryMeasure)
X_test, y_test = load_data('test', train_split[k], test_split[k],
                           SummaryMeasure)

# prepare mask and masker
# NOTE(review): "brain_mask" has no .npy extension and uses allow_pickle=True —
# assumes a pickled array saved under exactly this filename; verify it exists.
mask = np.load("brain_mask", allow_pickle=True)
masker = prepare_masker(X_train)
# -------
# flatten data for mlp
# Collapse each sample to a 1-D feature vector (one row per sample).
X_train = X_train.reshape(len(X_train), -1)  #109350
X_test = X_test.reshape(len(X_test), -1)

# Keep only in-mask features — mask flattened to match the feature axis
# (presumably a boolean brain mask; confirm dtype).
X_train = X_train[:, mask.reshape(-1)]  #28542
X_test = X_test[:, mask.reshape(-1)]

# Construct MLP with Feature Grouping