import os, sys
import numpy as np
import helper
import tensorflow as tf
from deepomics import neuralnetwork as nn
from deepomics import utils, fit

#------------------------------------------------------------------------------------------------

all_models = [
    'cnn_2', 'cnn_4', 'cnn_10', 'cnn_25', 'cnn_50', 'cnn_100', 'cnn_50_2',
    'cnn9_4', 'cnn9_25', 'cnn3_50', 'cnn3_2'
]

# save path
results_path = utils.make_directory('../results', 'synthetic')
params_path = utils.make_directory(results_path, 'model_params')

# load dataset
data_path = '../data/synthetic_dataset.h5'
train, valid, test = helper.load_synthetic_dataset(data_path)
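# train/valid/test are dicts with 'inputs' and 'targets' arrays; elsewhere in
# this project the inputs are one-hot sequences of shape (N, L, 1, 4)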

# get data shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None
output_shape = [None, train['targets'].shape[1]]

# loop through models
for model_name in all_models:
    print('model: ' + model_name)
# Example 2
import os, sys
import numpy as np
import tensorflow as tf
import helper
from deepomics import utils
np.random.seed(247)
tf.set_random_seed(247)

#---------------------------------------------------------------------------------------------------------

all_models = ['DistNet', 'LocalNet']
dropout_status = [True, False]
l2_status = [True, False]
bn_status = [True, False]
noise_status = [False, True, False]
adv_status = [False, False, True]
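# presumably paired per run: (noise, adv) = (False, False) is the clean
# baseline, (True, False) adds input noise only, (False, True) trains
# adversarially only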

# save path
results_path = '../results'
params_path = utils.make_directory(results_path, 'model_params')

# dataset path
data_path = '../data/Synthetic_dataset.h5'
train, valid, test = helper.load_synthetic_dataset(data_path)

# get data shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None

with open(os.path.join(results_path, 'performance.tsv'), 'w') as f:

    for i in range(len(dropout_status)):

        for n, noise in enumerate(noise_status):
            # loop through models
#-------------------------------------------------------------------------------------------

import copy

all_models = ['cnn_deep_noreg']
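# const_annealing and cos_annealing are not defined in this excerpt; a minimal
# sketch, assuming each returns an epoch -> weight schedule for the adversarial
# loss term (the project's real definitions may differ):
import numpy as np

def const_annealing(value):
    # constant weight at every epoch
    return lambda epoch: value

def cos_annealing(num_epochs):
    # weight ramps from 0 to 1 over num_epochs along a half-cosine
    return lambda epoch: 0.5 * (1.0 - np.cos(np.pi * min(epoch, num_epochs) / num_epochs))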
# training schedules: all adversarial, or clean epochs followed by all-adversarial epochs
adv_type = [(80, 0, const_annealing(0)), (80, 20, const_annealing(0)), (80, 20, const_annealing(0.5)), (80, 0, cos_annealing(80))]

batch_size = 50
verbose = 1 

print_adv_test = True

# save path
results_path = '../results'
#results_path = '/content/drive/My Drive/results'
model_path = utils.make_directory(results_path, 'model_params')
metrics_path = utils.make_directory(results_path, 'train_metrics')

# dataset path
data_path = '../data/Synthetic_dataset.h5'
train, valid, test = helper.load_synthetic_dataset(data_path)

adv_test = copy.deepcopy(test)  # separate copy of the test set for adversarial evaluation

# get data shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None
output_shape = [None, train['targets'].shape[1]]

for model_name in all_models:
#-------------------------------------------------------------------------------------------

import numpy as np
import helper
from deepomics import utils

all_models = ['cnn_25_noreg_mnist']
# training schedules: all adversarial, or clean epochs followed by all-adversarial epochs
adv_type = [(40, 0, const_annealing(0)), (40, 10, const_annealing(0)), (40, 10, const_annealing(0.5)), (40, 0, cos_annealing(40))]
#adv_type = [(80, 0, cos_annealing(80))]

batch_size = 50
verbose = 1 
print_adv_test = True

# save path
results_path = '../results'
#results_path = '/content/drive/My Drive/results'
model_path = utils.make_directory(results_path, 'model_params/mnist')

# dataset path
data_path = '../data/Synthetic_dataset.h5'
train, valid, test = helper.load_synthetic_dataset(data_path)

# Import MNIST data (tensorflow.examples.tutorials.mnist is the TF1 tutorial module)
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('MNIST_data', one_hot=True)

train['inputs'] = np.reshape(mnist.train.images, [-1, 28, 28, 1])
train['targets'] = mnist.train.labels

valid['inputs'] = np.reshape(mnist.validation.images, [-1, 28, 28, 1])
valid['targets'] = mnist.validation.labels

test['inputs'] = np.reshape(mnist.test.images, [-1, 28, 28, 1])
test['targets'] = mnist.test.labels
# Example 5
import os, sys
import numpy as np
import helper
import matplotlib.pyplot as plt
import tensorflow as tf
from deepomics import neuralnetwork as nn
from deepomics import utils, visualize

#------------------------------------------------------------------------------------------------


all_models = ['cnn_2', 'cnn_4', 'cnn_10', 'cnn_25', 'cnn_50', 'cnn_100',
              'cnn_50_2', 'cnn9_4', 'cnn9_25']

# save path
results_path = utils.make_directory('../results', 'invivo')
params_path = utils.make_directory(results_path, 'model_params')
save_path = utils.make_directory(results_path, 'conv_filters')

# get data shapes
input_shape = [None, 1000, 1, 4]
output_shape = [None, 12]

# loop through models
num_params_all = []
roc_scores = []
pr_scores = []
roc_all = []
pr_all = []
for model_name in all_models:
    print('model: ' + model_name)
# Example 6
import os, sys
import numpy as np
import helper
import tensorflow as tf
from deepomics import neuralnetwork as nn
from deepomics import utils, fit

#------------------------------------------------------------------------------------------------

all_models = [
    'cnn_2', 'cnn_4', 'cnn_10', 'cnn_25', 'cnn_50', 'cnn_100', 'cnn_50_2',
    'cnn9_4', 'cnn9_25'
]

# save path
results_path = utils.make_directory('../results', 'invivo')
params_path = utils.make_directory(results_path, 'model_params')

# load dataset
data_path = '../data/invivo_dataset.h5'
train, valid, test = helper.load_invivo_dataset(data_path)

# get data shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None
output_shape = [None, train['targets'].shape[1]]

# loop through models
for model_name in all_models:
    print('model: ' + model_name)
It will also output an array of shape (num_models, num_reg) giving the name of each model, for ease of indexing into the larger results array.

'''
#------------------------------------------------------------------------------------------------

all_models = ['DistNet', 'LocalNet', 'DeepBind', 'StandardNet']
num_models = len(all_models)  # number of models
dropout_status = [True, True, False, False, False, True, True, False]
l2_status = [True, False, True, False, False, True, False, True]
bn_status = [True, False, False, True, False, False, True, True]
num_reg = len(dropout_status)  # number of regularization types
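# a sketch of the model-name array described in the docstring above
# (model_index is a hypothetical name, used only for illustration):
model_index = np.array([[name] * num_reg for name in all_models])
# model_index.shape == (num_models, num_reg)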

# save path
results_path = '../results'
params_path = utils.make_directory(results_path, 'model_params')

# dataset path
data_path = '../data/Synthetic_dataset.h5'

# load dataset
train, valid, test = helper.load_synthetic_dataset(data_path)
# get the indices of positively labelled sequences
right_index = np.where(test['targets'][:, 0] == 1)[0]
num_pos = len(right_index)  # number of positively labelled sequences

# get data shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None

_, seqlen, _, dims = test['inputs'].shape

#------------------------------------------------------------------------------------------------

import os, sys
import numpy as np
import tensorflow as tf
from deepomics import utils, fit, visualize, saliency, metrics
from sklearn.metrics import roc_curve, auc, precision_recall_curve, accuracy_score, roc_auc_score
import helper
os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
np.random.seed(247)
tf.set_random_seed(247)

#---------------------------------------------------------------------------------------------------------

all_models = ['LocalNet']
noise_status = [False, True, False]
adv_status = [False, False, True]

# save path
results_path = '../results'
model_path = utils.make_directory(results_path, 'model_params')
save_path = utils.make_directory(results_path, 'conv_filters')

# dataset path
data_path = '../data/Synthetic_dataset.h5'
train, valid, test = helper.load_synthetic_dataset(data_path)

# get data shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None
output_shape = [None, train['targets'].shape[1]]

with open(os.path.join(results_path, 'performance.tsv'), 'w') as f:

    for n, noise in enumerate(noise_status):
        # loop through models

#---------------------------------------------------------------------------------------------------------

import os, sys
import numpy as np
import helper
import matplotlib.pyplot as plt
import tensorflow as tf
from deepomics import neuralnetwork as nn
from deepomics import utils, visualize


# plot 2nd layer filters of CNN-1
model_name = 'cnn_1'
num_trials = 5

for trial in range(num_trials):

    results_path = utils.make_directory('../results', 'synthetic_'+str(trial))
    params_path = utils.make_directory(results_path, 'model_params')
    save_path = utils.make_directory(results_path, 'conv_filters')

    # load dataset
    data_path = '../data/synthetic_dataset.h5'
    train, valid, test = helper.load_synthetic_dataset(data_path)

    # get data shapes
    input_shape = list(train['inputs'].shape)
    input_shape[0] = None
    output_shape = [None, train['targets'].shape[1]]

    print('model: ' + model_name)
    tf.reset_default_graph()  # start a fresh graph for each trial
    tf.set_random_seed(247)   # fixed seed so each trial is reproducible

#-------------------------------------------------------------------------------------

import os
from deepomics import neuralnetwork as nn
from deepomics import utils
import load_data, standard_model, learn  # assumed project-local modules (not shown in this excerpt)

train, valid, test = load_data.simulation_pickle(file_path)

#-------------------------------------------------------------------------------------

# build network
shape = (None, train[0].shape[1], train[0].shape[2], train[0].shape[3])
num_labels = train[1].shape[1]
network, placeholders, optimization = standard_model.model(shape, num_labels)

# build neural network class
nnmodel = nn.NeuralNet(network, placeholders)
nnmodel.inspect_layers()

# set output file paths
output_name = 'test'
utils.make_directory(data_path, 'Results')
file_path = os.path.join(data_path, 'Results', output_name)
nntrainer = nn.NeuralTrainer(nnmodel, optimization, save='best', file_path=file_path)

# train model
learn.train_minibatch(nntrainer, data={'train': train, 'valid': valid},
                      batch_size=100, num_epochs=500, patience=10, verbose=1)

# load best model --> lowest cross-validation error
nntrainer.set_best_parameters()

# test model
nntrainer.test_model(test, name="test", batch_size=100)

# save all performance metrics (train, valid, test)
nntrainer.save_all_metrics(file_path)