Example #1
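# assumed imports for this snippet (not shown in the source): os, numpy as np,
# tensorflow as tf, and the project's nn (NeuralNet, NeuralTrainer), utils, and
# import_model helpers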
def ensemble_clip_predictions(test,
                              rbp_name,
                              models,
                              input_shape,
                              output_shape,
                              best_path,
                              ss_type,
                              use_scope=True):
    predictions = []
    for model in models:

        # start from a clean graph for each model, as the later examples do;
        # otherwise repeated builds collide in the default graph
        tf.reset_default_graph()

        # load model
        genome_model = import_model(model)
        model_layers, optimization = genome_model(input_shape, output_shape)

        # build neural network class
        nnmodel = nn.NeuralNet(seed=247)
        nnmodel.build_layers(model_layers, optimization, use_scope=use_scope)

        file_path = os.path.join(best_path, model, ss_type, rbp_name)
        nntrainer = nn.NeuralTrainer(nnmodel, save='best', file_path=file_path)

        # initialize session
        sess = utils.initialize_session(nnmodel.placeholders)

        # load best model
        nntrainer.set_best_parameters(sess, verbose=0)

        predictions.append(nntrainer.get_activations(sess, test))
        sess.close()  # release the session before the next model is built

    predictions = np.hstack(predictions)
    ensemble_predictions = np.mean(predictions, axis=1)
    return ensemble_predictions, predictions
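A minimal usage sketch; every name, shape, and path below is illustrative rather than taken from the source:

# hypothetical held-out data and model list
test = {'inputs': X_test, 'targets': y_test}
ensemble_mean, per_model = ensemble_clip_predictions(
    test,
    rbp_name='RBFOX1',            # placeholder RBP experiment name
    models=['cnn_4', 'cnn_25'],   # placeholder model module names
    input_shape=[None, 200, 1, 4],
    output_shape=[None, 1],
    best_path='../results/best_models',
    ss_type='seq')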
Example #2
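# assumed imports for this snippet (not shown in the source): numpy as np,
# tensorflow as tf, and the project's nn and utils modules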
def stochastic_backprop(X,
                        layer='output',
                        class_index=None,
                        params=None,
                        num_average=400,
                        threshold=12.0):
    tf.reset_default_graph()
    if 'use_scope' not in params:
        params['use_scope'] = True

    # build new graph
    model_layers, optimization = params['genome_model'](params['input_shape'],
                                                        params['output_shape'])
    nnmodel = nn.NeuralNet()
    nnmodel.build_layers(model_layers,
                         optimization,
                         use_scope=params['use_scope'])
    nntrainer = nn.NeuralTrainer(nnmodel,
                                 save='best',
                                 file_path=params['model_path'])

    # setup session and restore optimal parameters
    sess = utils.initialize_session(nnmodel.placeholders)
    nntrainer.set_best_parameters(sess, params['model_path'], verbose=0)

    # stochastic backprop saliency
    if layer == 'output':
        layer = list(nnmodel.network.keys())[-2]
        saliency, counts = nntrainer.get_stochastic_saliency(
            sess,
            X,
            nnmodel.network[layer],
            class_index=class_index,
            num_average=num_average,
            threshold=threshold)
    else:
        data = {'inputs': X}
        layer_activations = nntrainer.get_activations(sess, data, layer)
        max_activations = np.squeeze(np.max(layer_activations, axis=1))
        active_indices = np.where(max_activations > 0)[0]
        active_indices = active_indices[np.argsort(
            max_activations[active_indices])[::-1]]
        saliency = []
        counts = []
        for neuron_index in active_indices:
            val, count = nntrainer.get_stochastic_saliency(
                sess,
                X,
                nnmodel.network[layer],
                class_index=neuron_index,
                num_average=num_average,
                threshold=threshold)
            saliency.append(val)
            counts.append(count)

    sess.close()
    tf.reset_default_graph()

    return np.vstack(saliency), np.array(counts)
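Everything the function needs arrives through the params dict; a sketch of the keys it reads, with placeholder values (genome_model stands for any builder returning (model_layers, optimization)):

params = {
    'genome_model': genome_model,
    'input_shape': [None, 200, 1, 4],   # placeholder shapes
    'output_shape': [None, 1],
    'model_path': '../results/best_models/cnn_4',
    'use_scope': True,                  # optional; defaults to True above
}
saliency, counts = stochastic_backprop(X_test, layer='output',
                                       class_index=0, params=params)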
Example #3
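# assumed imports: the same numpy/tensorflow and project nn/utils modules as in
# Example #2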
def guided_backprop(X,
                    layer='output',
                    class_index=None,
                    params=None,
                    batch_size=128):
    tf.reset_default_graph()
    if 'use_scope' not in params:
        params['use_scope'] = True

    # build new graph
    #g = tf.get_default_graph()
    #with g.gradient_override_map({'Relu': 'GuidedRelu'}):
    model_layers, optimization = params['genome_model'](params['input_shape'],
                                                        params['output_shape'])
    nnmodel = nn.NeuralNet()
    nnmodel.build_layers(model_layers,
                         optimization,
                         method='guided',
                         use_scope=params['use_scope'])
    nntrainer = nn.NeuralTrainer(nnmodel,
                                 save='best',
                                 file_path=params['model_path'])

    # setup session and restore optimal parameters
    sess = utils.initialize_session(nnmodel.placeholders)
    nntrainer.set_best_parameters(sess, params['model_path'], verbose=0)

    # backprop saliency
    if layer == 'output':
        layer = list(nnmodel.network.keys())[-2]
        saliency = nntrainer.get_saliency(sess,
                                          X,
                                          nnmodel.network[layer],
                                          class_index=class_index,
                                          batch_size=batch_size)
    else:
        # get_activations expects a dict of named arrays, as in the examples above
        data = {'inputs': X}
        layer_activations = nntrainer.get_activations(sess, data, layer)
        max_activations = np.squeeze(np.max(layer_activations, axis=1))
        active_indices = np.where(max_activations > 0)[0]
        active_indices = active_indices[np.argsort(
            max_activations[active_indices])[::-1]]

        saliency = []
        for neuron_index in active_indices:
            val = nntrainer.get_saliency(sess,
                                         X,
                                         nnmodel.network[layer][:, :, :,
                                                                neuron_index],
                                         class_index=None,
                                         batch_size=batch_size)
            saliency.append(val)

    sess.close()
    tf.reset_default_graph()
    return saliency
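guided_backprop reads the same params dict. Called with an intermediate layer it returns one saliency map per active neuron; the layer name below is an assumption about the keys in nnmodel.network:

saliency_maps = guided_backprop(X_test, layer='conv1_active',
                                params=params, batch_size=64)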
Example #4
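# assumed imports for these fragments (not shown in the source): os, numpy as
# np, tensorflow as tf, and the project's nn, utils, fit, and helper modules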
background = []  # assumed initialization; the snippet begins mid-context
for i in range(num_saliency):
    # shuffle positions within each saliency map to build a background set
    shuffle = np.random.permutation(X.shape[1])
    background.append([mean_saliency[i, shuffle, :, :]])
background = np.vstack(background)

# merge real and shuffled saliency maps into one labeled dataset
inputs = np.vstack([mean_saliency, background])
targets = np.vstack([np.ones((num_saliency, 1)), np.zeros((num_saliency, 1))])
shuffle = np.random.permutation(inputs.shape[0])
new_train = {'inputs': inputs[shuffle], 'targets': targets[shuffle]}

# load classifier model
model_layers, optimization = classifier_model(input_shape, output_shape, num_filters)

# build neural network class
nnmodel = nn.NeuralNet(seed=247)
nnmodel.build_layers(model_layers, optimization)
nnmodel.inspect_layers()

# compile neural trainer
file_path = os.path.join(params_path, 'saliency_classifier_' + str(num_saliency))
nntrainer = nn.NeuralTrainer(nnmodel, save='best', file_path=file_path)

# initialize session
sess = utils.initialize_session(nnmodel.placeholders)

# fit data
data = {'train': new_train}
fit.train_minibatch(sess, nntrainer, data, batch_size=32, num_epochs=500,
                    patience=20, verbose=2, shuffle=True, save_all=False)
# (a second fragment of this example begins here, mid-assignment; the name
# 'valid' is reconstructed from the data dict below and is an assumption)
valid = {
    'inputs': inputs[shuffle[index[2]:index[3]]],
    'targets': targets[shuffle[index[2]:index[3]]]
}

# get shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None
output_shape = train['targets'].shape

# load model
genome_model = helper.import_model(model)
model_layers, optimization = genome_model(input_shape, output_shape)

# build neural network class
nnmodel = nn.NeuralNet(seed=512)
nnmodel.build_layers(model_layers, optimization)
nnmodel.inspect_layers()

# compile neural trainer
file_path = os.path.join(model_path, rbp_name)
nntrainer = nn.NeuralTrainer(nnmodel, save='best', file_path=file_path)

# initialize session
sess = utils.initialize_session(nnmodel.placeholders)

# fit model
data = {'train': train, 'valid': valid}
# the source snippet is truncated at this call; arguments after 'data' are
# assumed, following the complete call in the first fragment above
fit.train_minibatch(sess, nntrainer, data, batch_size=128, num_epochs=100,
                    patience=20, verbose=2, shuffle=True)
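
# (a further fragment of this example: the same training workflow written at
# script level rather than inside a loop)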
# get shapes
input_shape = list(train['inputs'].shape)
input_shape[0] = None
output_shape = train['targets'].shape

tf.reset_default_graph()  # reset any tensorflow graphs
#np.random.seed(247) # for reproducibility
#tf.set_random_seed(247) # for reproducibility

# load model
genome_model = helper.import_model(model)
model_layers, optimization = genome_model(input_shape, output_shape)

# build neural network class
nnmodel = nn.NeuralNet()
nnmodel.build_layers(model_layers, optimization, use_scope=False)

# loop over different secondary structure contexts
sstype_path = helper.make_directory(results_path,
                                    normalize_method + '_' + ss_type)
model_path = helper.make_directory(sstype_path, model)
file_path = os.path.join(model_path, experiment)
nntrainer = nn.NeuralTrainer(nnmodel, save='best', file_path=file_path)

# initialize session
sess = utils.initialize_session(nnmodel.placeholders)

# fit model
data = {'train': train, 'valid': valid}
# the source snippet is truncated at this call; arguments after 'data' are
# assumed, following the complete call in the first fragment above
fit.train_minibatch(sess, nntrainer, data, batch_size=128, num_epochs=100,
                    patience=20, verbose=2, shuffle=True)
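
All four examples repeat one lifecycle; condensed as a sketch using the names from the snippets above:

# 1. define graph:     model_layers, optimization = genome_model(input_shape, output_shape)
# 2. build network:    nnmodel = nn.NeuralNet(); nnmodel.build_layers(model_layers, optimization)
# 3. attach trainer:   nntrainer = nn.NeuralTrainer(nnmodel, save='best', file_path=...)
# 4. open session:     sess = utils.initialize_session(nnmodel.placeholders)
# 5. train or restore: fit.train_minibatch(...) or nntrainer.set_best_parameters(...)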