Example #1
def stochastic_backprop(X,
                        layer='output',
                        class_index=None,
                        params=None,
                        num_average=400,
                        threshold=12.0):
    tf.reset_default_graph()
    params.setdefault('use_scope', True)

    # build new graph
    model_layers, optimization = params['genome_model'](params['input_shape'],
                                                        params['output_shape'])
    nnmodel = nn.NeuralNet()
    nnmodel.build_layers(model_layers,
                         optimization,
                         use_scope=params['use_scope'])
    nntrainer = nn.NeuralTrainer(nnmodel,
                                 save='best',
                                 filepath=params['model_path'])

    # setup session and restore optimal parameters
    sess = utils.initialize_session(nnmodel.placeholders)
    nntrainer.set_best_parameters(sess, params['model_path'], verbose=0)

    # stochastic backprop saliency
    if layer == 'output':
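        # the second-to-last entry of the network dict is presumably the
        # pre-activation output layer (deepomics layer ordering assumed)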
        layer = list(nnmodel.network.keys())[-2]
        saliency, counts = nntrainer.get_stochastic_saliency(
            sess,
            X,
            nnmodel.network[layer],
            class_index=class_index,
            num_average=num_average,
            threshold=threshold)
    else:
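        # rank this layer's neurons by maximum activation, keeping only
        # those that fire on at least one input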
        data = {'inputs': X}
        layer_activations = nntrainer.get_activations(sess, data, layer)
        max_activations = np.squeeze(np.max(layer_activations, axis=1))
        active_indices = np.where(max_activations > 0)[0]
        active_indices = active_indices[np.argsort(
            max_activations[active_indices])[::-1]]
        saliency = []
        counts = []
        for neuron_index in active_indices:
            val, count = nntrainer.get_stochastic_saliency(
                sess,
                X,
                nnmodel.network[layer],
                class_index=neuron_index,
                num_average=num_average,
                threshold=threshold)
            saliency.append(val)
            counts.append(count)

    sess.close()
    tf.reset_default_graph()

    return np.vstack(saliency), np.array(counts)
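
A minimal usage sketch for stochastic_backprop, assuming a deepomics-style genome_model builder and a saved checkpoint; every concrete name, shape, and path below is a hypothetical placeholder:

# hypothetical usage -- my_genome_model, shapes, and paths are assumptions
params = {
    'genome_model': my_genome_model,   # (input_shape, output_shape) -> (model_layers, optimization)
    'input_shape': [None, 200, 1, 4],  # (batch, seq_length, 1, alphabet_size)
    'output_shape': [None, 1],
    'model_path': 'results/my_model_best.ckpt',
}
saliency, counts = stochastic_backprop(X_test, class_index=0, params=params)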
Example #2
def smooth_backprop(X,
                    params,
                    layer='output',
                    class_index=None,
                    batch_size=128,
                    num_average=100):
    """wrapper for backprop/guided-backpro saliency"""

    saliency = np.zeros(X.shape)
    for i, x in enumerate(X):
        if np.mod(i, 200) == 0:
            print('%d out of %d' % (i, len(X)))

        if np.mod(i, 50) == 0:
            # periodically rebuild the graph to cap TF1.x memory growth;
            # close the previous session before resetting its graph
            if i > 0:
                sess.close()
            tf.reset_default_graph()

            # build new graph
            model_layers, optimization, genome_model = load_model(
                params['model_name'], params['input_shape'],
                params['dropout_status'], params['l2_status'],
                params['bn_status'])

            nnmodel = nn.NeuralNet()
            nnmodel.build_layers(model_layers,
                                 optimization,
                                 method='backprop',
                                 use_scope=True)
            nntrainer = nn.NeuralTrainer(nnmodel,
                                         save='best',
                                         filepath=params['model_path'])

            # setup session and restore optimal parameters
            sess = utils.initialize_session(nnmodel.placeholders)
            nntrainer.set_best_parameters(sess,
                                          params['model_path'],
                                          verbose=0)

            # backprop saliency
            if layer == 'output':
                layer = list(nnmodel.network.keys())[-2]

        x = np.expand_dims(x, axis=0)
        shape = list(x.shape)
        shape[0] = num_average
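        # SmoothGrad-style averaging: broadcast Gaussian noise onto
        # num_average copies of x and average the resulting saliency maps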

        noisy_saliency = nntrainer.get_saliency(
            sess,
            x + np.random.normal(scale=0.1, size=shape),
            nnmodel.network[layer],
            class_index=class_index,
            batch_size=num_average)
        saliency[i, :, :, :] = np.mean(noisy_saliency, axis=0)

    sess.close()
    tf.reset_default_graph()

    return saliency
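
A hedged call sketch for the smoothed saliency above; the params keys mirror those read inside the function, while the concrete values are placeholders:

# hypothetical call -- model_name, shapes, and path are illustrative only
params = {
    'model_name': 'cnn_model',
    'input_shape': [None, 200, 1, 4],
    'dropout_status': True,
    'l2_status': True,
    'bn_status': True,
    'model_path': 'results/cnn_model_best.ckpt',
}
saliency = smooth_backprop(X_test, params, class_index=0, num_average=100)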
def mutagenesis(X, params, layer='output', class_index=None):
    """wrapper for backprop/guided-backpro saliency"""

    tf.reset_default_graph()

    # build new graph
    model_layers, optimization, genome_model = load_model(
        params['model_name'], params['input_shape'], params['output_shape'])

    nnmodel = nn.NeuralNet()
    # mutagenesis only needs forward passes, so no gradient method is set here
    nnmodel.build_layers(model_layers,
                         optimization,
                         use_scope=True)
    nntrainer = nn.NeuralTrainer(nnmodel,
                                 save='best',
                                 filepath=params['model_path'])

    # setup session and restore optimal parameters
    sess = utils.initialize_session(nnmodel.placeholders)
    nntrainer.set_best_parameters(sess, params['model_path'], verbose=0)

    wt_scores = nntrainer.get_activations(sess, {'inputs': X}, layer='output')
    if class_index is not None:
        wt_scores = wt_scores[:, class_index]

    N, L, _, A = X.shape

    mut_scores = []
    for i, x in enumerate(X):
        # generate all L*A single-substitution variants of sequence x
        X_mut = []
        for l in range(L):
            for a in range(A):
                X_new = np.copy(x)
                X_new[l, 0, :] = 0
                X_new[l, 0, a] = 1
                X_mut.append(X_new)
        X_mut = np.array(X_mut)

        predictions = nntrainer.get_activations(sess, {'inputs': X_mut},
                                                layer='output')

        if class_index is not None:
            predictions = predictions[:, class_index]

        # reshape the flat predictions into an (A, L) mutagenesis map
        mut_map = np.zeros((A, L))
        k = 0
        for l in range(L):
            for a in range(A):
                mut_map[a, l] = predictions[k]
                k += 1

        # score each variant relative to its wild-type prediction
        mut_scores.append([mut_map - wt_scores[i]])

    mut_scores = np.vstack(mut_scores)
    sess.close()
    tf.reset_default_graph()

    return mut_scores
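
Each returned map holds, at entry (a, l), the change in the model's prediction when position l of the wild-type sequence is substituted by character a, so the result stacks one (A, L) map per input sequence. A hedged call, reusing a params dict like the ones above:

# hypothetical call -- yields one (A, L) delta-prediction map per sequence
mut_scores = mutagenesis(X_test, params, class_index=0)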
Example #4
def guided_backprop(X,
                    layer='output',
                    class_index=None,
                    params=None,
                    batch_size=128):
    tf.reset_default_graph()
    params.setdefault('use_scope', True)

    # build new graph
    #g = tf.get_default_graph()
    #with g.gradient_override_map({'Relu': 'GuidedRelu'}):
    model_layers, optimization = params['genome_model'](params['input_shape'],
                                                        params['output_shape'])
    nnmodel = nn.NeuralNet()
    nnmodel.build_layers(model_layers,
                         optimization,
                         method='guided',
                         use_scope=params['use_scope'])
    nntrainer = nn.NeuralTrainer(nnmodel,
                                 save='best',
                                 filepath=params['model_path'])

    # setup session and restore optimal parameters
    sess = utils.initialize_session(nnmodel.placeholders)
    nntrainer.set_best_parameters(sess, params['model_path'], verbose=0)

    # backprop saliency
    if layer == 'output':
        layer = list(nnmodel.network.keys())[-2]
        saliency = nntrainer.get_saliency(sess,
                                          X,
                                          nnmodel.network[layer],
                                          class_index=class_index,
                                          batch_size=batch_size)
    else:
        data = {'inputs': X}
        layer_activations = nntrainer.get_activations(sess, data, layer)
        max_activations = np.squeeze(np.max(layer_activations, axis=1))
        active_indices = np.where(max_activations > 0)[0]
        active_indices = active_indices[np.argsort(
            max_activations[active_indices])[::-1]]

        saliency = []
        for neuron_index in active_indices:
            val = nntrainer.get_saliency(sess,
                                         X,
                                         nnmodel.network[layer][:, :, :,
                                                                neuron_index],
                                         class_index=None,
                                         batch_size=batch_size)
            saliency.append(val)

    sess.close()
    tf.reset_default_graph()
    return saliency
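
The commented-out gradient_override_map({'Relu': 'GuidedRelu'}) above hints at how method='guided' presumably works internally: gradients pass through a ReLU only where both the forward activation and the incoming gradient are positive. A minimal TF1.x sketch of such an override (deepomics presumably registers an equivalent gradient itself):

import tensorflow as tf
from tensorflow.python.framework import ops

@ops.RegisterGradient('GuidedRelu')
def _guided_relu_grad(op, grad):
    # keep gradient only where the forward ReLU output was positive
    # AND the incoming gradient is positive (the guided-backprop rule)
    gate_fwd = tf.cast(op.outputs[0] > 0, grad.dtype)
    gate_bwd = tf.cast(grad > 0, grad.dtype)
    return grad * gate_fwd * gate_bwd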
def backprop(X,
             params,
             layer='output',
             class_index=None,
             batch_size=128,
             method='guided'):
    """wrapper for backprop/guided-backpro saliency"""

    tf.reset_default_graph()

    # build new graph
    model_layers, optimization, genome_model = load_model(
        params['model_name'], params['input_shape'], params['dropout_status'],
        params['l2_status'], params['bn_status'])

    nnmodel = nn.NeuralNet()
    nnmodel.build_layers(model_layers,
                         optimization,
                         method=method,
                         use_scope=True)
    nntrainer = nn.NeuralTrainer(nnmodel,
                                 save='best',
                                 filepath=params['model_path'])

    # setup session and restore optimal parameters
    sess = utils.initialize_session(nnmodel.placeholders)
    nntrainer.set_best_parameters(sess, params['model_path'], verbose=0)

    # backprop saliency
    if layer == 'output':
        layer = list(nnmodel.network.keys())[-2]

    saliency = nntrainer.get_saliency(sess,
                                      X,
                                      nnmodel.network[layer],
                                      class_index=class_index,
                                      batch_size=batch_size)

    sess.close()
    tf.reset_default_graph()
    return saliency
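
A hedged call sketch; method switches between vanilla and guided backprop, and the params dict follows the same keys as in smooth_backprop above:

# hypothetical call -- X_test and params are placeholders
saliency = backprop(X_test, params, class_index=0, method='guided')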
    # load model parameters
    genome_model = helper.import_model(model_name)
    model_layers, optimization = genome_model.model(input_shape, output_shape)

    # build neural network class
    nnmodel = nn.NeuralNet()
    nnmodel.build_layers(model_layers, optimization, supervised=True)
    nnmodel.inspect_layers()

    # create neural trainer
    file_path = os.path.join(params_path, model_name)
    nntrainer = nn.NeuralTrainer(nnmodel, save='best', file_path=file_path)

    # initialize session
    sess = utils.initialize_session()

    # set data in dictionary
    data = {'train': train, 'valid': valid, 'test': test}

    # fit model
    fit.train_minibatch(sess,
                        nntrainer,
                        data,
                        batch_size=100,
                        num_epochs=100,
                        patience=20,
                        verbose=2,
                        shuffle=True,
                        save_all=False)
Example #7
                                                      output_shape)

            # build neural network class
            nnmodel = nn.NeuralNet(seed=247)
            nnmodel.build_layers(model_layers, optimization)
            nnmodel.inspect_layers()

            # compile neural trainer
            model_save_path = os.path.join(sstype_path,
                                           rbp_name + '_' + cell_name)
            nntrainer = nn.NeuralTrainer(nnmodel,
                                         save='best',
                                         file_path=model_save_path)

            # initialize session
            sess = utils.initialize_session(nnmodel.placeholders)

            # train model
            data = {'train': train, 'valid': valid}
            fit.train_minibatch(sess,
                                nntrainer,
                                data,
                                batch_size=batch_size,
                                num_epochs=num_epochs,
                                patience=25,
                                verbose=2,
                                shuffle=True,
                                save_all=False)

            sess.close()
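
One caveat for Example #7: it builds a fresh model per RBP/cell-type pair inside a loop whose header is truncated above, so under TF1.x each iteration should begin with a clean graph. A minimal hedged sketch of that step:

# TF1.x hygiene (assumed loop context): reset the default graph before
# building the next model so variables do not accumulate across iterations
tf.reset_default_graph()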