Example #1
    train_labels = np.expand_dims(train_labels, 1)
    test_labels = np.expand_dims(test_labels, 1)

    train_data = np.column_stack((train_samples, train_labels))
    test_data = np.column_stack((test_samples, test_labels))

    if t_label is not None:
        # Redefine the label of the test sample
        test_labels[t] = t_label
        test_data[t][-1] = t_label
    if t_features is not None:
        # Redefine the feature values of the test sample
        test_samples[t] = t_features
        test_data[t] = np.concatenate((t_features, test_labels[t]))

    train_set = DataSet(train_samples, train_labels)
    test_set = DataSet(test_samples, test_labels)
    validation_set = None
    data_sets = base.Datasets(train=train_set, test=test_set, validation=validation_set)

    # Plot train samples
    plot_samples(train_samples, train_labels, plot_pdf=pdf,
                 plot_title='Train Dataset')

    # Plot train samples with test sample
    plot_samples(train_samples, train_labels, plot_pdf=pdf,
                 plot_title='Train Dataset', ref_ex=test_samples[t])

    # Plot test samples
    plot_samples(test_samples, test_labels, plot_pdf=pdf,
                 plot_title='Test Dataset')
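
The fragment wraps plain NumPy arrays in the project's DataSet class and groups the splits into base.Datasets from tensorflow.contrib.learn, which is just a namedtuple with train/validation/test fields. Below is a minimal, self-contained sketch of that pattern, with SimpleDataSet as a hypothetical stand-in for src.influence.dataset.DataSet:

import collections
import numpy as np

# Mirrors tensorflow.contrib.learn.python.learn.datasets.base.Datasets.
Datasets = collections.namedtuple('Datasets', ['train', 'validation', 'test'])

class SimpleDataSet:
    def __init__(self, samples, labels):
        # samples: (N, num_features); labels: (N, 1) after np.expand_dims.
        assert samples.shape[0] == labels.shape[0]
        self.samples = samples
        self.labels = labels

train_samples = np.random.rand(100, 2).astype(np.float32)            # toy 2-D features
train_labels = np.expand_dims(np.random.randint(0, 2, size=100), 1)  # column vector of labels
test_samples = np.random.rand(20, 2).astype(np.float32)
test_labels = np.expand_dims(np.random.randint(0, 2, size=20), 1)

data_sets = Datasets(train=SimpleDataSet(train_samples, train_labels),
                     test=SimpleDataSet(test_samples, test_labels),
                     validation=None)
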
Example #2
val_labels = train_labels[:n_val]
train_labels = train_labels[n_val:]

if t_label is not None:
    # Redefine the label of the test sample
    test_labels[t] = t_label
if t_features is not None:
    # Redefine the feature values of the test sample
    test_samples[t] = t_features

# Adjust the shape
train_labels = np.expand_dims(train_labels, 1)
test_labels = np.expand_dims(test_labels, 1)
val_labels = np.expand_dims(val_labels, 1)

train_set = DataSet(train_samples, train_labels)
test_set = DataSet(test_samples, test_labels)
validation_set = DataSet(val_samples, val_labels)
data_sets = base.Datasets(train=train_set,
                          test=test_set,
                          validation=validation_set)

# Plot train samples
plot_samples(train_samples, train_labels, plot_title='Train Dataset')

# Plot train samples with test sample
plot_samples(train_samples,
             train_labels,
             plot_title='Train Dataset',
             ref_ex=test_samples[t])
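
plot_samples comes from src.help_functions and is not shown in these examples. Judging only from the calls above, it scatters 2-D samples coloured by their labels, optionally highlights a reference example (ref_ex), and optionally writes the figure into a PDF (plot_pdf). A hypothetical minimal version, sketched purely from that assumed signature:

import numpy as np
import matplotlib.pyplot as plt

def plot_samples(samples, labels, plot_title='', ref_ex=None, plot_pdf=None):
    # Scatter the 2-D samples, coloured by class label.
    labels = np.asarray(labels).ravel()
    plt.figure()
    plt.scatter(samples[:, 0], samples[:, 1], c=labels, cmap='coolwarm', s=15)
    if ref_ex is not None:
        # Mark the reference (test) example with a black cross.
        plt.scatter(ref_ex[0], ref_ex[1], c='black', marker='x', s=120)
    plt.title(plot_title)
    if plot_pdf is not None:
        plot_pdf.savefig()  # assumes plot_pdf is a matplotlib PdfPages object
        plt.close()
    else:
        plt.show()
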
Example #3
    from src.InterpretableSpn import InterpretableSpn
    from src.influence.dataset import DataSet  # for train and test set creation
    from src.help_functions import *
    from prettytable import PrettyTable

    # Get train and test set
    num_train_samples = 5000
    num_test_samples = 10000
    (train_images,
     train_labels), (test_images,
                     test_labels) = load_mnist(num_train_samples,
                                               num_test_samples,
                                               normalization=False)
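    # Assumed shapes (load_mnist is a project helper from src.help_functions):
    # train_images (5000, 784) and test_images (10000, 784) hold flattened
    # 28x28 images, train_labels and test_labels are 1-D arrays of digits 0-9.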

    train_set = DataSet(train_images, np.expand_dims(train_labels, 1))
    test_set = DataSet(test_images, np.expand_dims(test_labels, 1))
    validation_set = None
    data_sets = base.Datasets(train=train_set,
                              test=test_set,
                              validation=validation_set)

    label_idx = 784   # index of the label column once each 28x28 image is flattened to 784 pixel values
    num_classes = 10  # ten MNIST digit classes
    batch_size = 1

    output_path = "/home/ml-mrothermel/projects/Interpreting-SPNs/output/spns"
    file_name = "tf_mnist_spn_9"

    # Import a trained, saved and converted model with new placeholders
    sample_placeholder = tf.placeholder(dtype=np.float32,