Example 1
    def load_data_sets(self):
        # Dispatch on the task name to the corresponding data loader.
        if self.task == 'spam_enron':
            self.data_sets = load_spam(ex_to_leave_out=self.ex_to_leave_out, num_examples=self.num_examples)
        elif self.task == 'small_mnist':
            self.data_sets = load_small_mnist(self.data_dir)
        elif self.task == 'mnist':
            # Note: the 'mnist' task also uses the small-MNIST loader, with a hardcoded 'data' directory.
            self.data_sets = load_small_mnist('data')
        elif self.task == 'heart_disease':
            self.data_sets = load_heart_disease(ex_to_leave_out=self.ex_to_leave_out, num_examples=self.num_examples)
        elif self.task == 'income':
            self.data_sets = load_income(ex_to_leave_out=self.ex_to_leave_out, num_examples=self.num_examples)
        else:
            raise ValueError('Unknown task: %s' % self.task)

        # Record the input dimensions: MNIST variants are 28x28 grayscale images;
        # other tasks use the flat feature dimension of the training matrix.
        if 'mnist' in self.task:
            self.input_side = 28
            self.input_channels = 1
            self.input_dim = self.input_side * self.input_side * self.input_channels
        else:
            self.input_dim = self.data_sets.train.x.shape[1]
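The task-to-loader dispatch above is a plain chain of elif branches; the same mapping can also be written as a dictionary, which keeps adding a task to a one-line change. The sketch below is only an illustration of that alternative: apart from load_small_mnist and load_mnist (imported in Example 2), the import paths for the other loaders are assumptions, not part of the snippet.

# Sketch: the same task -> loader dispatch expressed as a dictionary.
# Import paths below are assumptions; adjust them to the actual project layout.
from load_mnist import load_small_mnist, load_mnist
from load_spam import load_spam
from load_heart_disease import load_heart_disease
from load_income import load_income

def build_data_sets(task, data_dir, ex_to_leave_out=None, num_examples=None):
    loaders = {
        'spam_enron': lambda: load_spam(ex_to_leave_out=ex_to_leave_out, num_examples=num_examples),
        'small_mnist': lambda: load_small_mnist(data_dir),
        'mnist': lambda: load_small_mnist(data_dir),  # mirrors the small-MNIST loader used above
        'heart_disease': lambda: load_heart_disease(ex_to_leave_out=ex_to_leave_out, num_examples=num_examples),
        'income': lambda: load_income(ex_to_leave_out=ex_to_leave_out, num_examples=num_examples),
    }
    if task not in loaders:
        raise ValueError('Unknown task: %s' % task)
    return loaders[task]()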
Example 2
# import scipy
# import sklearn

import random

import influence.experiments as experiments
from influence.all_CNN_c import All_CNN_C

from load_mnist import load_small_mnist, load_mnist

import tensorflow as tf

# np.random.seed(42)

# Load the 10-class small MNIST dataset from the local 'data' directory.
data_sets = load_small_mnist('data')

# Model and training hyperparameters for the All-CNN-C architecture.
num_classes = 10
input_side = 28                # MNIST images are 28x28 pixels
input_channels = 1             # grayscale
input_dim = input_side * input_side * input_channels  # 784 flattened inputs
weight_decay = 0.001
batch_size = 500

initial_learning_rate = 0.0001
decay_epochs = [10000, 20000]  # schedule for decaying the learning rate
hidden1_units = 8
hidden2_units = 8
hidden3_units = 8
conv_patch_size = 3            # 3x3 convolution filters
keep_probs = [1.0, 1.0]        # dropout keep probabilities (1.0 = no dropout)
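In the reference influence-release scripts, hyperparameters like these are then passed to the All_CNN_C constructor imported above. The continuation below is a sketch under that assumption: the keyword-argument names follow that reference code, and specifics such as damping, train_dir, log_dir, model_name and the training step count are illustrative values, not part of the snippet.

# Sketch (assumed continuation): wire the hyperparameters into the model.
model = All_CNN_C(
    input_side=input_side,
    input_channels=input_channels,
    conv_patch_size=conv_patch_size,
    hidden1_units=hidden1_units,
    hidden2_units=hidden2_units,
    hidden3_units=hidden3_units,
    weight_decay=weight_decay,
    num_classes=num_classes,
    batch_size=batch_size,
    data_sets=data_sets,
    initial_learning_rate=initial_learning_rate,
    damping=1e-2,                        # assumed damping for the inverse-HVP solver
    decay_epochs=decay_epochs,
    mini_batch=True,
    train_dir='output',                  # assumed checkpoint directory
    log_dir='log',                       # assumed log directory
    model_name='mnist_small_all_cnn_c')  # assumed model name

model.train(num_steps=500000)            # assumed step count; adjust as needed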