Beispiel #1
0
def nielson_3layer_300rows():
    """Train a [784, 30, 10] network on a reduced pickled MNIST dataset.

    The argument 30 to from_pickled_file presumably limits how much of the
    dataset is loaded — confirm against the loader.
    """
    data = mnist.MnistDataset.from_pickled_file(30)

    cfg = network_config.NetworkConfig()
    cfg.neuron_counts = [784, 30, 10]

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #2
0
def invgrad_3layer():
    """Train a [784, 30, 10] network using the InvGrad parameter-update rule."""
    data = mnist.MnistDataset.from_pickled_file()

    cfg = network_config.NetworkConfig()
    cfg.param_update_c = param_update_fns.InvGradParamUpdate()
    cfg.neuron_counts = [784, 30, 10]

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #3
0
def neilson_3layer_200rows_100epochs():
    """Train a [784, 30, 10] network on a reduced dataset for 100 epochs."""
    data = mnist.MnistDataset.from_pickled_file(20)

    cfg = network_config.NetworkConfig()
    cfg.neuron_counts = [784, 30, 10]
    cfg.epochs = 100

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #4
0
    def test_mnist(self):
        """End-to-end check: a [784, 30, 10] net trained with SGD reaches
        at least 90% accuracy on the MNIST test split."""
        from loader import mnist_loader

        train_set, validation_set, eval_set = mnist_loader.load_data_wrapper()

        net = network.Network([784, 30, 10])
        net.SGD(train_set, 30, 10, 3.0)

        # `* 1.0` forces float division (Python 2 compatible form, kept as-is).
        accuracy = net.evaluate(eval_set) * 1.0 / len(eval_set)
        self.assertTrue(accuracy >= 0.9)
Beispiel #5
0
def delta_boosted_3layer_300rows():
    """3-layer run on reduced data with delta_boost raised to 4."""
    data = mnist.MnistDataset.from_pickled_file(30)

    cfg = network_config.NetworkConfig()
    cfg.neuron_counts = [784, 30, 10]
    cfg.delta_boost = 4
    # cfg.eta = 1  (experiment left disabled)

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #6
0
def neilson_5layer_200rows_100epochs():
    """Train a 5-layer [784, 30, 20, 15, 10] network with LeCun-normal init."""
    data = mnist.MnistDataset.from_pickled_file(20)

    cfg = network_config.NetworkConfig()
    cfg.init_c = init_fns.LeCunNormalInit()
    # cfg.activation_c = activation_fns.ReLUActivation()  (experiment left disabled)
    cfg.neuron_counts = [784, 30, 20, 15, 10]

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #7
0
def sq_grad_3layer_300rows():
    """3-layer run with the squared-gradient update rule and high eta.

    NOTE(review): trains against data.test rather than data.validate,
    unlike the sibling experiments — presumably deliberate, but verify.
    """
    data = mnist.MnistDataset.from_pickled_file(30, True)

    cfg = network_config.NetworkConfig()
    cfg.neuron_counts = [784, 30, 10]
    cfg.param_update_c = param_update_fns.SquaredParamUpdate()
    cfg.eta = 18
    cfg.epochs = 100

    model = nn.Network(cfg)
    model.train(data.train, data.test)
Beispiel #8
0
def delta_unboosted_5layer_300rows():
    """5-layer run with delta_boost pinned to 1 (no boost) and small eta."""
    data = mnist.MnistDataset.from_pickled_file(30)

    cfg = network_config.NetworkConfig()
    # cfg.init_c = init_fns.LeCunNormalInit()  (experiment left disabled)
    # cfg.activation_c = activation_fns.ReLUActivation()  (experiment left disabled)
    cfg.neuron_counts = [784, 30, 20, 15, 10]
    cfg.eta = 0.1
    cfg.delta_boost = 1

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #9
0
def nielson_6layer_full_data():
    """Long (500-epoch) run of a 6-layer network with LeCun-normal init.

    NOTE(review): despite the name, from_pickled_file is still called
    with 30, same as the reduced-data runs — confirm intent.
    """
    data = mnist.MnistDataset.from_pickled_file(30)

    cfg = network_config.NetworkConfig()
    cfg.init_c = init_fns.LeCunNormalInit()
    # cfg.loss_c = loss_fns.QuadraticLoss()  (experiment left disabled)
    # cfg.activation_c = activation_fns.SigmoidActivation()  (experiment left disabled)
    cfg.neuron_counts = [784, 30, 30, 30, 30, 10]
    cfg.eta = 0.001
    cfg.lmbda = 0.025
    cfg.epochs = 500

    model = nn.Network(cfg)
    model.train(data.train, data.validate)
Beispiel #10
0
def linear_data_classical():
    """Train a small [1, 3, 5, 2] network on a synthetic sign-classification task.

    Each sample is a scalar drawn uniformly from [-5, 5); the label is a
    one-hot pair: [[0], [1]] for negative inputs, [[1], [0]] otherwise.
    """
    def make_samples(count):
        # 1x1 column "images" so the samples match the network's input shape.
        inputs = [
            np.reshape(v, (1, 1)) for v in np.random.uniform(-5, 5, count)
        ]
        outputs = [[[0], [1]] if v[0] < 0 else [[1], [0]] for v in inputs]
        # Third element is a running sample index.
        return list(zip(inputs, outputs, range(len(inputs))))

    dataset = ld.LabeledData()
    dataset.train = make_samples(10)
    dataset.test = make_samples(100)
    dataset.validate = make_samples(100)

    cfg = network_config.NetworkConfig()
    # cfg.param_update_c = param_update_fns.InvGradParamUpdate()  (disabled)
    cfg.neuron_counts = [1, 3, 5, 2]
    cfg.epochs = 50

    model = nn.Network(cfg)
    model.train(dataset)
Beispiel #11
0
 def __init__(self, config: nn.network_config.NetworkConfig):
     """Store the config and build two independent networks from it."""
     self.config = config
     self.networks = [nn.Network(config) for _ in range(2)]
import network.network as network
import network.mnist_loader as mnist
import pickle

# Load the standard MNIST splits.
training_data, validation_data, test_data = mnist.load_data()

# Load the adversarial dataset created by the "adversarial_dataset" script.
# NOTE: pickle.load is only safe on trusted, locally generated files.
with open('adversarial_samples_training_set.pkl', 'rb') as f:
    adversarial_dataset = pickle.load(f, encoding="latin1")

# Train the FNN on the adversarial samples mixed with the normal training set.
net2 = network.Network([784, 30, 10])
net2.SGD(adversarial_dataset + training_data, 100, 5, 0.1)

# Persist the trained network. Fix: use a context manager so the output file
# is flushed and closed deterministically (the original left it open).
filename = 'FNN_adversarial.pkl'
with open(filename, 'wb') as out_f:
    pickle.dump(net2, out_f)
Beispiel #13
0
if __name__ == "__main__":

    if (len(sys.argv) != 4):
        exit()
    else:
        try:
            ip = sys.argv[1]
            port = int(sys.argv[2])
            name = sys.argv[3]
        except e:
            exit()
    moves = ['N', 'S', 'E', 'O']
    i = 0

    net = network.Network(ip, port, name)
    # print("Numero de joueur : "+str(net.getNumPlayer()))

    mouleFixed = None
    while 1:
        # print("RECEPTION DU PLATEAU ")
        game = net.getBoardState()

        if not game:
            break

        myself = game.players[net.getNumPlayer()]
        # print("COMPUTING DE L'IA")
        depart = game.getPlayerCase(myself)

        if mouleFixed and type(game.getCase(mouleFixed.y,
Beispiel #14
0
def train_model():
    """Train Nielsen's [784, 30, 10] MNIST network with SGD and return it."""
    splits = mnist_loader.load_data_wrapper()
    training_data, validation_data, test_data = splits

    model = network.Network([784, 30, 10])
    model.SGD(training_data, 30, 10, 3.0, test_data=test_data)
    return model