Example 1
def test_cifar10():
    """
    Test the cifar10 API and training.
    """
    from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer
    from keras.optimizers import RMSprop  # used below; not re-exported in the conx import above

    # Hyperparameters carried over from the Keras CIFAR-10 example; several are unused in this quick test
    batch_size = 32
    num_classes = 10
    epochs = 200
    data_augmentation = True
    num_predictions = 20

    net = Network("CIRAR10")
    net.add(Layer("input", (32, 32, 3)))
    net.add(Conv2DLayer("conv1", 32, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv2", 32, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool1", pool_size=(2, 2), dropout=0.25))
    net.add(Conv2DLayer("conv3", 64, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv4", 64, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool2", pool_size=(2, 2), dropout=0.25))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", 512, activation='relu', vshape=(16, 32), dropout=0.5))
    net.add(Layer("output", num_classes, activation='softmax'))
    net.connect()

    # initialize the RMSprop optimizer
    opt = RMSprop(lr=0.0001, decay=1e-6)
    net.compile(error='categorical_crossentropy',
                optimizer=opt)
    net.dataset.get("cifar10")
    widget = net.dashboard()
    widget.goto("begin")
    widget.goto("next")
    widget.goto("end")
    widget.goto("prev")
    widget.prop_one()
    net.dataset.slice(10)
    net.dataset.shuffle()
    net.dataset.split(.5)
    net.train(plot=False)
    net.propagate(net.dataset.inputs[0])
    net.dataset.clear()
Example 2
def test_cifar10():
    """
    Test the cifar10 API and training.
    """
    from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer
    from keras.optimizers import RMSprop  # used below; not re-exported in the conx import above

    # Hyperparameters carried over from the Keras CIFAR-10 example; several are unused in this quick test
    batch_size = 32
    num_classes = 10
    epochs = 200
    data_augmentation = True
    num_predictions = 20

    net = Network("CIRAR10")
    net.add(Layer("input", (32, 32, 3)))
    net.add(Conv2DLayer("conv1", 32, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv2", 32, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool1", pool_size=(2, 2), dropout=0.25))
    net.add(Conv2DLayer("conv3", 64, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv4", 64, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool2", pool_size=(2, 2), dropout=0.25))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", 512, activation='relu', vshape=(16, 32), dropout=0.5))
    net.add(Layer("output", num_classes, activation='softmax'))
    net.connect()

    # initialize the RMSprop optimizer
    opt = RMSprop(lr=0.0001, decay=1e-6)
    net.compile(error='categorical_crossentropy',
                optimizer=opt)
    net.get_dataset("cifar10")
    widget = net.dashboard()
    widget.goto("begin")
    widget.goto("next")
    widget.goto("end")
    widget.goto("prev")
    widget.prop_one()
    net.dataset.slice(10)
    net.dataset.shuffle()
    net.dataset.split(.5)
    net.train(plot=False)
    net.propagate(net.dataset.inputs[0])
    net.dataset.clear()
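
The `dashboard()` / `goto()` calls in the two CIFAR-10 tests above assume a Jupyter widget front end. A minimal headless sketch of the same dataset-and-training flow, reusing only calls that already appear in these examples (the smaller layer sizes and the "CIFAR10-mini" name are illustrative, not from the original tests):

from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer

net = Network("CIFAR10-mini")
net.add(Layer("input", (32, 32, 3)))
net.add(Conv2DLayer("conv1", 8, (3, 3), activation="relu"))
net.add(MaxPool2DLayer("pool1", pool_size=(2, 2)))
net.add(FlattenLayer("flatten"))
net.add(Layer("output", 10, activation="softmax"))
net.connect()
net.compile(error="categorical_crossentropy", optimizer="adam")

net.get_dataset("cifar10")   # newer spelling; older conx versions use net.dataset.get("cifar10")
net.dataset.slice(10)        # keep only 10 patterns so this finishes quickly
net.dataset.shuffle()
net.dataset.split(.5)        # assumption: reserves half of the patterns for testing
net.train(plot=False)
net.propagate(net.dataset.inputs[0])
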
Example 3
def test_xor2():
    """
    Two inputs, two outputs.
    """
    net = Network("XOR2")
    net.add(Layer("input1", shape=1))
    net.add(Layer("input2", shape=1))
    net.add(Layer("hidden1", shape=2, activation="sigmoid"))
    net.add(Layer("hidden2", shape=2, activation="sigmoid"))
    net.add(Layer("shared-hidden", shape=2, activation="sigmoid"))
    net.add(Layer("output1", shape=1, activation="sigmoid"))
    net.add(Layer("output2", shape=1, activation="sigmoid"))
    net.connect("input1", "hidden1")
    net.connect("input2", "hidden2")
    net.connect("hidden1", "shared-hidden")
    net.connect("hidden2", "shared-hidden")
    net.connect("shared-hidden", "output1")
    net.connect("shared-hidden", "output2")
    net.compile(error='mean_squared_error',
                optimizer=SGD(lr=0.3, momentum=0.9))

    net.dataset.load([
        ([[0],[0]], [[0],[0]]),
        ([[0],[1]], [[1],[1]]),
        ([[1],[0]], [[1],[1]]),
        ([[1],[1]], [[0],[0]])
    ])
    net.train(2000, report_rate=10, accuracy=1, plot=False)
    net.test()
    net.propagate_to("shared-hidden", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.propagate_to("hidden1", [[1], [1]])
    net.propagate_to("hidden2", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    net.test()
    svg = net.to_svg()
    assert net is not None
Example 4
def test_xor2():
    """
    Two inputs, two outputs.
    """
    net = Network("XOR2")
    net.add(Layer("input1", shape=1))
    net.add(Layer("input2", shape=1))
    net.add(Layer("hidden1", shape=2, activation="sigmoid"))
    net.add(Layer("hidden2", shape=2, activation="sigmoid"))
    net.add(Layer("shared-hidden", shape=2, activation="sigmoid"))
    net.add(Layer("output1", shape=1, activation="sigmoid"))
    net.add(Layer("output2", shape=1, activation="sigmoid"))
    net.connect("input1", "hidden1")
    net.connect("input2", "hidden2")
    net.connect("hidden1", "shared-hidden")
    net.connect("hidden2", "shared-hidden")
    net.connect("shared-hidden", "output1")
    net.connect("shared-hidden", "output2")
    net.compile(error='mean_squared_error',
                optimizer=SGD(lr=0.3, momentum=0.9))

    net.dataset.load([
        ([[0],[0]], [[0],[0]]),
        ([[0],[1]], [[1],[1]]),
        ([[1],[0]], [[1],[1]]),
        ([[1],[1]], [[0],[0]])
    ])
    net.train(2000, report_rate=10, accuracy=1, plot=False)
    net.evaluate(show=True)
    net.propagate_to("shared-hidden", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.propagate_to("hidden1", [[1], [1]])
    net.propagate_to("hidden2", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    net.evaluate(show=True)
    svg = net.to_svg()
    assert net is not None
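
In both versions of `test_xor2`, a pattern such as `[[0], [1]]` supplies one value per input bank: 0 to `input1` and 1 to `input2`. A small hedged sketch that could be appended inside the test body after training (exact activation values depend on the random initialization):

    for a in (0, 1):
        for b in (0, 1):
            print([a, b],
                  "shared-hidden:", net.propagate_to("shared-hidden", [[a], [b]]),
                  "output1:", net.propagate_to("output1", [[a], [b]]))
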
Example 5
def test_xor1():
    """
    Standard XOR.
    """
    net = Network("XOR")
    net.add(Layer("input", 2))
    net.add(Layer("hidden", 5))
    net.add(Layer("output", 1))
    net.connect("input", "hidden")
    net.connect("hidden", "output")
    net.compile(error="binary_crossentropy", optimizer="adam")
    net.summary()
    net.model.summary()
    net.dataset.load([[[0, 0], [0]],
                      [[0, 1], [1]],
                      [[1, 0], [1]],
                      [[1, 1], [0]]])
    net.train(epochs=2000, accuracy=1, report_rate=25)
    net.test()
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    svg = net.build_svg()
    assert net is not None
Example 6
def test_xor1():
    """
    Standard XOR.
    """
    net = Network("XOR")
    net.add(Layer("input", 2))
    net.add(Layer("hidden", 5))
    net.add(Layer("output", 1))
    net.connect("input", "hidden")
    net.connect("hidden", "output")
    net.compile(error="binary_crossentropy", optimizer="adam")
    net.summary()
    net.model.summary()
    net.dataset.load([[[0, 0], [0]],
                      [[0, 1], [1]],
                      [[1, 0], [1]],
                      [[1, 1], [0]]])
    net.train(epochs=2000, accuracy=1, report_rate=25, plot=False)
    net.evaluate(show=True)
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    svg = net.to_svg()
    assert net is not None
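
Both XOR tests can also be run outside of pytest; a minimal sketch, assuming conx and a Keras backend are installed and the test functions above live in one module:

if __name__ == "__main__":
    test_xor1()
    test_xor2()
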
Example 7
File: xor4.py Project: lepy/conx
from conx import Network, Layer, SGD

net = Network("XOR2")
net.add(Layer("input1", 2))
net.add(Layer("input2", 2))
net.add(Layer("hidden1", 2, activation="sigmoid"))
net.add(Layer("hidden2", 2, activation="sigmoid"))
net.add(Layer("shared-hidden", 2, activation="sigmoid"))
net.add(Layer("output1", 2, activation="sigmoid"))
net.add(Layer("output2", 2, activation="sigmoid"))

net.connect("input1", "hidden1")
net.connect("input2", "hidden2")
net.connect("hidden1", "shared-hidden")
net.connect("hidden2", "shared-hidden")
net.connect("shared-hidden", "output1")
net.connect("shared-hidden", "output2")

net.compile(loss='mean_squared_error', optimizer=SGD(lr=0.3, momentum=0.9))

ds = [([[0, 0], [0, 0]], [[0, 0], [0, 0]]),
      ([[0, 0], [1, 1]], [[1, 1], [1, 1]]),
      ([[1, 1], [0, 0]], [[1, 1], [1, 1]]),
      ([[1, 1], [1, 1]], [[0, 0], [0, 0]])]
net.dataset.load(ds)
net.train(2000, report_rate=10, accuracy=1)
net.test()
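
A quick post-training check for this script, reusing the banked `propagate_to` pattern from the earlier XOR2 tests (here each bank is two units wide; exact values depend on the run):

print(net.propagate_to("output1", [[1, 1], [0, 0]]))   # output1 activations for one input pattern
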
Example 8
from conx import Network

inputs = [[0, 0],
          [0, 1],
          [1, 0],
          [1, 1]]

def xor(inputs):
    # XOR of the two inputs, mapped onto soft targets: 0.1 for False, 0.9 for True.
    a = inputs[0]
    b = inputs[1]
    return [[0.1, 0.9][int((a or b) and not(a and b))]]
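
# Quick sanity check of the xor() target function above (plain Python, no conx needed):
# it maps the four input pairs onto soft targets 0.1/0.9 instead of hard 0/1.
for pair in inputs:
    print(pair, "->", xor(pair))   # [0, 0] -> [0.1], [0, 1] -> [0.9], [1, 0] -> [0.9], [1, 1] -> [0.1]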

net = Network(2, 2, 1)
net.set_inputs(inputs)
net.set_target_function(xor)
net.train()
net.test()

net = Network(2, 2, 2, 1)
net.set_inputs(inputs)
net.set_target_function(xor)
net.train(max_training_epochs=10000)
net.test()

inputs = [[[0, 0], [0, 0]],
          [[0, 1], [1, 1]],
          [[1, 0], [1, 1]],
          [[1, 1], [0, 0]]]

net = Network(2, 10, 2)
net.set_inputs(inputs)
Example 9
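# `train_set` is not defined in this snippet. A hedged sketch of one common source
# (assumption: the classic three-way MNIST pickle, where train_set[0] holds 784-float
# image vectors in [0, 1] and train_set[1] holds digit labels 0-9):
import gzip, pickle
with gzip.open("mnist.pkl.gz", "rb") as f:          # hypothetical path
    train_set, valid_set, test_set = pickle.load(f, encoding="latin1")
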
net = Network(784, 100, 1)

inputs = [train_set[0][i] for i in range(len(train_set[0]))]
targets = [[train_set[1][i]/9.0] for i in range(len(train_set[0]))]

inputs = inputs[:100]
targets = targets[:100]

def display_digit(vector):
    # Print a flattened 28x28 digit as ASCII art; higher-valued pixels print as higher digits, background as spaces.
    for r in range(28):
        for c in range(28):
            v = int(vector[r * 28 + c] * 10)
            ch = " .23456789"[v]
            print(ch, end="")
        print()
        
net.display_test_input = display_digit
net.set_inputs(list(zip(inputs, targets)))

net.test(2)
net.train(report_rate=1, tolerance=0.05)
net.test(2)

for i in range(100):
    output = net.propagate(inputs[i])
    target = int(targets[i][0] * 9)
    print("target:", target, 
          "output:", output, 
          "correct?", int(output * 10) == target)