Example #1
0
 def __init__(self, cnt):
     """Build a 2-3-1 XOR network and seed a GA population sized from it.

     `cnt` is the number of individuals in the Population; each Gene has
     one allele per network weight (len of the arrayified network).
     """
     net = Network()
     # Banks are added input -> hidden -> output, then chained together.
     for bank_name, bank_size in (('input', 2), ('hidden', 3), ('output', 1)):
         net.add(Layer(bank_name, bank_size))
     net.connect('input', 'hidden')
     net.connect('hidden', 'output')
     xor_inputs = [[0.0, 0.0],
                   [0.0, 1.0],
                   [1.0, 0.0],
                   [1.0, 1.0]]
     xor_targets = [[0.0],
                    [1.0],
                    [1.0],
                    [0.0]]
     net.setInputs(xor_inputs)
     net.setOutputs(xor_targets)
     net.setVerbosity(0)
     net.setTolerance(.4)
     net.setLearning(0)
     genome = net.arrayify()
     self.network = net
     GA.__init__(self,
                 Population(cnt, Gene, size=len(genome), verbose=1,
                            min=-10, max=10, maxStep=1,
                            imin=-10, imax=10,
                            elitePercent=.01),
                 mutationRate=0.05, crossoverRate=0.6,
                 maxGeneration=400, verbose=1)
Example #2
0
def test_dataset():
    """
    Load MNIST dataset after network creation.
    """
    net = Network("MNIST")
    net.add(Layer("input", shape=784, vshape=(28, 28),
                  colormap="hot", minmax=(0, 1)))
    # Two identical relu hidden banks with dropout.
    for bank in ("hidden1", "hidden2"):
        net.add(Layer(bank, shape=512, vshape=(16, 32),
                      activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    for src, dst in (('input', 'hidden1'),
                     ('hidden1', 'hidden2'),
                     ('hidden2', 'output')):
        net.connect(src, dst)
    net.compile(optimizer="adam", error="binary_crossentropy")
    # Dataset is fetched only after the network is fully compiled.
    net.dataset.get("mnist")
    assert net is not None
Example #3
0
def import_keras_model(model, network_name):
    """
    Import a keras model into conx.

    Arguments:
        model: a built keras Model whose layers will be wrapped.
        network_name: name for the new conx Network.

    Returns:
        A conx Network whose layers mirror (and share) the keras model's
        layers, with per-layer internal models for probing activations.
    """
    from .network import Network
    import inspect
    import conx
    network = Network(network_name)
    network.model = model
    # Map conx layer class names ("DenseLayer", ...) to their classes so a
    # keras class name + "Layer" suffix selects the matching conx wrapper.
    conx_layers = {
        name: layer
        for (name, layer) in inspect.getmembers(conx.layers, inspect.isclass)
    }
    # First, make all of the conx layers:
    for layer in model.layers:
        clayer_class = conx_layers[layer.__class__.__name__ + "Layer"]
        if clayer_class.__name__ == "InputLayerLayer":
            # Input layers are rebuilt from the keras config rather than
            # wrapped directly.
            clayer = conx.layers.InputLayer(layer.name, None)
            clayer.shape = None
            clayer.params["batch_shape"] = layer.get_config()["batch_input_shape"]
            clayer.k = clayer.make_input_layer_k()
            clayer.keras_layer = clayer.k
        else:
            clayer = clayer_class(**layer.get_config())
            clayer.k = layer
            clayer.keras_layer = layer
        network.add(clayer)
    # Next, connect them up:
    for layer_from in model.layers:
        # BUG FIX: this previously iterated `layer.outbound_nodes`, where
        # `layer` was the stale loop variable left over from the loop above,
        # so only the LAST layer's outbound edges were ever walked.
        for node in layer_from.outbound_nodes:
            # NOTE(review): other connect() calls in this codebase take layer
            # *names*; passing the keras layer object here may need to be
            # `layer_from.name` — confirm against Network.connect.
            network.connect(layer_from, node.outbound_layer.name)
            print("connecting:", layer_from, node.outbound_layer.name)
    # Connect them all up, and set input banks:
    network.connect()
    for clayer in network.layers:
        clayer.input_names = network.input_bank_order
    # Finally, make the internal models:
    for clayer in network.layers:
        ## FIXME: the appropriate inputs:
        if clayer.kind() != "input":
            clayer.model = keras.models.Model(
                inputs=model.layers[0].input,
                outputs=clayer.keras_layer.output)
    return network
Example #4
0
def test_xor1():
    """
    Standard XOR.
    """
    net = Network("XOR")
    for bank, size in (("input", 2), ("hidden", 5), ("output", 1)):
        net.add(Layer(bank, size))
    net.connect("input", "hidden")
    net.connect("hidden", "output")
    net.compile(error="binary_crossentropy", optimizer="adam")
    net.summary()
    net.model.summary()
    # The four XOR patterns as (input, target) pairs.
    patterns = [[[0, 0], [0]],
                [[0, 1], [1]],
                [[1, 0], [1]],
                [[1, 1], [0]]]
    net.dataset.load(patterns)
    net.train(epochs=2000, accuracy=1, report_rate=25)
    net.test()
    # Round-trip the weights through disk before rendering.
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    svg = net.build_svg()
    assert net is not None
Example #5
0
def test_xor1():
    """
    Standard XOR.
    """
    net = Network("XOR")
    for bank, size in (("input", 2), ("hidden", 5), ("output", 1)):
        net.add(Layer(bank, size))
    net.connect("input", "hidden")
    net.connect("hidden", "output")
    net.compile(error="binary_crossentropy", optimizer="adam")
    net.summary()
    net.model.summary()
    # The four XOR patterns as (input, target) pairs.
    patterns = [[[0, 0], [0]],
                [[0, 1], [1]],
                [[1, 0], [1]],
                [[1, 1], [0]]]
    net.dataset.load(patterns)
    net.train(epochs=2000, accuracy=1, report_rate=25, plot=False)
    net.evaluate(show=True)
    # Round-trip the weights through disk before rendering.
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    svg = net.to_svg()
    assert net is not None
Example #6
0
def test_images():
    """Load MNIST images first, then build and render the network."""
    net = Network("MNIST")
    net.dataset.get("mnist")
    assert net.dataset.inputs.shape == [(28, 28, 1)]
    net.add(Layer("input", shape=(28, 28, 1), colormap="hot", minmax=(0, 1)))
    net.add(FlattenLayer("flatten"))
    # Two identical relu hidden banks with dropout.
    for bank in ("hidden1", "hidden2"):
        net.add(Layer(bank, shape=512, vshape=(16, 32),
                      activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    for src, dst in (('input', 'flatten'), ('flatten', 'hidden1'),
                     ('hidden1', 'hidden2'), ('hidden2', 'output')):
        net.connect(src, dst)
    net.compile(optimizer="adam", error="binary_crossentropy")
    svg = net.to_svg()
    assert svg is not None
    net.dataset.clear()
Example #7
0
def test_images():
    """Load MNIST images first, then build and render the network."""
    net = Network("MNIST")
    net.get_dataset("mnist")
    assert net.dataset.inputs.shape == [(28, 28, 1)]
    net.add(Layer("input", shape=(28, 28, 1), colormap="hot", minmax=(0, 1)))
    net.add(FlattenLayer("flatten"))
    # Two identical relu hidden banks with dropout.
    for bank in ("hidden1", "hidden2"):
        net.add(Layer(bank, shape=512, vshape=(16, 32),
                      activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    for src, dst in (('input', 'flatten'), ('flatten', 'hidden1'),
                     ('hidden1', 'hidden2'), ('hidden2', 'output')):
        net.connect(src, dst)
    net.compile(optimizer="adam", error="binary_crossentropy")
    svg = net.to_svg()
    assert svg is not None
    net.dataset.clear()
Example #8
0
def test_xor2():
    """
    Two inputs, two outputs.
    """
    net = Network("XOR2")
    # Banks, added in the same order as before: inputs, hiddens, outputs.
    for bank in ("input1", "input2"):
        net.add(Layer(bank, shape=1))
    for bank in ("hidden1", "hidden2", "shared-hidden"):
        net.add(Layer(bank, shape=2, activation="sigmoid"))
    for bank in ("output1", "output2"):
        net.add(Layer(bank, shape=1, activation="sigmoid"))
    for src, dst in (("input1", "hidden1"), ("input2", "hidden2"),
                     ("hidden1", "shared-hidden"), ("hidden2", "shared-hidden"),
                     ("shared-hidden", "output1"), ("shared-hidden", "output2")):
        net.connect(src, dst)
    net.compile(error='mean_squared_error',
                optimizer=SGD(lr=0.3, momentum=0.9))

    samples = [
        ([[0], [0]], [[0], [0]]),
        ([[0], [1]], [[1], [1]]),
        ([[1], [0]], [[1], [1]]),
        ([[1], [1]], [[0], [0]])
    ]
    net.dataset.load(samples)
    net.train(2000, report_rate=10, accuracy=1, plot=False)
    net.test()
    # Probe each bank with the same input pair (same order as before).
    for bank in ("shared-hidden", "output1", "output2",
                 "hidden1", "hidden2", "output1", "output2"):
        net.propagate_to(bank, [[1], [1]])
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    net.test()
    svg = net.to_svg()
    assert net is not None
Example #9
0
def test_xor2():
    """
    Two inputs, two outputs.
    """
    net = Network("XOR2")
    # Banks, added in the same order as before: inputs, hiddens, outputs.
    for bank in ("input1", "input2"):
        net.add(Layer(bank, shape=1))
    for bank in ("hidden1", "hidden2", "shared-hidden"):
        net.add(Layer(bank, shape=2, activation="sigmoid"))
    for bank in ("output1", "output2"):
        net.add(Layer(bank, shape=1, activation="sigmoid"))
    for src, dst in (("input1", "hidden1"), ("input2", "hidden2"),
                     ("hidden1", "shared-hidden"), ("hidden2", "shared-hidden"),
                     ("shared-hidden", "output1"), ("shared-hidden", "output2")):
        net.connect(src, dst)
    net.compile(error='mean_squared_error',
                optimizer=SGD(lr=0.3, momentum=0.9))

    samples = [
        ([[0], [0]], [[0], [0]]),
        ([[0], [1]], [[1], [1]]),
        ([[1], [0]], [[1], [1]]),
        ([[1], [1]], [[0], [0]])
    ]
    net.dataset.load(samples)
    net.train(2000, report_rate=10, accuracy=1, plot=False)
    net.evaluate(show=True)
    # Probe each bank with the same input pair (same order as before).
    for bank in ("shared-hidden", "output1", "output2",
                 "hidden1", "hidden2", "output1", "output2"):
        net.propagate_to(bank, [[1], [1]])
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    net.evaluate(show=True)
    svg = net.to_svg()
    assert net is not None
Example #10
0
def test_dataset():
    """
    Load Virtual MNIST dataset after network creation.
    """
    net = Network("MNIST")
    net.add(Layer("input", shape=784, vshape=(28, 28),
                  colormap="hot", minmax=(0, 1)))
    # Two identical relu hidden banks with dropout.
    for bank in ("hidden1", "hidden2"):
        net.add(Layer(bank, shape=512, vshape=(16, 32),
                      activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    for src, dst in (('input', 'hidden1'),
                     ('hidden1', 'hidden2'),
                     ('hidden2', 'output')):
        net.connect(src, dst)
    net.compile(optimizer="adam", error="binary_crossentropy")
    # Dataset is fetched only after the network is fully compiled.
    net.get_dataset("mnist")
    assert net is not None
    net.dataset.clear()
Example #11
0
def test_cifar10():
    """
    Test the cifar10 API and training.
    """
    from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer

    batch_size = 32
    num_classes = 10
    epochs = 200
    data_augmentation = True
    num_predictions = 20

    net = Network("CIRAR10")
    net.add(Layer("input", (32, 32, 3)))
    # Two conv/conv/pool stages: 32 filters, then 64.
    net.add(Conv2DLayer("conv1", 32, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv2", 32, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool1", pool_size=(2, 2), dropout=0.25))
    net.add(Conv2DLayer("conv3", 64, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv4", 64, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool2", pool_size=(2, 2), dropout=0.25))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", 512, activation='relu', vshape=(16, 32), dropout=0.5))
    net.add(Layer("output", num_classes, activation='softmax'))
    # Auto-connect the banks in the order they were added.
    net.connect()

    # initiate RMSprop optimizer
    rmsprop = RMSprop(lr=0.0001, decay=1e-6)
    net.compile(error='categorical_crossentropy', optimizer=rmsprop)
    net.dataset.get("cifar10")
    net.dashboard()
    # Trim, shuffle, and split the data before a (short) training run.
    net.dataset.slice(10)
    net.dataset.shuffle()
    net.dataset.split(.5)
    net.train()
    net.propagate(net.dataset.inputs[0])
Example #12
0
File: xor4.py  Project: lepy/conx
from conx import Network, Layer, SGD

# Build a two-input / two-output XOR network with a shared hidden bank.
net = Network("XOR2")
for bank in ("input1", "input2"):
    net.add(Layer(bank, 2))
for bank in ("hidden1", "hidden2", "shared-hidden", "output1", "output2"):
    net.add(Layer(bank, 2, activation="sigmoid"))

for src, dst in (("input1", "hidden1"), ("input2", "hidden2"),
                 ("hidden1", "shared-hidden"), ("hidden2", "shared-hidden"),
                 ("shared-hidden", "output1"), ("shared-hidden", "output2")):
    net.connect(src, dst)

net.compile(loss='mean_squared_error', optimizer=SGD(lr=0.3, momentum=0.9))

# Four XOR patterns, duplicated across the paired input/output banks.
dataset = [
    ([[0, 0], [0, 0]], [[0, 0], [0, 0]]),
    ([[0, 0], [1, 1]], [[1, 1], [1, 1]]),
    ([[1, 1], [0, 0]], [[1, 1], [1, 1]]),
    ([[1, 1], [1, 1]], [[0, 0], [0, 0]]),
]
net.dataset.load(dataset)
net.train(2000, report_rate=10, accuracy=1)
net.test()
Example #13
0
def test_cifar10():
    """
    Test the cifar10 API and training.
    """
    from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer

    batch_size = 32
    num_classes = 10
    epochs = 200
    data_augmentation = True
    num_predictions = 20

    net = Network("CIRAR10")
    net.add(Layer("input", (32, 32, 3)))
    # Two conv/conv/pool stages: 32 filters, then 64.
    net.add(Conv2DLayer("conv1", 32, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv2", 32, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool1", pool_size=(2, 2), dropout=0.25))
    net.add(Conv2DLayer("conv3", 64, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv4", 64, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool2", pool_size=(2, 2), dropout=0.25))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", 512, activation='relu', vshape=(16, 32), dropout=0.5))
    net.add(Layer("output", num_classes, activation='softmax'))
    # Auto-connect the banks in the order they were added.
    net.connect()

    # initiate RMSprop optimizer
    rmsprop = RMSprop(lr=0.0001, decay=1e-6)
    net.compile(error='categorical_crossentropy',
                optimizer=rmsprop)
    net.get_dataset("cifar10")
    # Exercise the dashboard navigation controls.
    widget = net.dashboard()
    for move in ("begin", "next", "end", "prev"):
        widget.goto(move)
    widget.prop_one()
    # Trim, shuffle, and split the data before a (short) training run.
    net.dataset.slice(10)
    net.dataset.shuffle()
    net.dataset.split(.5)
    net.train(plot=False)
    net.propagate(net.dataset.inputs[0])
    net.dataset.clear()