Example #1
def import_keras_model(model, network_name):
    """
    Import a keras model into conx.

    """
    from .network import Network
    import inspect
    import keras
    import conx
    network = Network(network_name)
    network.model = model
    conx_layers = {
        name: layer
        for (name, layer) in inspect.getmembers(conx.layers, inspect.isclass)
    }
    # First, make all of the conx layers:
    for layer in model.layers:
        clayer_class = conx_layers[layer.__class__.__name__ + "Layer"]
        if clayer_class.__name__ == "InputLayerLayer":
            clayer = conx.layers.InputLayer(layer.name, None)
            #clayer.make_input_layer_k = lambda layer=layer: layer
            clayer.shape = None
            clayer.params["batch_shape"] = layer.get_config()["batch_input_shape"]
            #clayer.params = layer.get_config()
            clayer.k = clayer.make_input_layer_k()
            clayer.keras_layer = clayer.k
        else:
            clayer = clayer_class(**layer.get_config())
            clayer.k = layer
            clayer.keras_layer = layer
        network.add(clayer)
    # Next, connect them up:
    for layer_from in model.layers:
        for node in layer_from.outbound_nodes:
            network.connect(layer_from.name, node.outbound_layer.name)
            print("connecting:", layer_from.name, node.outbound_layer.name)
    # Connect them all up, and set input banks:
    network.connect()
    for clayer in network.layers:
        clayer.input_names = network.input_bank_order
    # Finally, make the internal models:
    for clayer in network.layers:
        ## FIXME: the appropriate inputs:
        if clayer.kind() != "input":
            clayer.model = keras.models.Model(
                inputs=model.layers[0].input,
                outputs=clayer.keras_layer.output)
    return network
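A minimal usage sketch for the importer above (hedged: it assumes this function is importable from the conx package and that a small functional-API Keras model with named layers maps cleanly onto conx's generated layer classes; the model built here is purely illustrative):

from keras.layers import Input, Dense
from keras.models import Model

# Build a tiny Keras model to import (hypothetical example).
inp = Input(shape=(2,), name="input")
hidden = Dense(5, activation="sigmoid", name="hidden")(inp)
out = Dense(1, activation="sigmoid", name="output")(hidden)
kmodel = Model(inputs=inp, outputs=out)

# Wrap the Keras model as a conx Network built by the function above.
net = import_keras_model(kmodel, "imported-xor")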
Example #2
 def plot(self,
          metrics='loss',
          symbols=None,
          interactive=True,
          format='svg'):
     """
     Plot all of the results of the experiment on a single plot.
     """
     from conx import Network
     import io
     import PIL.Image
     import matplotlib.pyplot as plt
     colors = list('bgrcmyk')
     if symbols is None:
         # Build a default color/line-style symbol per experiment category.
         symbols = {}
         count = 0
         for (category, exp_name) in self.results:
             if category not in symbols:
                 symbols[category] = colors[count % len(colors)] + '-'
                 count += 1
     fig_ax = None
     for (category, exp_name) in self.results:
         if exp_name in self.cache:
             net = self.cache[exp_name]
         else:
             net = Network.load(exp_name)
         fig_ax = net.plot(metrics,
                           return_fig_ax=True,
                           fig_ax=fig_ax,
                           label=category,
                           symbols=symbols,
                           title=self.name)
     fig, ax = fig_ax
     if interactive:
         plt.show(block=False)
     else:
         from IPython.display import SVG
         bytes = io.BytesIO()
         if format == "svg":
             plt.savefig(bytes, format="svg")
             plt.close(fig)
             img_bytes = bytes.getvalue()
             return SVG(img_bytes.decode())
         elif format == "pil":
             plt.savefig(bytes, format="png")
             plt.close(fig)
             bytes.seek(0)
             pil_image = PIL.Image.open(bytes)
             return pil_image
         else:
             raise Exception("format must be 'svg' or 'pil'")
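A hedged usage sketch for the plot() method above (assuming it belongs to a conx Experiment-like object, here called exp, whose results have already been collected; the call forms simply follow the signature shown):

# Hypothetical: `exp` exposes the plot() method defined above.
exp.plot()                                        # interactive window, default metric "loss"
svg = exp.plot(interactive=False, format="svg")   # returns an IPython SVG for notebook display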
Example #3
def test_network_constructor():
    """
    Network constructor.
    """
    net = Network("Constructor", 2, 5, 2)
    assert net is not None
Example #4
def test_cifar10():
    """
    Test the cifar10 API and training.
    """
    from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer
    from keras.optimizers import RMSprop

    batch_size = 32
    num_classes = 10
    epochs = 200
    data_augmentation = True
    num_predictions = 20

    net = Network("CIFAR10")
    net.add(Layer("input", (32, 32, 3)))
    net.add(Conv2DLayer("conv1", 32, (3, 3), padding='same',
                        activation='relu'))
    net.add(Conv2DLayer("conv2", 32, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool1", pool_size=(2, 2), dropout=0.25))
    net.add(Conv2DLayer("conv3", 64, (3, 3), padding='same',
                        activation='relu'))
    net.add(Conv2DLayer("conv4", 64, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool2", pool_size=(2, 2), dropout=0.25))
    net.add(FlattenLayer("flatten"))
    net.add(
        Layer("hidden1", 512, activation='relu', vshape=(16, 32), dropout=0.5))
    net.add(Layer("output", num_classes, activation='softmax'))
    net.connect()

    # initiate RMSprop optimizer
    opt = RMSprop(lr=0.0001, decay=1e-6)
    net.compile(error='categorical_crossentropy', optimizer=opt)
    net.dataset.get("cifar10")
    net.dashboard()
    net.dataset.slice(10)
    net.dataset.shuffle()
    net.dataset.split(.5)
    net.train()
    net.propagate(net.dataset.inputs[0])
Example #5
def test_xor1():
    """
    Standard XOR.
    """
    net = Network("XOR")
    net.add(Layer("input", 2))
    net.add(Layer("hidden", 5))
    net.add(Layer("output", 1))
    net.connect("input", "hidden")
    net.connect("hidden", "output")
    net.compile(error="binary_crossentropy", optimizer="adam")
    net.summary()
    net.model.summary()
    net.dataset.load([[[0, 0], [0]],
                      [[0, 1], [1]],
                      [[1, 0], [1]],
                      [[1, 1], [0]]])
    net.train(epochs=2000, accuracy=1, report_rate=25)
    net.test()
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    svg = net.build_svg()
    assert net is not None
Example #6
File: xor4.py Project: lepy/conx
from conx import Network, Layer, SGD

net = Network("XOR2")
net.add(Layer("input1", 2))
net.add(Layer("input2", 2))
net.add(Layer("hidden1", 2, activation="sigmoid"))
net.add(Layer("hidden2", 2, activation="sigmoid"))
net.add(Layer("shared-hidden", 2, activation="sigmoid"))
net.add(Layer("output1", 2, activation="sigmoid"))
net.add(Layer("output2", 2, activation="sigmoid"))

net.connect("input1", "hidden1")
net.connect("input2", "hidden2")
net.connect("hidden1", "shared-hidden")
net.connect("hidden2", "shared-hidden")
net.connect("shared-hidden", "output1")
net.connect("shared-hidden", "output2")

net.compile(loss='mean_squared_error', optimizer=SGD(lr=0.3, momentum=0.9))

ds = [([[0, 0], [0, 0]], [[0, 0], [0, 0]]),
      ([[0, 0], [1, 1]], [[1, 1], [1, 1]]),
      ([[1, 1], [0, 0]], [[1, 1], [1, 1]]),
      ([[1, 1], [1, 1]], [[0, 0], [0, 0]])]
net.dataset.load(ds)
net.train(2000, report_rate=10, accuracy=1)
net.test()
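After training, the two-bank network can be probed; a hedged sketch (assuming, as the propagate_to calls in later examples suggest, that propagate accepts one vector per input bank):

# Hypothetical probe of "XOR2": one 2-element vector per input bank.
print(net.propagate([[0, 0], [1, 1]]))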
Example #7
from conx import Network, SGD

ds = [[[0, 0], [0], "one"], [[0, 1], [1], "two"], [[1, 0], [1], "three"],
      [[1, 1], [0], "four"]]

net = Network("XOR", 2, 2, 1, activation="sigmoid")
net.compile(error='mean_squared_error', optimizer=SGD(lr=0.3, momentum=0.9))

# NOTE:
#    net = Network("XOR", 2, 3, 4, 1, activation="sigmoid")
# is the same as:
#    net = Network("XOR")
#    net.add(Layer("input", shape=2))
#    net.add(Layer("hidden1", shape=3, activation="sigmoid"))
#    net.add(Layer("hidden2", shape=4, activation="sigmoid"))
#    net.add(Layer("output", shape=1, activation="sigmoid"))
#    net.connect("input", "hidden1")
#    net.connect("hidden1", "hidden2")
#    net.connect("hidden2", "output")

net.dataset.load(ds)
net.train(2000, report_rate=10, accuracy=1)
net.test()
Example #8
def test_dataset2():
    """
    Load data before adding network.
    """
    net = Network("MNIST")
    net.add(Layer("input", shape=784, vshape=(28, 28), colormap="hot", minmax=(0,1)))
    net.add(Layer("hidden1", shape=512, vshape=(16,32), activation='relu', dropout=0.2))
    net.add(Layer("hidden2", shape=512, vshape=(16,32), activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    net.connect('input', 'hidden1')
    net.connect('hidden1', 'hidden2')
    net.connect('hidden2', 'output')
    net.compile(optimizer="adam", error="binary_crossentropy")
    net.get_dataset("mnist")
    net.dataset.split(100)
    net.dataset.slice(100)
    assert net is not None
    net.dataset.clear()
Example #9
def test_images():
    net = Network("MNIST")
    net.dataset.get("mnist")
    assert net.dataset.inputs.shape == [(28,28,1)]
    net.add(Layer("input", shape=(28, 28, 1), colormap="hot", minmax=(0,1)))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", shape=512, vshape=(16,32), activation='relu', dropout=0.2))
    net.add(Layer("hidden2", shape=512, vshape=(16,32), activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    net.connect('input', 'flatten')
    net.connect('flatten', 'hidden1')
    net.connect('hidden1', 'hidden2')
    net.connect('hidden2', 'output')
    net.compile(optimizer="adam", error="binary_crossentropy")
    svg = net.to_svg()
    assert svg is not None
    net.dataset.clear()
Example #10
def test_xor2():
    """
    Two inputs, two outputs.
    """
    net = Network("XOR2")
    net.add(Layer("input1", shape=1))
    net.add(Layer("input2", shape=1))
    net.add(Layer("hidden1", shape=2, activation="sigmoid"))
    net.add(Layer("hidden2", shape=2, activation="sigmoid"))
    net.add(Layer("shared-hidden", shape=2, activation="sigmoid"))
    net.add(Layer("output1", shape=1, activation="sigmoid"))
    net.add(Layer("output2", shape=1, activation="sigmoid"))
    net.connect("input1", "hidden1")
    net.connect("input2", "hidden2")
    net.connect("hidden1", "shared-hidden")
    net.connect("hidden2", "shared-hidden")
    net.connect("shared-hidden", "output1")
    net.connect("shared-hidden", "output2")
    net.compile(error='mean_squared_error',
                optimizer=SGD(lr=0.3, momentum=0.9))

    net.dataset.load([
        ([[0],[0]], [[0],[0]]),
        ([[0],[1]], [[1],[1]]),
        ([[1],[0]], [[1],[1]]),
        ([[1],[1]], [[0],[0]])
    ])
    net.train(2000, report_rate=10, accuracy=1, plot=False)
    net.evaluate(show=True)
    net.propagate_to("shared-hidden", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.propagate_to("hidden1", [[1], [1]])
    net.propagate_to("hidden2", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    net.evaluate(show=True)
    svg = net.to_svg()
    assert net is not None
Example #11
def test_xor1():
    """
    Standard XOR.
    """
    net = Network("XOR")
    net.add(Layer("input", 2))
    net.add(Layer("hidden", 5))
    net.add(Layer("output", 1))
    net.connect("input", "hidden")
    net.connect("hidden", "output")
    net.compile(error="binary_crossentropy", optimizer="adam")
    net.summary()
    net.model.summary()
    net.dataset.load([[[0, 0], [0]],
                      [[0, 1], [1]],
                      [[1, 0], [1]],
                      [[1, 1], [0]]])
    net.train(epochs=2000, accuracy=1, report_rate=25, plot=False)
    net.evaluate(show=True)
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    svg = net.to_svg()
    assert net is not None
Example #12
def test_cifar10():
    """
    Test the cifar10 API and training.
    """
    from conx import Network, Layer, Conv2DLayer, MaxPool2DLayer, FlattenLayer
    from keras.optimizers import RMSprop

    batch_size = 32
    num_classes = 10
    epochs = 200
    data_augmentation = True
    num_predictions = 20

    net = Network("CIFAR10")
    net.add(Layer("input", (32, 32, 3)))
    net.add(Conv2DLayer("conv1", 32, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv2", 32, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool1", pool_size=(2, 2), dropout=0.25))
    net.add(Conv2DLayer("conv3", 64, (3, 3), padding='same', activation='relu'))
    net.add(Conv2DLayer("conv4", 64, (3, 3), activation='relu'))
    net.add(MaxPool2DLayer("pool2", pool_size=(2, 2), dropout=0.25))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", 512, activation='relu', vshape=(16, 32), dropout=0.5))
    net.add(Layer("output", num_classes, activation='softmax'))
    net.connect()

    # initiate RMSprop optimizer
    opt = RMSprop(lr=0.0001, decay=1e-6)
    net.compile(error='categorical_crossentropy',
                optimizer=opt)
    net.get_dataset("cifar10")
    widget = net.dashboard()
    widget.goto("begin")
    widget.goto("next")
    widget.goto("end")
    widget.goto("prev")
    widget.prop_one()
    net.dataset.slice(10)
    net.dataset.shuffle()
    net.dataset.split(.5)
    net.train(plot=False)
    net.propagate(net.dataset.inputs[0])
    net.dataset.clear()
Example #13
def test_images():
    net = Network("MNIST")
    net.get_dataset("mnist")
    assert net.dataset.inputs.shape == [(28,28,1)]
    net.add(Layer("input", shape=(28, 28, 1), colormap="hot", minmax=(0,1)))
    net.add(FlattenLayer("flatten"))
    net.add(Layer("hidden1", shape=512, vshape=(16,32), activation='relu', dropout=0.2))
    net.add(Layer("hidden2", shape=512, vshape=(16,32), activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    net.connect('input', 'flatten')
    net.connect('flatten', 'hidden1')
    net.connect('hidden1', 'hidden2')
    net.connect('hidden2', 'output')
    net.compile(optimizer="adam", error="binary_crossentropy")
    svg = net.to_svg()
    assert svg is not None
    net.dataset.clear()
Example #14
from conx import Network

inputs = [[0, 0],
          [0, 1],
          [1, 0],
          [1, 1]]

def xor(inputs):
    a = inputs[0]
    b = inputs[1]
    return [[0.1, 0.9][int((a or b) and not(a and b))]]

net = Network(2, 2, 1)
net.set_inputs(inputs)
net.set_target_function(xor)
net.train()
net.test()

net = Network(2, 2, 2, 1)
net.set_inputs(inputs)
net.set_target_function(xor)
net.train(max_training_epochs=10000)
net.test()

inputs = [[[0, 0], [0, 0]],
          [[0, 1], [1, 1]],
          [[1, 0], [1, 1]],
          [[1, 1], [0, 0]]]

net = Network(2, 10, 2)
net.set_inputs(inputs)
Example #15
def test_xor2():
    """
    Two inputs, two outputs.
    """
    net = Network("XOR2")
    net.add(Layer("input1", shape=1))
    net.add(Layer("input2", shape=1))
    net.add(Layer("hidden1", shape=2, activation="sigmoid"))
    net.add(Layer("hidden2", shape=2, activation="sigmoid"))
    net.add(Layer("shared-hidden", shape=2, activation="sigmoid"))
    net.add(Layer("output1", shape=1, activation="sigmoid"))
    net.add(Layer("output2", shape=1, activation="sigmoid"))
    net.connect("input1", "hidden1")
    net.connect("input2", "hidden2")
    net.connect("hidden1", "shared-hidden")
    net.connect("hidden2", "shared-hidden")
    net.connect("shared-hidden", "output1")
    net.connect("shared-hidden", "output2")
    net.compile(error='mean_squared_error',
                optimizer=SGD(lr=0.3, momentum=0.9))

    net.dataset.load([([[0], [0]], [[0], [0]]), ([[0], [1]], [[1], [1]]),
                      ([[1], [0]], [[1], [1]]), ([[1], [1]], [[0], [0]])])
    net.train(2000, report_rate=10, accuracy=1)
    net.test()
    net.propagate_to("shared-hidden", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.propagate_to("hidden1", [[1], [1]])
    net.propagate_to("hidden2", [[1], [1]])
    net.propagate_to("output1", [[1], [1]])
    net.propagate_to("output2", [[1], [1]])
    net.save_weights("/tmp")
    net.load_weights("/tmp")
    net.test()
    svg = net.build_svg()
    assert net is not None
Example #16
File: ga.py Project: is44c/Calico
 def __init__(self, cnt):
     n = Network()
     n.add( Layer('input', 2) )
     n.add( Layer('hidden', 3) )
     n.add( Layer('output', 1) )
     n.connect('input', 'hidden')
     n.connect('hidden', 'output')
     n.setInputs([[0.0, 0.0],
                  [0.0, 1.0],
                  [1.0, 0.0],
                  [1.0, 1.0]])
     n.setOutputs([[0.0],
                   [1.0],
                   [1.0],
                   [0.0]])
     n.setVerbosity(0)
     n.setTolerance(.4)
     n.setLearning(0)
     g = n.arrayify()
     self.network = n
     GA.__init__(self,
                 Population(cnt, Gene, size=len(g), verbose=1,
                            min=-10, max=10, maxStep = 1,
                            imin=-10, imax=10, 
                            elitePercent = .01),
                 mutationRate=0.05, crossoverRate=0.6,
                 maxGeneration=400, verbose=1)
Example #17
def test_dataset():
    """
    Load MNIST dataset after network creation.
    """
    net = Network("MNIST")
    net.add(Layer("input", shape=784, vshape=(28, 28), colormap="hot", minmax=(0, 1)))
    net.add(Layer("hidden1", shape=512, vshape=(16, 32), activation='relu', dropout=0.2))
    net.add(Layer("hidden2", shape=512, vshape=(16, 32), activation='relu', dropout=0.2))
    net.add(Layer("output", shape=10, activation='softmax'))
    net.connect('input', 'hidden1')
    net.connect('hidden1', 'hidden2')
    net.connect('hidden2', 'output')
    net.compile(optimizer="adam", error="binary_crossentropy")
    net.dataset.get("mnist")
    assert net is not None
Example #18
import gzip
import pickle
import numpy
import os

from conx import Network

directory, filename = os.path.split(__file__)

with gzip.open(os.path.join(directory, 'mnist.pkl.gz'), 'rb') as f:
    try: # Python3
        u = pickle._Unpickler(f)
        u.encoding = 'latin1'
        data = u.load()
    except: # Python2
        data = pickle.load(f)
    train_set, validation_set, test_set = data

net = Network(784, 100, 1)

inputs = [train_set[0][i] for i in range(len(train_set[0]))]
targets = [[train_set[1][i]/9.0] for i in range(len(train_set[0]))]

inputs = inputs[:100]
targets = targets[:100]

def display_digit(vector):
    for r in range(28):
        for c in range(28):
            v = int(vector[r * 28 + c] * 10)
            ch = " .23456789"[v]
            print(ch, end="")
        print()
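A quick check of the helper above (hedged: assumes the MNIST arrays loaded earlier in this example are in place):

# Render the first training digit as ASCII art and show its scaled target.
display_digit(inputs[0])
print("target:", targets[0])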