def make_network(game, config, residuals=5, name="Residual CNN"):
    """Build the full residual CNN for playing `game`.

    Parameters
    ----------
    game : object with `v` (rows) and `h` (columns) attributes, used to
        size the board input plane.
    config : settings object providing LEARNING_RATE and MOMENTUM.
    residuals : int, number of residual blocks stacked after the initial
        convolutional block (default 5).
    name : str, the conx Network name.

    Returns
    -------
    cx.Network with a policy head and a value head, compiled with SGD.
    """
    net = cx.Network(name)
    # Two planes per board cell (one per player), sized from the game board.
    net.add(cx.Layer("main_input", (game.v, game.h, 2),
                     colormap="Greys", minmax=(0, 1)))
    out_layer = add_conv_block(net, "main_input")
    for _ in range(residuals):
        out_layer = add_residual_block(net, out_layer)
    # Both heads branch off the final residual block.
    add_policy_block(net, out_layer)
    add_value_block(net, out_layer)
    net.compile(
        loss={'value_head': 'mean_squared_error',
              'policy_head': softmax_cross_entropy_with_logits},
        optimizer="sgd",
        lr=config.LEARNING_RATE,
        # BUG FIX: momentum was set from config.LEARNING_RATE (copy-paste
        # error); SGD momentum should come from its own config constant.
        # NOTE(review): assumes config defines MOMENTUM — confirm.
        momentum=config.MOMENTUM,
        loss_weights={'value_head': 0.5, 'policy_head': 0.5},
    )
    # Hide the internal layers so visualization shows only the I/O heads.
    for layer in net.layers:
        if layer.kind() == "hidden":
            layer.visible = False
    return net
import conx as cx

# Two-stream XOR variant: two separate input/hidden paths meet in a
# shared hidden layer that feeds two output banks.
net = cx.Network("XOR2")
net.add(cx.Layer("input1", 2))
net.add(cx.Layer("input2", 2))
net.add(cx.Layer("hidden1", 2, activation="sigmoid"))
net.add(cx.Layer("hidden2", 2, activation="sigmoid"))
net.add(cx.Layer("shared-hidden", 2, activation="sigmoid"))
net.add(cx.Layer("output1", 2, activation="sigmoid"))
net.add(cx.Layer("output2", 2, activation="sigmoid"))

# Wire both paths through the shared hidden layer.
for src, dst in [("input1", "hidden1"),
                 ("input2", "hidden2"),
                 ("hidden1", "shared-hidden"),
                 ("hidden2", "shared-hidden"),
                 ("shared-hidden", "output1"),
                 ("shared-hidden", "output2")]:
    net.connect(src, dst)

net.compile(loss='mean_squared_error',
            optimizer=cx.SGD(lr=0.3, momentum=0.9))

# Each pattern: ([inputs1, inputs2], [targets1, targets2], [label, label]).
patterns = [
    ([[0, 0], [0, 0]], [[0, 0], [0, 0]], ["one", "one"]),
    ([[0, 0], [1, 1]], [[1, 1], [1, 1]], ["two", "two"]),
    ([[1, 1], [0, 0]], [[1, 1], [1, 1]], ["three", "three"]),
    ([[1, 1], [1, 1]], [[0, 0], [0, 0]], ["four", "four"]),
]
net.dataset.load(patterns)
net.train(2000, report_rate=10, accuracy=1)
net.test()
#
# Build and visualize an XOR model with ConX.
#
import conx as cx

# The four XOR patterns: [inputs, target].
xor_patterns = [[[0, 0], [0]],
                [[0, 1], [1]],
                [[1, 0], [1]],
                [[1, 1], [0]]]

# Shorthand constructor: input(2) -> hidden(5) -> output(1), all sigmoid.
net = cx.Network("XOR", 2, 5, 1, activation="sigmoid")
net.dataset.load(xor_patterns)
net.compile(error='mean_squared_error', optimizer="sgd",
            lr=0.3, momentum=0.9)
net.train(2000, report_rate=10, accuracy=1.0)
net.picture()
import conx as cx

# XOR targets augmented with a constant 0.5 in the second output slot.
training_data = [[[0, 0], [0, 0.5]],
                 [[0, 1], [1, 0.5]],
                 [[1, 0], [1, 0.5]],
                 [[1, 1], [0, 0.5]]]

# Build the network layer by layer, then connect them as a chain.
net = cx.Network("XOR")
net.add(cx.Layer("input", 2))
net.add(cx.Layer("hidden1", 3, activation="relu"))
net.add(cx.Layer("hidden2", 4, activation="relu"))
net.add(cx.Layer("output", 2, activation="softmax"))
for upstream, downstream in [("input", "hidden1"),
                             ("hidden1", "hidden2"),
                             ("hidden2", "output")]:
    net.connect(upstream, downstream)
net.compile(error='mean_squared_error',
            optimizer=cx.SGD(lr=0.3, momentum=0.9))

# NOTE: Network("XOR", 2, 3, 4, 1, activation="sigmoid") is shorthand for
# the explicit add()/connect() sequence used above (with sigmoid layers).

net.dataset.load(training_data)
net.train(2000, report_rate=10, accuracy=1)
net.test()
# Interactive viewer: step through `images` one at a time until the user
# quits. NOTE(review): `images`, `k`, `plt`, `cmap`, and `interpolation`
# are assumed to be defined earlier in this file — confirm.
while True:
    print('image %d' % k)
    plt.imshow(images[k], cmap=cmap, interpolation=interpolation)
    plt.draw()
    k += 1
    # BUG FIX: raw_input() does not exist in Python 3 (NameError at
    # runtime); use input() instead. The conx API used below is
    # Python 3 only.
    answer = input('RETURN to continue, q to quit...')
    if answer == 'q':
        break

# to save:
#np.savez_compressed('mydataset.npz', images=images, labels=labels)

plt.ion()

#---------------------------------------------------------------------------
# Fully-connected MNIST classifier: 784 inputs (28x28 flattened), two
# dropout-regularized relu layers, softmax over the 10 digit classes.
net = cx.Network("MNIST")
net.add(cx.Layer("input", shape=784, vshape=(28, 28)))
net.add(cx.Layer("hidden1", shape=512, vshape=(16, 32),
                 activation='relu', dropout=0.2))
net.add(cx.Layer("hidden2", shape=512, vshape=(16, 32),
                 activation='relu', dropout=0.2))
net.add(cx.Layer("output", shape=10, activation='softmax'))
import conx as cx

# Small convolutional network for 28x28 single-channel images (MNIST).
net = cx.Network("Image")
net.add(cx.ImageLayer("input", (28, 28), 1))
net.add(cx.Conv2DLayer("conv1", 10, (5, 5), activation="relu"))
net.add(cx.Conv2DLayer("conv2", 10, (5, 5), activation="relu"))
net.add(cx.MaxPool2DLayer("pool1", pool_size=(2, 2)))
net.add(cx.FlattenLayer("flatten"))
net.add(cx.Layer("hidden1", 20, activation="relu"))
net.add(cx.Layer("output", 10, activation="softmax"))

# No arguments: connect the layers in the order they were added.
net.connect()

net.compile(error='categorical_crossentropy', optimizer="adam")
net.dataset.get("mnist")

# NOTE: Network("XOR", 2, 3, 4, 1, activation="sigmoid") is shorthand for
# building the same chain with explicit add()/connect() calls.
#net.dataset.load(ds)
#net.train(2000, report_rate=10, accuracy=1)
import conx as cx

# Two scalar inputs merged by an AddLayer, then a small dense head.
net = cx.Network("Add")
net.debug = True  # verbose construction output while building the graph
net.add(cx.Layer("one", 1))
net.add(cx.Layer("two", 1))
net.add(cx.AddLayer("add"))
net.add(cx.Layer("hidden", 2))
net.add(cx.Layer("output", 1))
for src, dst in [("one", "add"),
                 ("two", "add"),
                 ("add", "hidden"),
                 ("hidden", "output")]:
    net.connect(src, dst)
net.compile(error="mse", optimizer="adam")