# Seed NumPy's RNG so dataset generation and weight init are reproducible.
np.random.seed(1)

#%% Prepare...
n_classes = 2
# Plot ranges as [min, max, n_points] — presumably consumed by a plotting
# helper elsewhere; not used in the visible code. TODO confirm.
ploty = [-6, 6, 100]
plotx = [-6, 6, 100]
# make_spiral is project-local; assumed to return input matrix X and target
# matrix Y for a noisy two-arm spiral classification task — TODO confirm.
X, Y = make_spiral(n_arms=n_classes, noise=.4)

#%% Build ANN.
# Symbolic Theano variables for the network input and the targets.
sx = theano.tensor.matrix('x')
sy = theano.tensor.matrix('y')

# Fully-connected 2 -> 4 -> 2 -> 1 stack; the last layer squashes its
# output to (0, 1) with a sigmoid.
h = SimpleNode(sx, 2, 4)
h2 = SimpleNode(h, 4, 2)
out = SimpleNode(h2, 2, 1, nlin=pynnet.nlins.sigmoid)
# Mean squared error between the network output and the targets.
cost = errors.mse(out, sy)

# Drop BLAS link flags (lets Theano compile without an external BLAS).
theano.config.blas.ldflags = ''
# NOTE(review): `eval` shadows the Python builtin of the same name.
eval = theano.function([sx], out.output)
test = theano.function([sx, sy], cost.output)
# Training step: returns the cost and applies gradient-descent updates
# with learning rate 0.01 (get_updates is project-local).
train = theano.function([sx, sy],
                        cost.output,
                        updates=get_updates(cost.params, cost.output, 0.01))

print("Error at start:", test(X, Y))

# Full-batch gradient descent over the whole dataset.
for i in range(200000):
    train(X, Y)
print("Error after 200000:", test(X, Y))

#%% Example 2
# Seed NumPy's RNG so dataset generation and weight init are reproducible.
np.random.seed(1)

#%% Prepare...
n_classes = 2
# Plot ranges as [min, max, n_points] — presumably consumed by a plotting
# helper elsewhere; not used in the visible code. TODO confirm.
ploty = [-6, 6, 100]
plotx = [-6, 6, 100]
# make_spiral is project-local; assumed to return input matrix X and target
# matrix Y for a noisy two-arm spiral classification task — TODO confirm.
X, Y = make_spiral(n_arms=n_classes, noise=.4)

#%% Build ANN.
# Symbolic Theano variables for the network input and the targets.
sx = theano.tensor.matrix('x')
sy = theano.tensor.matrix('y')

# Fully-connected 2 -> 4 -> 2 -> 1 stack; sigmoid output in (0, 1).
h = SimpleNode(sx, 2, 4)
h2 = SimpleNode(h, 4, 2)
out = SimpleNode(h2, 2, 1, nlin=pynnet.nlins.sigmoid)
# Mean squared error between the network output and the targets.
cost = errors.mse(out, sy)

# Drop BLAS link flags (lets Theano compile without an external BLAS).
theano.config.blas.ldflags=''
# NOTE(review): `eval` shadows the Python builtin of the same name;
# it is used below by pfunc().
eval = theano.function([sx], out.output)
test = theano.function([sx, sy], cost.output)
# Training step: returns the cost and applies gradient-descent updates
# with learning rate 0.01 (get_updates is project-local).
train = theano.function([sx, sy], cost.output,
                        updates=get_updates(cost.params, cost.output, 0.01))

print("Error at start:", test(X, Y))

# Full-batch gradient descent over the whole dataset.
for i in range(200000):
    train(X, Y)
print("Error after 200000:", test(X, Y))
def pfunc(x):
    """Return the complementary class score: one minus the network output."""
    return 1. - eval(x)
#%% Example 3
# Next-step prediction task: the target sequence is the training sequence
# rotated left by one element (the last target wraps to the first input).
trainx = trainseq
trainy = theano.shared(numpy.concatenate([trainseq.get_value()[1:], trainseq.get_value()[:1]], axis=0))

# Held-out sequence of 3-bit vectors; targets are again the inputs
# shifted by one step (with wrap-around).
testseq = [[0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 0, 0], [0, 0, 1], [1, 0, 0], [0, 1, 1], [1, 1, 0], [1, 0, 1], [1, 0, 0]]
testx = testseq
testy = testseq[1:] + testseq[:1]

# Symbolic Theano variables for one input sequence and its targets.
x = theano.tensor.matrix('x')
y = theano.tensor.matrix('y')

# 3 -> 6 input projection, then a recurrent layer: the wrapped node sees
# the 6-dim input concatenated with its previous 6-dim output (hence 12 in),
# and the final layer maps back to 3 outputs. (RecurrentWrapper is
# project-local — assumed concatenation semantics; TODO confirm.)
map_in = SimpleNode(x, 3, 6)
rn = RecurrentWrapper(map_in, lambda x_n: SimpleNode(x_n, 12, 6),
                      outshp=(6,), name='rl')
out = SimpleNode(rn, 6, 3)

# Mean squared error between predicted and target next steps.
cost = errors.mse(out, y)

# NOTE(review): the eval() wrapper below shadows the Python builtin.
eval_sub = theano.function([x], out.output)
def eval(x):
    """Run the network on one input sequence and return its output.

    The recurrent layer's memory is cleared afterwards so that each
    sequence starts from a known initial state rather than whatever
    state the previous sequence left behind.
    """
    result = eval_sub(x)
    rn.clear()
    return result

test_sub = theano.function([x, y], cost.output)


def test(x, y):
    """Return the MSE of the network on sequence `x` with targets `y`.

    Clears the recurrent layer's memory afterwards, mirroring eval(),
    so the next sequence starts from a known state.

    Bug fix: the original computed the error into `res` but never
    returned it, so callers always got None.
    """
    res = test_sub(x, y)
    # clear here too
    rn.clear()
    return res
#%% Example 4
# Held-out sequence of 3-bit vectors; targets are the inputs rotated
# left by one step (next-element prediction with wrap-around).
testseq = [[0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 0, 0], [0, 0, 1], [1, 0, 0],
           [0, 1, 1], [1, 1, 0], [1, 0, 1], [1, 0, 0]]
testx = testseq
testy = testseq[1:] + testseq[:1]

# Symbolic Theano variables for one input sequence and its targets.
x = theano.tensor.matrix('x')
y = theano.tensor.matrix('y')

# 3 -> 6 input projection, then a recurrent layer: the wrapped node sees
# the 6-dim input concatenated with its previous 6-dim output (hence 12 in),
# and the final layer maps back to 3 outputs. (RecurrentWrapper is
# project-local — assumed concatenation semantics; TODO confirm.)
map_in = SimpleNode(x, 3, 6)
rn = RecurrentWrapper(map_in,
                      lambda x_n: SimpleNode(x_n, 12, 6),
                      outshp=(6, ),
                      name='rl')
out = SimpleNode(rn, 6, 3)

# Mean squared error between predicted and target next steps.
cost = errors.mse(out, y)

# NOTE(review): the eval() wrapper below shadows the Python builtin.
eval_sub = theano.function([x], out.output)


def eval(x):
    """Evaluate the network on an input sequence and return its output.

    Afterwards the recurrent layer's state is reset; otherwise the next
    sequence would start from whatever hidden state this one left, i.e.
    an unknown state.
    """
    out_vals = eval_sub(x)
    rn.clear()
    return out_vals


test_sub = theano.function([x, y], cost.output)