/
XOR.py
50 lines (40 loc) · 1.1 KB
/
XOR.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
import nn.nn as nn
import numpy as np
# XOR
###############################################################
# Network: 2 inputs -> 3 hidden units (relu) -> 2 outputs (softmax),
# trained with cross-entropy loss.
# NOTE(review): `nn` is the `nn.nn` submodule imported above, used here
# as a callable taking (layer_sizes, activations, loss_name) -- presumably
# it is (or exports) a network class; confirm against the nn package.
net = nn([2, 3, 2], ['relu', 'softmax'], 'CrossEntropyLoss')
###############################################################
# Data generation: a 40x40 grid of points over [-40, 40]^2.
# Label 1 when the coordinates share a sign (first/third quadrant),
# label 0 otherwise -- the XOR-style checkerboard pattern.
# The original built a (2, 1600) array with column_stack and immediately
# zipped it back into pairs, and labeled with a Python loop; this builds
# the (1600, 2) point array directly and vectorizes the labels.
grid = np.linspace(-40, 40, 40)
x = np.array([(xi, yi) for xi in grid for yi in grid])
both_pos = (x[:, 0] > 0) & (x[:, 1] > 0)
both_neg = (x[:, 0] < 0) & (x[:, 1] < 0)
y = (both_pos | both_neg).astype(int)
# Shuffle samples and labels with the same permutation.
perm = np.random.permutation(len(y))
x = x[perm]
y = y[perm]
# Params
# bs: mini-batch size.
# lr: base learning rate (the training loop below passes 10*lr to adam,
#     so the effective step size is 1e-2).
# iterations: number of full batches per epoch; any remainder samples
#     beyond a multiple of bs are dropped (1600 % 10 == 0, so none here).
bs = 10
lr = 1e-3
iterations = len(y) // bs
# training
# Full-batch sweep per epoch: accumulate per-batch loss, step the
# optimizer after every mini-batch, and report the epoch average.
for e in range(10000):
    Loss = 0
    for start in range(0, iterations * bs, bs):
        batch_x = x[start:start + bs]
        batch_y = y[start:start + bs]
        net.zero_grad()
        out, batch_loss = net(batch_x, batch_y)
        Loss += batch_loss
        # Effective learning rate is 10 * lr (i.e. 1e-2).
        net.adam(10 * lr)
    # Visualize every fifth epoch only; print every epoch.
    if e % 5 == 0:
        net.viz(epoch=e, loss=Loss / iterations)
    print(f'epoch: {e}, loss: {Loss/iterations}')