Example #1
0
import numpy as np
import matplotlib.pyplot as plt

from otter import Variable
from otter.ops.loss import mean_squared_error
from otter.dam.graph import Graph
from otter.ops.activation import sigmoid
from otter.layers.common import Dense
from otter.optimizer import *

# Fix the RNG seed so the synthetic data and the training run are reproducible.
np.random.seed(2019)

with Graph() as g:
    # Synthetic regression problem: n samples, p input features, m targets,
    # all drawn from a standard normal distribution.
    n = 10000
    p = 10
    m = 5
    x = Variable(np.random.normal(0, 1, (n, p)))
    y = Variable(np.random.normal(0, 1, (n, m)))

    # Two dense layers, both with sigmoid activation, trained with plain
    # gradient descent (learning rate 0.8) under a mean-squared-error loss.
    layer1 = Dense(output_shape=10, activation=sigmoid)
    layer2 = Dense(output_shape=m, activation=sigmoid)
    optimizer = GradientDescent(0.8)
    loss = mean_squared_error

    # Per-iteration loss history (filled inside the training loop).
    loss_array = []

    for i in range(300):
        # Progress marker every 50 iterations.
        if i % 50 == 0:
            print(i)
        # Forward pass through both layers.
        # NOTE(review): the loop body appears truncated in this chunk -- the
        # loss computation, backward pass and optimizer step are not visible.
        a = layer1.forward(x)
        b = layer2.forward(a)
Example #2
0
    # Second convolution layer: 16 output channels, 3x3 kernels, stride 2.
    conv2 = Conv2D(out_channel=16,
                   kernel_size=(3, 3),
                   stride=(2, 2))

    # Flatten the conv feature maps, then classify via two dense layers
    # (128 hidden units, 10 output classes).
    flatten = Flatten()
    dense1 = Dense(output_shape=128)
    dense2 = Dense(output_shape=10)

    optimizer = GradientDescent(1e-4)

    # Per-step metrics collected during training for later inspection.
    loss_list = []
    acc_list = []
    norm1_list = []
    norm2_list = []
    g = Graph()

    iteration = 1000
    batch_size = 1024
    # Number of full mini-batches per pass; `n` is presumably the training-set
    # size defined in the enclosing scope -- TODO confirm.
    total_epoch = int(n / batch_size)

    for it_idx in range(iteration):
        print(f"The {it_idx}th iteration.")
        for epoch in tqdm(range(total_epoch)):

            # Slice the next mini-batch out of the full training arrays.
            x = x_train[epoch*batch_size: (epoch+1) * batch_size]
            y = y_train[epoch*batch_size: (epoch+1) * batch_size]

            # Wrap the raw numpy batches so the autodiff graph can track them.
            x = Variable(x)
            y = Variable(y)
Example #3
0
# Gradient sanity check: softmax + negative mean log-likelihood on random data.

from otter.dam.structure import Variable

# 20 rows of 10 random logits, pushed through softmax to get probabilities.
logits = Variable(np.random.normal(0, 1, (20, 10)))
probs = softmax(logits)
g = Graph()

# One integer class label per row, drawn from [0, 9).
labels = Variable(np.random.randint(0, 9, (20, 1)))

# Select the predicted probability of the true class for every row.
picked = probs.slice(labels.value.reshape((len(labels.value), )), axis=1)

# Negative mean log-likelihood of the selected probabilities.
nll = picked.safe_log().average().neg()

# Backpropagate from the scalar loss through the graph.
g.update_gradient(nll)
Example #4
0
        # Training images: skip the 16-byte IDX image header, then reshape
        # the raw bytes to (num_images, 28, 28).
        x_train = np.frombuffer(imgpath.read(), np.uint8,
                                offset=16).reshape(len(y_train), 28, 28)

    with gzip.open(paths[2], 'rb') as lbpath:
        # Test labels: skip the 8-byte IDX label header.
        y_test = np.frombuffer(lbpath.read(), np.uint8, offset=8)

    with gzip.open(paths[3], 'rb') as imgpath:
        # Test images, same byte layout as the training images.
        x_test = np.frombuffer(imgpath.read(), np.uint8,
                               offset=16).reshape(len(y_test), 28, 28)

    # Same (train, test) tuple layout as the common keras-style MNIST loaders.
    return (x_train, y_train), (x_test, y_test)


(x_train, y_train), (x_test, y_test) = read_data()

# Single conv layer: one output channel, 5x5 kernel, ReLU, stride 2, no bias.
conv = Conv2D(1, (5, 5), relu, (2, 2), bias=False)
# Inject a fixed kernel; `w` is presumably defined earlier in the file
# (not visible in this chunk) -- TODO confirm.
conv.w = w

# First training image as a (batch=1, channel=1, 28, 28) input tensor.
t = Variable(x_train[0].reshape(1, 1, 28, 28))
ans = conv.forward(t)
print(conv.w)
print(ans)
print(ans.shape)
# Visualise the single feature map produced by the convolution.
plt.imshow(ans.value[0][0])
plt.show()

g = Graph()

# Backpropagate from the conv output to populate the input gradient.
g.update_gradient(ans)

print(t.gradient)