Esempio n. 1
0
        [0, 1, 0],  #2
        [1, 1, 1],  #7
    ]

    # Ground-truth class index for each training sample (column-vector style,
    # one inner list per sample) — pairs with the feature list defined above.
    gt = [
        [0],
        [6],
        [4],
        [3],
        [2],
        [7],
    ]

    # Number of output classes for the classifier head.
    num_class = 8

    # MLP classifier: 3 input features -> wide hidden stack -> num_class logits,
    # moved to the GPU. NOTE(review): the nn.ReLU() directly before the final
    # Linear(8, num_class) zeroes negative activations right before the logit
    # layer — presumably intentional, but worth confirming.
    net = nn.Sequential(nn.Linear(3, 4), nn.ReLU(), nn.Linear(4, 1000),
                        nn.ReLU(), nn.Linear(1000, 800), nn.ReLU(),
                        nn.Linear(800, 100), nn.ReLU(), nn.Linear(100, 8),
                        nn.ReLU(), nn.Linear(8, num_class)).to_gpu()

    #### learning
    # FIX(review): the original bound the optimizer instance to the name
    # `optim`, shadowing the `jhML.optimizer as optim` module import — any
    # later `optim.<Optimizer>` reference in this scope would hit the
    # instance, not the module. Bind the instance to a distinct name.
    optimizer = optim.RMSprop(net.params(), lr=1e-4)
    num_epoch = int(1e+4)
    for _ in range(num_epoch):  # epoch counter itself is unused
        # Re-wrap the raw data each iteration so every pass builds a fresh
        # autograd graph on the GPU.
        data, label = jhML.Variable(x).to_gpu(), jhML.Variable(gt).to_gpu()
        optimizer.zero_grad()                        # clear accumulated grads
        pred = net(data)                             # forward pass -> logits
        loss = F.softmax_cross_entropy(pred, label)  # classification loss
        loss.backward()                              # backprop
        optimizer.step()                             # RMSprop update
Esempio n. 2
0
        os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
import jhML
import jhML.layers as nn
import jhML.functions as F
import jhML.optimizer as optim
from jhML.tutorial.dataset import MNIST


def get_accuracy(pred, gt):
    """Return the fraction of predictions that exactly match the labels.

    Both arguments are moved to host memory via ``jhML.as_cpu`` first so the
    comparison runs as a plain NumPy operation regardless of where the
    tensors were computed.

    Returns 0.0 for an empty label set instead of raising ZeroDivisionError.
    """
    pred = jhML.as_cpu(pred)
    gt = jhML.as_cpu(gt)
    if len(gt) == 0:  # guard: empty batch would divide by zero below
        return 0.0
    return np.sum((pred == gt).astype(float)) / len(gt)


# Fully-connected classifier for flattened 28x28 MNIST images.
# Layer widths: 784 -> 784 -> 1000 -> 1000 -> 100 -> 10 -> num_class,
# with a ReLU after every hidden Linear layer.
net = nn.Sequential(
    nn.Linear(28 * 28, 28 * 28),
    nn.ReLU(),
    nn.Linear(28 * 28, 1000),
    nn.ReLU(),
    nn.Linear(1000, 1000),
    nn.ReLU(),
    nn.Linear(1000, 100),
    nn.ReLU(),
    nn.Linear(100, 10),
    nn.ReLU(),
    nn.Linear(10, MNIST.num_class),
)

if __name__ == "__main__":

    # Train on GPU; MNIST has 10 digit classes.
    using_gpu = True
    num_class = 10
    # Flattened 28*28 samples; separate train/test splits.
    dataset_mnist_train = MNIST(train=True, flatten=True)
    dataset_mnist_test = MNIST(train=False, flatten=True)
    # drop_last=True keeps every mini-batch at exactly batch_size samples.
    train_dataloader = jhML.Dataloader(dataset_mnist_train,
                                       batch_size=256,
                                       gpu=using_gpu,
                                       drop_last=True)
Esempio n. 3
0
import jhML
import jhML.layers as nn
import jhML.functions as F
import jhML.optimizer as optim
from jhML.tutorial.dataset import MNIST

def get_accuracy(pred, gt):
    """Fraction of predictions equal to their ground-truth labels."""
    pred_host = jhML.as_cpu(pred)
    gt_host = jhML.as_cpu(gt)
    hits = (pred_host == gt_host).astype(float)
    return np.sum(hits) / len(gt_host)
    


# Dense MNIST classifier: 784 -> 784 -> 1000 -> 1000 -> 100 -> 10 -> num_class,
# with a ReLU after every hidden Linear layer.
net = nn.Sequential(nn.Linear(28 * 28, 28 * 28), nn.ReLU(),
                    nn.Linear(28 * 28, 1000), nn.ReLU(),
                    nn.Linear(1000, 1000), nn.ReLU(),
                    nn.Linear(1000, 100), nn.ReLU(),
                    nn.Linear(100, 10), nn.ReLU(),
                    nn.Linear(10, MNIST.num_class))

if __name__ == "__main__":

    # Train on GPU; MNIST has 10 digit classes.
    using_gpu = True
    num_class = 10
Esempio n. 4
0
        os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
import jhML
import jhML.layers as nn
import jhML.functions as F
import jhML.optimizer as optim
from jhML.tutorial.dataset import MNIST


def get_accuracy(pred, gt):
    """Return mean exact-match accuracy of *pred* against *gt*."""
    pred, gt = jhML.as_cpu(pred), jhML.as_cpu(gt)
    correct = (pred == gt).astype(float)
    return np.sum(correct) / len(gt)


# MNIST classifier with Dropout regularization on the deeper hidden layers.
# NOTE(review): each Dropout sits before its ReLU — order preserved exactly
# from the original layer sequence.
net = nn.Sequential(
    nn.Linear(28 * 28, 28 * 28), nn.ReLU(),
    nn.Linear(28 * 28, 1000), nn.ReLU(),
    nn.Linear(1000, 1000), nn.Dropout(), nn.ReLU(),
    nn.Linear(1000, 100), nn.Dropout(), nn.ReLU(),
    nn.Linear(100, 10), nn.Dropout(), nn.ReLU(),
    nn.Linear(10, MNIST.num_class),
)

if __name__ == "__main__":

    # Train on GPU; MNIST has 10 digit classes.
    using_gpu = True
    num_class = 10
    # Flattened 28*28 samples; separate train/test splits.
    dataset_mnist_train = MNIST(train=True, flatten=True)
    dataset_mnist_test = MNIST(train=False, flatten=True)
    # drop_last=True keeps every mini-batch at exactly batch_size samples.
    train_dataloader = jhML.Dataloader(dataset_mnist_train,
                                       batch_size=256,
                                       gpu=using_gpu,
                                       drop_last=True)