Ejemplo n.º 1
0
Archivo: main.py Proyecto: VamosC/MNIST
def train():
    """Train the CNN on the MNIST training split for 10 epochs and save it.

    Reads the raw IDX files from ./data, trains with Adam + StepLR, and
    writes the weights via save_model(model, 'model-1').
    """
    images, labels = process_data('./data/train-images-idx3-ubyte',
                                  './data/train-labels-idx1-ubyte')
    train_set = Mnist(images, labels)
    train_loader = DataLoader(train_set, batch_size=64, shuffle=True)
    model = Convnet()

    optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
    # Halve the LR every 20 epochs. It never fires in a 10-epoch run, but
    # the schedule stays correct if the epoch count is ever raised.
    lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                   step_size=20,
                                                   gamma=0.5)
    aver = Averager()
    for epoch in range(1, 11):
        model.train()
        for i, batch in enumerate(train_loader, 1):
            image, label = batch
            score = model(image)
            loss = F.cross_entropy(score, label.long())
            acc = count_acc(score, label, aver)
            print('epoch %d batch %d acc: %f' % (epoch, i, acc))
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        # Bug fix: since PyTorch 1.1 the scheduler must step AFTER the
        # epoch's optimizer steps, not before — the original order skips
        # the initial learning-rate value.
        lr_scheduler.step()
        # NOTE(review): `aver` is never reset between epochs, so this is a
        # cumulative figure, not a per-epoch one — confirm Averager semantics.
        print('epoch %d acc: %f' % (epoch, aver.item()))
    save_model(model, 'model-1')
Ejemplo n.º 2
0
def main():
    """Render the first randomly-drawn MNIST digit as a grey-scale image."""
    mnist = Mnist(os.path.join(os.getcwd(), "datasets/train.csv"), rows=100)
    for dataset in mnist.random()[:1]:
        # reshape from 1x784 to 28x28 and scale to 0-1
        # Bug fix: numpy.asfarray was deprecated in NumPy 1.25 and removed
        # in 2.0; asarray with an explicit float dtype is the supported form.
        array = numpy.asarray(dataset.data, dtype=float).reshape((28, 28)) / 255
        fig, ax = plt.subplots()
        im = ax.imshow(array, cmap=plt.get_cmap('Greys'), interpolation='None')
        fig.colorbar(im)
        ax.set_title(dataset.label)
        plt.show()
Ejemplo n.º 3
0
def main():
    """ EntryPoint """

    # Read the MNIST rows into a plain list of lists.
    frame = pd.read_csv('mnist.csv', delimiter=',')
    dataset = frame.values.tolist()

    # One-hot target vectors for the ten digit classes.
    digits = pd.Series(list(range(10)))
    one_hot = pd.get_dummies(digits).values.tolist()

    mnist = Mnist(dataset, one_hot)
    train_inputs, validation_inputs, test_inputs = mnist.load_inputs()
    train_targets, validation_targets, test_targets = mnist.load_targets()
    print("Data loaded ...")

    input_size = train_inputs.shape[0]  # 784
    output_size = train_targets.shape[0]  # 10
    hidden_layer_size = 50
    n_epochs = 150

    # Two hidden layers of equal width.
    ann = ANN([input_size, hidden_layer_size, hidden_layer_size, output_size])

    # Learn ...
    validation_accuracy, epochs = ann.learning(
        train_inputs, train_targets, validation_inputs, validation_targets,
        n_epochs)

    # Test ...
    test_accuracy, classes_accuracy = ann.evaluate(test_inputs, test_targets,
                                                   mnist)
    print('Test accuracy:', test_accuracy)

    # Validation-accuracy curve over the training epochs.
    plt.figure(1)
    plt.plot(epochs, validation_accuracy)
    plt.xlabel('Epoch')
    plt.ylabel('Accuracy')
    plt.grid()
    plt.title('Validation Accuracy')

    # Bar chart of accuracy broken down per digit class.
    plt.figure(2)
    positions = np.arange(10)
    plt.bar(positions, classes_accuracy)
    plt.xticks(positions, range(10))
    plt.ylabel('Accuracy')
    plt.title('Accuracy for each Class')
    plt.show()
Ejemplo n.º 4
0
 def __init__(self, mnist_type, is_train, max_data_num, transform):
     """Select and load one of the digit datasets by name."""
     self.is_train = is_train
     # Dispatch table: dataset name -> loader class. An unrecognised name
     # leaves self.data_type unset, so the attribute access below raises
     # AttributeError — same behaviour as the original if/elif chain.
     loaders = {
         "MNIST": Mnist,
         "MNIST_M": MnistM,
         "SVHN": Svhn,
         "SYNTHDIGITS": SynthDigits,
     }
     loader_cls = loaders.get(mnist_type)
     if loader_cls is not None:
         self.data_type = loader_cls(is_train, max_data_num)
     #self.data_type = np.swapaxes(self.data_type, 0,2)
     self.file_size = self.data_type.file_size
     self.transform = transform
Ejemplo n.º 5
0
def main():
    """Train and evaluate two classifier heads on the latent MNIST data."""
    latent_dim = 32
    original_mnist = Mnist()
    ld_mnist = LdMnist("ld_mnist_dataset_v3")
    class_decoder = tf.keras.models.load_model(
        os.path.join("saved_models_v3", "class_decoder"))

    # Same train/print/evaluate/free cycle for both model variants.
    experiments = (
        (create_and_train_linear_model, "Linear model evaluation:"),
        (create_and_train_middle_layers,
         "Middle layers trained from scratch evaluation:"),
    )
    for build_model, banner in experiments:
        model = build_model(latent_dim, ld_mnist)
        print(banner)
        evaluate(model, ld_mnist, original_mnist, class_decoder)
        del model  # release the trained model before the next run
Ejemplo n.º 6
0
Archivo: main.py Proyecto: VamosC/MNIST
def test():
    """Evaluate the saved CNN on the MNIST test split and print accuracy."""
    images, labels = process_data('./data/t10k-images-idx3-ubyte',
                                  './data/t10k-labels-idx1-ubyte')
    test_set = Mnist(images, labels)
    # Evaluation order does not affect accuracy, so shuffling is pointless;
    # a fixed order also makes runs reproducible.
    test_loader = DataLoader(test_set, batch_size=64, shuffle=False)
    model = Convnet()
    model.load_state_dict(torch.load('./model/model-1.pth'))
    model.eval()
    aver = Averager()
    # Inference only: disable autograd to save memory and time.
    with torch.no_grad():
        for i, batch in enumerate(test_loader, 1):
            image, label = batch
            score = model(image)
            count_acc(score, label, aver)
    print('test acc: %f' % aver.item())
Ejemplo n.º 7
0
def main(unused_argv):
    """Train the MNIST estimator and print its evaluation metrics."""
    tf.logging.set_verbosity(tf.logging.DEBUG)

    mnist = Mnist("model")

    # Fetch the dataset and pull out the train/test numpy arrays.
    mnist_data = tf.contrib.learn.datasets.load_dataset("mnist")
    train_images = mnist_data.train.images  # np.array of flattened images
    train_labels = np.asarray(mnist_data.train.labels, dtype=np.int32)
    test_images = mnist_data.test.images  # np.array of flattened images
    test_labels = np.asarray(mnist_data.test.labels, dtype=np.int32)

    mnist.train(train_images, train_labels)

    print(mnist.evaluate(test_images, test_labels))
Ejemplo n.º 8
0
import numpy as np
import image_viewer
from fc import FullyConnected
from mnist import Mnist

# Load the MNIST train/test splits and fit a fully connected network.
mnist = Mnist()
train_x, train_y = mnist.get_train_data()
test_x, test_y = mnist.get_test_data()

fc = FullyConnected(train_x, train_y, test_x, test_y)
fc.train()

Ejemplo n.º 9
0
    one_hot_label[one_hot_label == 1] = 0.99
    return one_hot_label
  
def evaluate(mnist_data, network):
    """Return the network's test-set accuracy as an 'NN.NN%' string.

    Args:
        mnist_data: object exposing ``testImages`` and ``testLabels``.
        network: object whose ``evaluate(images, labels)`` returns a
            ``(corrects, wrongs)`` pair of counts.
    """
    # Bug fix: the original ignored the ``mnist_data`` parameter and read
    # the module-level global ``mnist`` instead.
    corrects, wrongs = network.evaluate(mnist_data.testImages,
                                        mnist_data.testLabels)
    total = corrects + wrongs
    # Guard against an empty test set instead of raising ZeroDivisionError.
    if total == 0:
        return "0.00%"
    return "{:.2f}%".format((corrects / total) * 100)


logger.debug("START")
# Load the train/test images and labels from the raw IDX files.
mnist = Mnist("train-images.idx3-ubyte", "train-labels.idx1-ubyte",
               "t10k-images.idx3-ubyte",  "t10k-labels.idx1-ubyte")
# Shuffled index order over the training images; the data itself stays put.
shuffledList = list(range(len(mnist.trainImages)))
random.shuffle(shuffledList)

epochs = 2 
 
# Accuracy strings collected during training (filled elsewhere in the file).
testPercent = []
trainingPercent = []

from network import Network

# Two identically-configured networks: 784 inputs, one hidden layer of 100,
# 10 outputs, learning rate 0.2. NOTE(review): the names suggest one is
# trained in batches and one per sample — confirm; the loop body is
# truncated in this chunk.
nn_batch = Network([28 * 28, 100, 10], 0.2)
nn_single = Network([28 * 28, 100, 10], 0.2)

for epoch in range(epochs):
    logger.info("epoch {}".format(epoch))
Ejemplo n.º 10
0
batch_size = 32  # batch size
cat_dim = 10  # total categorical factor
con_dim = 2  # total continuous factor
rand_dim = 38  # NOTE(review): presumably the noise-vector size — confirm
num_epochs = 30
debug_max_steps = 1000
save_epoch = 5  # checkpoint interval, in epochs
max_epochs = 50

#
# inputs
#

# MNIST input tensor ( with QueueRunner )
data = Mnist(batch_size=batch_size, num_epochs=num_epochs)
num_batch_per_epoch = data.train.num_batch

# input images and labels
x = data.train.image
y = data.train.label

# labels for discriminator
y_real = tf.ones(batch_size)  # target 1 for every real sample
y_fake = tf.zeros(batch_size)  # target 0 for every generated sample

# discriminator labels ( half 1s, half 0s )
y_disc = tf.concat(axis=0, values=[y, y * 0])

#
# create generator
Ejemplo n.º 11
0
 def setUp(self):
     """Load the bundled CSV fixture into a Mnist instance for each test."""
     fixture = os.path.join(os.path.dirname(__file__), 'resources/mnist.csv')
     self._mnist = Mnist(fixture)
Ejemplo n.º 12
0
cat_dim = 10  # total categorical factor
con_dim = 2  # total continuous factor
rand_dim = 38
debug_max_steps = 1000
# save a checkpoint every 5 epochs
save_epoch = 5
# maximum number of training epochs
# (one epoch = one full pass over the complete dataset)
max_epochs = 50

#
# inputs
#

# MNIST input tensor ( with QueueRunner )
# NOTE(review): `batch_size` is not defined in this fragment — presumably
# set earlier in the full file; confirm.
data = Mnist(batch_size=batch_size)
# number of training batches per epoch
num_batch_per_epoch = data.train.num_batch

# input images and labels
# training images and their labels
x = data.train.image
y = data.train.label

# labels for discriminator
# treat every real image as having target label 1
y_real = tf.ones(batch_size)
y_fake = tf.zeros(batch_size)

# discriminator labels ( half 1s, half 0s )
# concatenated into one 1-D tensor: leading part 1s, trailing part 0s
Ejemplo n.º 13
0
 def refresh_inputs(x):
   # Rebuild the MNIST dataloader iterator and reset the RNG to seed 0.
   loader, _stale_rng = x
   fresh_iter = asl.refresh_iter(loader, lambda d: Mnist(asl.util.image_data(d)))
   return [fresh_iter, random.Random(0)]
Ejemplo n.º 14
0
from collections import defaultdict
import numpy as np
from PIL import Image
from mnist import Mnist

# Digit-recognition model loaded once at import time.
mnist_model = Mnist()


def resolve(file_path):
    # Open the image at file_path and analyse its colour distribution.
    # NOTE(review): the tail of this function is truncated in this chunk;
    # comments below cover only the visible part.
    img = Image.open(file_path, 'r')
    # img.show()
    img_array = np.array(img)

    def color_to_coordinates(image):
        # Map each pixel colour tuple to the list of (row, col) coordinates
        # holding it. NOTE(review): the `image` parameter is unused — the
        # closure reads the outer `img_array` instead; confirm intent.
        d = defaultdict(list)  # matches color with coordinates
        for i, u in enumerate(img_array):
            for j, v in enumerate(u):
                t = tuple(v)
                d[t].append((i, j))
        return d

    def count_to_color(clr_to_coor):
        # Invert the mapping: pixel count -> colours occurring that often.
        d2 = defaultdict(list)
        for x, y in clr_to_coor.items():
            d2[len(y)].append(x)
        return d2

    def max_color(cnt_to_clr):
        # Walk the counts from largest to smallest (loop truncated here).
        k = iter(sorted(cnt_to_clr.keys(), reverse=True))
        while True:
            kk = next(k)
Ejemplo n.º 15
0
# -*- coding:utf-8 -*-
from PIL import Image
from flask import Flask, request
from flask_cors import CORS
from mnist import Mnist
import json
from io import BytesIO

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the browser front-end

# Recognition model loaded once at startup and shared across requests.
mn = Mnist()


@app.route("/", methods=['POST'])
def index():
    """Accept an uploaded digit photo and return the recognised number."""
    upload = request.files.get("img_photo")
    buffer = BytesIO(upload.read())
    image = Image.open(buffer)
    prediction = mn.get_pic_number(image)
    # Drop the in-memory image data before building the response.
    del buffer
    del image
    return json.dumps({"result": str(prediction[0])})


if __name__ == "__main__":
    # Bind to all interfaces so the service is reachable from outside.
    app.run(host="0.0.0.0", port=5000)
Ejemplo n.º 16
0
import time

import numpy as np
import torch

from mnist import Mnist
from mnist_cnn_pytorch import Net
from operatorDemo import *

# Project-local MNIST loader rooted at ./data/mnist/.
mnist = Mnist("./data/mnist/")
# Number of timed forward passes per benchmark run.
n_epoch = 1


def test_pytorch():
    """Time n_epoch forward passes of the PyTorch CNN on the test images."""
    x_train, y_train, x_valid, y_valid = map(
        torch.tensor, (mnist.tr_x, mnist.tr_y, mnist.te_x, mnist.te_y))
    model = Net()
    print(model)
    # Bug fix: torch.tensor(existing_tensor) copies the data and emits a
    # UserWarning; .to() is the supported way to convert the dtype.
    x_valid = x_valid.to(torch.float32)
    model.eval()  # inference mode for any dropout/batch-norm layers
    time_start = time.time()
    # Disable autograd so the benchmark measures only the forward pass.
    with torch.no_grad():
        for i in range(n_epoch):
            # model(x) invokes __call__ (and hooks); preferred over .forward().
            model(x_valid)

    time_end = time.time()
    time_cost = time_end - time_start
    print("totally cost %.3f sec" % time_cost)


def test_numpy_serial():
    np.random.seed(1)
    test_x = mnist.te_x[:, ].reshape(10000, 28, 28, 1)