def main():
    """Run a short training session of Net on the HASY dataset."""
    # Hyper-parameters for this demo run.
    settings = {
        "batch_size": 100,
        "test_batch_size": 1000,
        "epochs": 5,
        "lr": 0.01,
        "momentum": 0.5,
        "seed": 1,
        "log_interval": 10,
    }
    use_cuda = torch.cuda.is_available()  # the original no_cuda switch was False

    torch.manual_seed(settings["seed"])
    device = torch.device("cuda" if use_cuda else "cpu")

    loader_extra = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
    train_transform = transforms.Compose([transforms.ToTensor()])
    train_loader = torch.utils.data.DataLoader(
        HASY('../data', train=True, download=True, transform=train_transform),
        batch_size=settings["batch_size"], shuffle=True, **loader_extra)

    # Kept from the original: the result is discarded, so this call matters
    # only for whatever download/cache side effects load_data() has.
    hasy_tools.load_data()

    test_transform = transforms.Compose([transforms.ToTensor()])
    test_loader = torch.utils.data.DataLoader(
        HASY('../data', train=False, transform=test_transform),
        batch_size=settings["test_batch_size"], shuffle=True, **loader_extra)

    model = Net().to(device)
    optimizer = optim.SGD(model.parameters(),
                          lr=settings["lr"],
                          momentum=settings["momentum"])

    for epoch in range(1, settings["epochs"] + 1):
        train(settings["log_interval"], model, device, train_loader,
              optimizer, epoch)
        test(model, device, test_loader)
 def __init__(self):
     """Build the sketchpad window and load the trained symbol classifier."""
     tk.Tk.__init__(self)
     # Canvas geometry and the background grey value for the backing image.
     self.width = 128
     self.height = 128
     self.white = 255
     # Pointer-tracking state used by the freehand drawing handlers.
     self.previous_x = 0
     self.previous_y = 0
     self.x = 0
     self.y = 0
     self.points_recorded = []
     # Off-screen mirror of the sketch; the classifier reads from this image.
     self.image1 = Image.new("L", (self.width, self.height), self.white)
     self.draw = ImageDraw.Draw(self.image1)
     canvas_opts = {"width": self.width,
                    "height": self.height,
                    "bg": "white",
                    "cursor": "cross"}
     self.canvas = tk.Canvas(self, **canvas_opts)
     self.canvas.pack(side="top", fill="both", expand=True)
     self.button_clear = tk.Button(self,
                                   text="Clear",
                                   command=self.clear_all)
     self.button_clear.pack(side="top", fill="both", expand=True)
     # Mouse handlers: track the cursor, draw while button 1 is held.
     self.canvas.bind("<Motion>", self.tell_me_where_you_are)
     self.canvas.bind("<B1-Motion>", self.draw_from_where_you_are)
     # Trained model plus the label list used to decode its predictions.
     self.model = keras.models.load_model('model.h5')
     data = hasy_tools.load_data()
     self.labels = data['labels']
def load_data():
    """Return the HASY data set with one-hot labels and preprocessed inputs."""
    dataset = hasy_tools.load_data()

    # Turn integer class ids into one-hot rows of an identity matrix.
    eye = np.eye(hasy_tools.n_classes)
    dataset['y_train'] = eye[dataset['y_train'].squeeze()]
    dataset['y_test'] = eye[dataset['y_test'].squeeze()]

    # Normalise the image splits the same way for train and test.
    dataset['x_train'] = hasy_tools.preprocess(dataset['x_train'])
    dataset['x_test'] = hasy_tools.preprocess(dataset['x_test'])
    return dataset
def load_data():
    """Load HASY, one-hot encode the labels, and preprocess the images."""
    data = hasy_tools.load_data()

    # One-hot encode both label splits.
    for key in ('y_train', 'y_test'):
        data[key] = np.eye(hasy_tools.n_classes)[data[key].squeeze()]

    # Preprocess both image splits.
    for key in ('x_train', 'x_test'):
        data[key] = hasy_tools.preprocess(data[key])
    return data
 def __init__(self):
     """Initialise the Tk drawing surface and the recognition model."""
     self.width, self.height = 128, 128
     self.white = 255  # background grey value for the backing image
     tk.Tk.__init__(self)
     self.previous_x = self.previous_y = 0
     self.x = self.y = 0
     self.points_recorded = []
     # Off-screen copy of the sketch; the classifier reads from this.
     self.image1 = Image.new("L", (self.width, self.height), self.white)
     self.draw = ImageDraw.Draw(self.image1)
     self.canvas = tk.Canvas(
         self, width=self.width, height=self.height,
         bg="white", cursor="cross")
     self.canvas.pack(side="top", fill="both", expand=True)
     self.button_clear = tk.Button(
         self, text="Clear", command=self.clear_all)
     self.button_clear.pack(side="top", fill="both", expand=True)
     # Mouse handlers: track position, draw while button 1 is held.
     self.canvas.bind("<Motion>", self.tell_me_where_you_are)
     self.canvas.bind("<B1-Motion>", self.draw_from_where_you_are)
     self.model = keras.models.load_model('model.h5')
     self.labels = hasy_tools.load_data()['labels']
# Example #6 (0 votes)
#!/usr/bin/env python
"""Prepare the HASY data for a simple Keras dense model (script excerpt)."""

# project-local module
import hasy_tools
import numpy as np
# 3rd party modules
from keras.callbacks import CSVLogger
from keras.layers import Dense, Dropout, Flatten
from keras.models import Sequential

# Load the data
data = hasy_tools.load_data()

x_train = data["x_train"]
y_train = data["y_train"]
# NOTE(review): the validation split aliases the training split — presumably
# intentional for this demo; confirm a real hold-out set is not needed.
x_validate = data["x_train"]
y_validate = data["y_train"]
x_test = data["x_test"]
y_test = data["y_test"]

# One-Hot encoding: map integer class ids to identity-matrix rows.
y_train = np.eye(hasy_tools.n_classes)[y_train.squeeze()]
y_validate = np.eye(hasy_tools.n_classes)[y_validate.squeeze()]
y_test = np.eye(hasy_tools.n_classes)[y_test.squeeze()]

# Preprocessing (normalisation as implemented in hasy_tools.preprocess)
x_train = hasy_tools.preprocess(x_train)
x_validate = hasy_tools.preprocess(x_validate)
x_test = hasy_tools.preprocess(x_test)

# Define the model (continues beyond this excerpt)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Prepare the HASY data for evaluation with a saved Keras model (excerpt)."""

# 3rd party modules
import keras
import numpy as np

# project-local module
import hasy_tools

# Load the data
data = hasy_tools.load_data()

x_train = data['x_train']
y_train = data['y_train']
# NOTE(review): validation aliases the training split below — confirm this
# is intentional (no separate hold-out set is used here).
x_validate = data['x_train']
y_validate = data['y_train']
x_test = data['x_test']
y_test = data['y_test']

# One-Hot encoding: integer class ids -> identity-matrix rows.
y_train = np.eye(hasy_tools.n_classes)[y_train.squeeze()]
y_validate = np.eye(hasy_tools.n_classes)[y_validate.squeeze()]
y_test = np.eye(hasy_tools.n_classes)[y_test.squeeze()]

# Preprocessing (normalisation as implemented in hasy_tools.preprocess)
x_train = hasy_tools.preprocess(x_train)
x_validate = hasy_tools.preprocess(x_validate)
x_test = hasy_tools.preprocess(x_test)

# Load the model (continues beyond this excerpt)
# Example #8 (0 votes)
"""Load fold 1 of the HASY cross-validation split for a Keras CNN (excerpt)."""
from keras.layers import Convolution2D, MaxPooling2D
from keras.layers.advanced_activations import PReLU
from keras.optimizers import Adam
from keras import backend as K

import hasy_tools as ht

# Training configuration for this run.
batch_size = 128
nb_epoch = 5

# input image dimensions (taken from the dataset module)
img_rows, img_cols = ht.img_rows, ht.img_cols

# Load data — only fold 1 of the cross-validation split is used here.
# NOTE(review): `np` is used below but not imported in this excerpt —
# presumably imported earlier in the full file.
fold = 1
hasy_data = ht.load_data(mode='fold-{}'.format(fold), image_dim_ordering='tf')

x_train = hasy_data['x_train']
y_train = hasy_data['y_train']
x_test = hasy_data['x_test']
y_test = hasy_data['y_test']

# One-Hot encoding: integer class ids -> identity-matrix rows.
y_train = np.eye(ht.n_classes)[y_train.squeeze()]
y_test = np.eye(ht.n_classes)[y_test.squeeze()]

# Preprocessing (normalisation as implemented in ht.preprocess)
x_train = ht.preprocess(x_train)
x_test = ht.preprocess(x_test)

# Define model (continues beyond this excerpt)
# Example #9 (0 votes)
# input image dimensions
img_rows, img_cols = 32, 32

accuracies = []

# 10-fold cross-validation: build and evaluate one model per fold.
for fold in range(1, 11):
    print("#" * 80)
    print("k = %i" % fold)
    # Start from a clean TensorFlow graph so folds do not share variables.
    tf.reset_default_graph()

    # Load data for this fold (normalised, one-hot, image-shaped).
    dataset_path = os.path.join(os.path.expanduser("~"), 'hasy')
    hasy_data = ht.load_data(fold=fold,
                             normalize=True,
                             one_hot=True,
                             dataset_path=dataset_path,
                             flatten=False)
    # NOTE(review): only the first 1000 training samples are used —
    # presumably to keep the demo fast; confirm before real evaluation.
    train_x = hasy_data['train']['X'][:1000]
    train_y = hasy_data['train']['y'][:1000]
    test_x = hasy_data['test']['X']
    test_y = hasy_data['test']['y']

    # Define model (tflearn layers; definition continues beyond this excerpt)
    network = input_data(shape=[None, img_rows, img_cols, 1], name='input')
    network = conv_2d(network, 32, 3, activation='prelu')
    network = conv_2d(network, 64, 3, activation='prelu')
    network = max_pool_2d(network, 2)
    network = dropout(network, keep_prob=0.25)
    network = fully_connected(network, 1024, activation='tanh')
    network = dropout(network, keep_prob=0.5)
# Example #10 (0 votes)
def main():
    """Train Net on HASY with Adam and save a model/optimizer checkpoint.

    The checkpoint (model state_dict plus optimizer state_dict) is written
    to <cwd>/models/hasy-visionTeX-model.pt so training can be resumed.
    """
    # Location of the saved checkpoint. pathlib replaces the original
    # os.path.join/Path round-trip; the resulting path is identical.
    root = pathlib.Path().absolute()
    checkpoint_path = root / "models" / "hasy-visionTeX-model.pt"

    # Training settings. (The original also defined lr/momentum for a
    # commented-out SGD optimizer; those unused locals are removed.)
    batch_size = 50
    test_batch_size = 1000
    epochs = 10
    adam_lr = 0.0001
    seed = 1
    log_interval = 10
    use_cuda = torch.cuda.is_available()  # original no_cuda flag was False

    torch.manual_seed(seed)

    device = torch.device("cuda" if use_cuda else "cpu")
    print("Device", device)

    kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
    train_loader = torch.utils.data.DataLoader(
        HASY('../data', train=True, download=True,
             transform=transforms.Compose([transforms.ToTensor()])),
        batch_size=batch_size, shuffle=True, **kwargs)

    # Result intentionally discarded: presumably triggers load_data()'s
    # download/cache side effects only — confirm against hasy_tools.
    hasy_tools.load_data()

    test_loader = torch.utils.data.DataLoader(
        HASY('../data', train=False,
             transform=transforms.Compose([transforms.ToTensor()])),
        batch_size=test_batch_size, shuffle=True, **kwargs)

    model = Net().to(device)
    # Adam optimizer instead of SGD: https://arxiv.org/pdf/1412.6980.pdf
    adam_optimizer = optim.Adam(model.parameters(), lr=adam_lr)

    for epoch in range(1, epochs + 1):
        train(log_interval, model, device, train_loader, adam_optimizer, epoch)
        # Return value was stored but never used in the original; dropped.
        test(model, device, test_loader)

    # Persist both model weights and optimizer state for later resume.
    checkpoint = {'state_dict': model.state_dict(),
                  'optim_dict': adam_optimizer.state_dict()}
    torch.save(checkpoint, checkpoint_path)
    '''
# Example #11 (0 votes)
 def __init__(self, root, train=True, transform=None, target_transform=None, download=False):
     """Remember the dataset options and load HASY fold 2 eagerly."""
     self.train = train
     self.transform = transform
     self.target_transform = target_transform
     self.root = os.path.expanduser(root)
     self.data = hasy_tools.load_data(mode='fold-2')  # whole fold, not split by `train`
# Example #12 (0 votes)
# Translation tables: model output index <-> symbol id <-> LaTeX command.
symbol_id2index = ht.generate_index("%s/symbols.csv" % dataset_path)
symbolid2latex = ht._get_symbolid2latex("%s/symbols.csv" % dataset_path)
index2symbol_id = {}
for index, symbol_id in symbol_id2index.items():
    index2symbol_id[symbol_id] = index
index2latex = lambda n: symbolid2latex[index2symbol_id[n]]

# NOTE(review): yaml.load without an explicit Loader is deprecated in
# PyYAML >= 5.1 and unsafe on untrusted input — consider yaml.safe_load.
with open("confusable-classes.yml", 'r') as stream:
    merge_classes = yaml.load(stream)

print("Load model")
model = load_model('keras-models/my_keras_model.h5')

print("Load data")
hasy_data = load_data(normalize=True, dataset_path=dataset_path)

print("edit data")
train_x = hasy_data['train']['X']
train_y = hasy_data['train']['y']
train_s = hasy_data['train']['source']
test_x = hasy_data['test']['X']
test_y = hasy_data['test']['y']
test_s = hasy_data['test']['source']

# Reshape to the Keras backend's expected channel ordering.
if K.image_dim_ordering() == 'th':
    train_x = train_x.reshape(train_x.shape[0], 1, img_rows, img_cols)
    test_x = test_x.reshape(test_x.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    # (excerpt truncated: the test_x reshape / input_shape lines continue
    # beyond this view)
    train_x = train_x.reshape(train_x.shape[0], img_rows, img_cols, 1)
 def __init__(self, root, train=True, transform=None, target_transform=None, download=False):
     """Store the dataset configuration and fetch the HASY data up front."""
     self.root = os.path.expanduser(root)
     # Loaded eagerly; `train` only records which split callers want.
     self.data = hasy_tools.load_data()
     self.train = train
     self.transform = transform
     self.target_transform = target_transform
# Example #14 (0 votes)
def test_load_data():
    """Smoke-test that hasy_tools.load_data() runs and returns a value."""
    from hasy_tools import load_data

    data = load_data()
    # The original discarded the result without checking it, so the test
    # could only fail by raising; at least assert something came back.
    assert data is not None
# Training configuration for this run.
batch_size = 128
nb_epoch = 1

# input image dimensions
img_rows, img_cols = 32, 32

accuracies = []
# 10-fold cross-validation over the HASY folds.
for fold in range(1, 11):
    # Fresh TensorFlow graph per fold so models do not share state.
    tf.reset_default_graph()
    print("#" * 80)
    print("k = %i" % fold)

    # Load data for this fold (normalised, one-hot encoded).
    dataset_path = os.path.join(os.path.expanduser("~"), 'hasy')
    hasy_data = ht.load_data(fold=fold,
                             normalize=True,
                             one_hot=True,
                             dataset_path=dataset_path)
    # NOTE(review): training restricted to the first 1000 samples —
    # presumably for speed; confirm before a real evaluation.
    train_x = hasy_data['train']['X'][:1000]
    train_y = hasy_data['train']['y'][:1000]
    test_x = hasy_data['test']['X']
    test_y = hasy_data['test']['y']

    # Reshape to the Keras backend's expected channel ordering.
    if K.image_dim_ordering() == 'th':
        train_x = train_x.reshape(train_x.shape[0], 1, img_rows, img_cols)
        test_x = test_x.reshape(test_x.shape[0], 1, img_rows, img_cols)
        input_shape = (1, img_rows, img_cols)
    else:
        train_x = train_x.reshape(train_x.shape[0], img_rows, img_cols, 1)
        test_x = test_x.reshape(test_x.shape[0], img_rows, img_cols, 1)
        input_shape = (img_rows, img_cols, 1)