Example No. 1
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import utils.plot as plot
from utils.preprocessing import *
from data.DigitSet import DigitSet
from data.DataSet import DataSet
import os

#%%
folder = os.path.join("files", "dataset")
dataset = DataSet(folder)
dataset.apply(apply_mean_centering)
dataset.apply(apply_unit_distance_normalization)
dataset.apply(lambda x: normalize_pressure_value(x, 512))

#%%
filename = os.path.join(folder, "10.43_23.03.2018_digitset.json")
digitset = DigitSet(filename)
scaled = digitset.copy()
# Apply transformations
scaled.apply(apply_mean_centering)
scaled.apply(apply_unit_distance_normalization)
scaled.apply(lambda x: normalize_pressure_value(x, 512))
if scaled.time_is_dt():
    scaled.convert_dt_to_t()
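
#%% Sketch (an assumption, not DigitSet's implementation): converting per-point
# time deltas to absolute timestamps, as convert_dt_to_t is presumed to do, is
# just a cumulative sum over the dt column.
import numpy as np

dts = np.array([0.0, 0.010, 0.012, 0.009])   # seconds between consecutive points
print(np.cumsum(dts))                         # absolute time of each point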

#%%
digit, label = digitset[6]
plot.show_digit(digit, label=label,
                show_lines=True, show_points=True)
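
#%% Sketch (not from this repository): what the two geometric normalizations
# applied above are assumed to do. A digit is treated as an (N, D) array whose
# first two columns are x and y; the helper names and shapes here are my own.
import numpy as np

def mean_center_sketch(digit):
    """Shift the (x, y) columns so their centroid sits at the origin."""
    digit = np.asarray(digit, dtype=float).copy()
    digit[:, :2] -= digit[:, :2].mean(axis=0)
    return digit

def unit_distance_sketch(digit):
    """Scale the (x, y) columns so the farthest point lies at distance 1."""
    digit = np.asarray(digit, dtype=float).copy()
    max_dist = np.linalg.norm(digit[:, :2], axis=1).max()
    if max_dist > 0:
        digit[:, :2] /= max_dist
    return digit

# Toy three-point stroke with (x, y, pressure) columns
stroke = np.array([[0.0, 0.0, 0.3], [2.0, 2.0, 0.5], [4.0, 4.0, 0.4]])
print(unit_distance_sketch(mean_center_sketch(stroke)))
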
Example No. 2
print("Test Data Len:", len(dataset.test_data))

#%% Load Model
import os

from keras.models import load_model

TRAINED_MODEL = os.path.join("files", "checkpoints", "1525696834.4091375",
                             "regularized_3x512_gru-30-0.97.hdf5")
model = load_model(TRAINED_MODEL)
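
#%% Sketch (architecture guessed from the checkpoint file name, not read from
# the repository): roughly what a "regularized 3x512 GRU" classifier could look
# like in Keras. Unit counts, dropout rate and the 10-class output are assumptions.
from keras.layers import GRU, Dense, Dropout
from keras.models import Sequential

def build_gru_sketch(timesteps=50, features=3, num_classes=10):
    m = Sequential()
    m.add(GRU(512, return_sequences=True, input_shape=(timesteps, features)))
    m.add(Dropout(0.5))
    m.add(GRU(512, return_sequences=True))
    m.add(Dropout(0.5))
    m.add(GRU(512))
    m.add(Dropout(0.5))
    m.add(Dense(num_classes, activation="softmax"))
    m.compile(loss="categorical_crossentropy", optimizer="adam",
              metrics=["accuracy"])
    return m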

#%%
import numpy as np
from utils.preprocessing import *
from functools import partial
from utils.research_preprocessing import add_occlusions

dataset.apply(clean_repeat_points)

NUM_SAMPLES = 50
ANGLES_TO_ROTATE = [5, 10, 15, 45, -5, -10, -15, -45]

DROPOUTS_TO_TRY = np.linspace(0.0, 0.99, 20)

scores = []

for drop in DROPOUTS_TO_TRY:
    curr = dataset.copy()
    curr.apply(partial(add_occlusions, dropout_percentage=drop))
    curr.apply(apply_mean_centering)
    curr.apply(apply_unit_distance_normalization)
    curr.apply(
        partial(spline_interpolate_and_resample, num_samples=NUM_SAMPLES))
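
#%% Sketch (my own, not the repository's add_occlusions): the occlusion sweep
# above is presumed to randomly drop a fraction of each digit's points, along
# these lines.
import numpy as np

def add_occlusions_sketch(digit, dropout_percentage=0.2):
    """Return a copy of `digit` with roughly dropout_percentage of its rows removed."""
    digit = np.asarray(digit)
    keep = np.random.rand(len(digit)) >= dropout_percentage
    return digit[keep]

toy_digit = np.arange(20, dtype=float).reshape(10, 2)   # ten two-column points
print(len(add_occlusions_sketch(toy_digit, dropout_percentage=0.5)))
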
Example No. 3
PARAM_NUM_EPOCHS = 15
PARAM_BATCH_SIZE = 300
NUM_SAMPLES = 50

# Paths
dataset_folder_path = os.path.join("files", "dataset")
#%% Prepare Data
# Imports
from utils.preprocessing import *
from data.DataSet import DataSet
from functools import partial
import numpy as np

dataset = DataSet()
dataset.load(dataset_folder_path, test_set_percentage=0.333, validation_set_percentage=0)
dataset.apply(apply_mean_centering)
dataset.apply(apply_unit_distance_normalization)
#dataset.apply(partial(normalize_pressure_value, max_pressure_val=512))
dataset.apply(partial(spline_interpolate_and_resample, num_samples=NUM_SAMPLES))
dataset.expand_many(partial(rotate_digit, degrees=[5, 10, 15, 45, -5, -10, -15, -45]))
dataset.expand(reverse_digit_sequence)
# dataset.apply(lambda digit: convert_xy_to_derivative(digit, normalize=False))
#dataset.apply(partial(convert_xy_to_derivative, normalize=True))
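
#%% Sketch (not the repository's rotate_digit; names and shapes are assumed):
# the rotation augmentation used with expand_many above is presumed to rotate
# each digit's (x, y) columns about the origin, one copy per angle.
import numpy as np

def rotate_digit_sketch(digit, degrees):
    """Rotate the (x, y) columns of `digit` by each angle; return the copies."""
    digit = np.asarray(digit, dtype=float)
    copies = []
    for deg in degrees:
        theta = np.radians(deg)
        rot = np.array([[np.cos(theta), -np.sin(theta)],
                        [np.sin(theta),  np.cos(theta)]])
        rotated = digit.copy()
        rotated[:, :2] = rotated[:, :2] @ rot.T
        copies.append(rotated)
    return copies

stroke = np.array([[1.0, 0.0, 0.5], [0.0, 1.0, 0.5]])
print(len(rotate_digit_sketch(stroke, degrees=[5, -5, 45])))  # 3 rotated copies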

#%% Split Train, Valid, Test
# Imports
import numpy as np
from sklearn.model_selection import train_test_split

X_train_valid = np.array(dataset.train_data)
X_test = np.array(dataset.test_data)

print("Training Data Len:", len(dataset.train_data))
print("Validation Data Len:", len(dataset.valid_data))
print("Test Data Len:", len(dataset.test_data))

#%%
import numpy as np
from utils.preprocessing import *
from functools import partial
from utils.research_preprocessing import add_occlusions
from models.regularized_3x512_gru import Regularized3x512GRU
import os.path
import pickle

dataset.apply(apply_mean_centering)
dataset.apply(apply_unit_distance_normalization)

ANGLES_TO_ROTATE = [5, 10, 15, 45, -5, -10, -15, -45]

NUM_EPOCHS = 20
PARAM_BATCH_SIZE = 300

#NUM_SAMPLES_TO_TRY =  [300, 200, 150, 100, 75, 50, 25, 10]
NUM_SAMPLES_TO_TRY = [350, 250, 75]

scores_valid = []
scores_test = []

for num_samples in NUM_SAMPLES_TO_TRY:
    curr = dataset.copy()

#%%
dataset.load(dataset_folder_path,
             test_set_percentage=0.2,
             validation_set_percentage=0.3333)

print("Training Data Len:", len(dataset.train_data))
print("Validation Data Len:", len(dataset.valid_data))
print("Test Data Len:", len(dataset.test_data))

#%%
NUM_SAMPLES = 50
ANGLES_TO_ROTATE = [5, 10, 15, 45, -5, -10, -15, -45]

from utils.preprocessing import *
from functools import partial

dataset.apply(apply_mean_centering)
#dataset.apply(apply_unit_distance_normalization)
dataset.apply(partial(spline_interpolate_and_resample,
                      num_samples=NUM_SAMPLES))
dataset.expand_many(partial(rotate_digit, degrees=ANGLES_TO_ROTATE))
dataset.expand(reverse_digit_sequence)

print("Training Data Len:", len(dataset.train_data))
print("Validation Data Len:", len(dataset.valid_data))
print("Test Data Len:", len(dataset.test_data))

#%%
import numpy as np

X_train = np.array(dataset.train_data)
X_valid = np.array(dataset.valid_data)