Example no. 1
from tensorflow.keras import callbacks, losses, optimizers

# build_model, load_preprocess_data, convert_to_data_set and format_result
# are project helpers defined elsewhere in the module
def train_model():
    model = build_model()
    model.summary()  # summary() prints the model itself; wrapping it in print() would also print 'None'

    optimizer = optimizers.Adam(learning_rate=0.001)
    loss = losses.SparseCategoricalCrossentropy(from_logits=True)

    model.compile(optimizer=optimizer, loss=loss, metrics=['acc'])

    (x_train, y_train), (x_test, y_test) = load_preprocess_data()

    epochs = 10
    n_train = 60000
    n_test = 10000
    batch_size = 32
    steps_per_epoch = n_train // batch_size
    validation_steps = n_test // batch_size

    train_data_set = convert_to_data_set(x_train,
                                         y_train,
                                         repeat_times=epochs,
                                         shuffle_buffer_size=n_train,
                                         batch_size=batch_size)

    val_data_set = convert_to_data_set(x_test,
                                       y_test,
                                       repeat_times=epochs,
                                       shuffle_buffer_size=n_test,
                                       batch_size=batch_size)

    my_callbacks = []
    early_stopping_cb = callbacks.EarlyStopping(monitor='val_loss',
                                                patience=5,
                                                restore_best_weights=True)
    my_callbacks.append(early_stopping_cb)

    tensorboard_cb = callbacks.TensorBoard(log_dir='logs')
    my_callbacks.append(tensorboard_cb)

    checkpoint_path = 'models/base_cnn/ckpt'
    checkpoint_cb = callbacks.ModelCheckpoint(filepath=checkpoint_path,
                                              save_weights_only=True,
                                              save_best_only=True)
    my_callbacks.append(checkpoint_cb)

    history = model.fit(train_data_set,
                        epochs=epochs,
                        steps_per_epoch=steps_per_epoch,
                        validation_data=val_data_set,
                        validation_steps=validation_steps,
                        callbacks=my_callbacks)

    print('\n\n')
    train_result = model.evaluate(x_train, y_train)
    format_result(train_result, name='train')

    val_result = model.evaluate(x_test, y_test)
    format_result(val_result, name='val')

    return history
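
The convert_to_data_set helper is not shown in this example. A minimal
sketch of what it plausibly does with the tf.data API, given how its
arguments are used above (the project's actual implementation may differ):

import tensorflow as tf

def convert_to_data_set(x, y, repeat_times, shuffle_buffer_size, batch_size):
    # hypothetical reconstruction: build an input pipeline from in-memory arrays
    ds = tf.data.Dataset.from_tensor_slices((x, y))
    ds = ds.shuffle(shuffle_buffer_size)  # buffer spans the whole set
    ds = ds.repeat(repeat_times)          # one pass per training epoch
    ds = ds.batch(batch_size)
    return ds.prefetch(tf.data.AUTOTUNE)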
Example no. 2
import tensorflow as tf
from tensorflow.keras import losses, optimizers

def load_model():
    model = build_model()

    optimizer = optimizers.Adam(learning_rate=0.001)
    loss = losses.SparseCategoricalCrossentropy(from_logits=True)

    model.compile(optimizer=optimizer, loss=loss, metrics=['acc'])

    (x_train, y_train), (x_test, y_test) = load_preprocess_data()

    # restore the most recent weights written by the ModelCheckpoint callback
    checkpoint_dir = 'models/base_cnn/'
    checkpoint = tf.train.latest_checkpoint(checkpoint_dir=checkpoint_dir)
    model.load_weights(checkpoint)

    return model
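
A restored model can then be used for evaluation or inference directly;
for instance, reusing the helpers from Example no. 1:

model = load_model()
_, (x_test, y_test) = load_preprocess_data()
test_loss, test_acc = model.evaluate(x_test, y_test)
print(f'test loss: {test_loss:.4f}, test accuracy: {test_acc:.4f}')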
Example no. 3
# ## Load raw data
#
# Before training and testing a model, we need some data. The following code shows how to load a dataset using ``gumpy``.

# numpy, gumpy and the project's utils module are used below; the filter
# constants (LOWCUT, HIGHCUT, W0, Q, ANTI_DRIFT, CLASS_COUNT, CUTOFF, AXIS,
# FS) are assumed to be defined in earlier cells of the notebook
import numpy as np
import gumpy
import utils

# specify the location of the GrazB dataset
data_dir = './data/Graz'
subject = 'B01'

# initialize the data-structure, but do _not_ load the data yet
grazb_data = gumpy.data.GrazB(data_dir, subject)

# now that the dataset is set up, we can load the data. This is handled by
# the utils function below, which first loads the data and then filters it
# with a notch and a band-pass filter before returning the training data.
x_train, y_train = utils.load_preprocess_data(grazb_data, True, LOWCUT,
                                              HIGHCUT, W0, Q, ANTI_DRIFT,
                                              CLASS_COUNT, CUTOFF, AXIS, FS)

# ## Augment data

x_augmented, y_augmented = gumpy.signal.sliding_window(data=x_train[:, :, :],
                                                       labels=y_train[:, :],
                                                       window_sz=4 * FS,
                                                       n_hop=FS // 10,
                                                       n_start=FS * 1)
x_subject = x_augmented
y_subject = y_augmented
# np.rollaxis moves axis 2 to position 1, e.g. (trials, samples, channels)
# -> (trials, channels, samples)
x_subject = np.rollaxis(x_subject, 2, 1)
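
With window_sz=4 * FS, n_hop=FS // 10 and n_start=FS * 1, the call above
cuts a four-second window out of each trial every 0.1 seconds, starting one
second in, so every trial yields many overlapping training examples. A
self-contained sketch of that windowing logic (gumpy's actual implementation
may differ; the function name here is illustrative):

import numpy as np

def sliding_window_sketch(data, labels, window_sz, n_hop, n_start=0):
    # slice each trial (axis 0) into overlapping windows along the time axis
    windows, window_labels = [], []
    for trial, label in zip(data, labels):
        start = n_start
        while start + window_sz <= trial.shape[0]:
            windows.append(trial[start:start + window_sz])
            window_labels.append(label)  # each window keeps its trial's label
            start += n_hop
    return np.asarray(windows), np.asarray(window_labels)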
Example no. 4
# External imports
from sklearn.naive_bayes import BernoulliNB, GaussianNB, MultinomialNB
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import MinMaxScaler
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Internal imports
from utils import load_preprocess_data, shuffle_split_dataset

print("Loading Data...")
clean_data = load_preprocess_data()
X = clean_data.drop('state', axis=1)
y = pd.DataFrame(clean_data['state'].values)
y.columns = ['state']


# optionally work on a subsample for quicker experiments:
# X = X.iloc[:500000]
# y = y.iloc[:500000]
del clean_data  # free the raw frame; X and y hold everything we need

# scale features to [0, 1]; MultinomialNB in particular requires
# non-negative inputs
min_max_scaler = MinMaxScaler()
X_scaled = min_max_scaler.fit_transform(X)

del X
y = y['state'].to_numpy(dtype=np.float32)

X_train, y_train, X_test, y_test = shuffle_split_dataset(X_scaled, y)

print("\nX_train dims: ", X_train.shape)