Example 1
import os

from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, ReduceLROnPlateau

from data_loader import DataLoader
from model import Classifier  # module path assumed; import Classifier from wherever it is defined


def train(batch_size = 32, save_path = 'chk_classifier.hdf5', resume_flag = False):

	# Callbacks: checkpoint the best weights, stop early when val_loss stalls, and shrink the LR on plateaus
	checkpointer = ModelCheckpoint(monitor = 'val_acc',
				       filepath = save_path, 
				       verbose = 1, 
				       save_best_only = True, 
				       save_weights_only = True)
	early_stopper = EarlyStopping(monitor = 'val_loss', 
				      min_delta = 0.001, 
				      patience = 20)
	lr_reducer = ReduceLROnPlateau(monitor = 'val_loss',
				       factor = 0.8,
				       verbose = 1,
				       patience = 4,
				       min_lr = 2E-6)

	# Load the data
	data_loader = DataLoader(data_path = 'data')
	data_loader.load_data()

	# Build the model from the dataset's vocabulary size, sequence length, and number of classes
	model = Classifier(data_loader.max_feature_value,
			   data_loader.max_sequence_len,
			   num_classes = len(data_loader.classes))

	# Calling the Classifier instance returns the built Keras model
	classifier = model()
	classifier.summary()
	# Derive steps per epoch from the dataset split sizes
	train_steps = data_loader.train_size // batch_size
	val_steps = data_loader.val_size // batch_size

	# Optionally resume from a previous checkpoint
	if resume_flag and os.path.exists(save_path):
		classifier.load_weights(save_path)

	# Train; pass batch_size to the generators so they match the step counts computed above
	classifier.fit_generator(generator = data_loader.generate(batch_size = batch_size, mode = 'train'),
				 steps_per_epoch = train_steps,
				 epochs = 350,
				 verbose = 1,
				 validation_data = data_loader.generate(batch_size = batch_size, mode = 'val'),
				 validation_steps = val_steps,
				 callbacks = [checkpointer, early_stopper, lr_reducer])
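
A minimal usage sketch, assuming the function lives in a script that is run directly; the entry-point guard and argument values are illustrative, not part of the original:

if __name__ == '__main__':
	# Illustrative invocation: resume from the existing checkpoint if one is present
	train(batch_size = 32, resume_flag = True)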
Example 2
from data_loader import DataLoader
from model import Classifier  # module path assumed, as in the training example


def test(batch_size = 1, save_path = 'chk_classifier.hdf5'):
	# Load the data
	data_loader = DataLoader(data_path = 'data')
	data_loader.load_data()

	# Rebuild the model with the same configuration used during training
	model = Classifier(data_loader.max_feature_value,
			   data_loader.max_sequence_len,
			   num_classes = len(data_loader.classes))

	classifier = model()
	# Restore the trained weights from the checkpoint
	classifier.load_weights(save_path)

	# Pull a single validation batch
	generator = data_loader.generate(batch_size = batch_size, mode = 'val')
	x, y = next(generator)
	
	# Predict and decode both the ground truth and the predictions back to class labels
	pred = classifier.predict(x)
	y_in = data_loader.decode_label(y)
	y_out = data_loader.decode_label(pred)
	
	for truth, prediction in zip(y_in, y_out):
		print('Ground Truth: {} Prediction: {}'.format(truth, prediction))
	metrics = classifier.evaluate(x, y, verbose = 1)
	for name, value in zip(classifier.metrics_names, metrics):
		print('{}: {}\n'.format(name, value))
	print(metrics)
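
test() only scores a single batch. Below is a minimal sketch of scoring the whole validation split instead, assuming DataLoader exposes val_size as in the training example and reusing the imports above; evaluate_generator mirrors the fit_generator call from Example 1:

def evaluate_all(batch_size = 32, save_path = 'chk_classifier.hdf5'):
	# Sketch only: assumes DataLoader exposes val_size, as in the training example
	data_loader = DataLoader(data_path = 'data')
	data_loader.load_data()

	model = Classifier(data_loader.max_feature_value,
			   data_loader.max_sequence_len,
			   num_classes = len(data_loader.classes))
	classifier = model()
	classifier.load_weights(save_path)

	# Evaluate over every validation batch rather than a single one
	val_steps = data_loader.val_size // batch_size
	metrics = classifier.evaluate_generator(data_loader.generate(batch_size = batch_size, mode = 'val'),
						steps = val_steps)
	for name, value in zip(classifier.metrics_names, metrics):
		print('{}: {}'.format(name, value))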
Example 3
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'  # must be set before TensorFlow is imported to take effect

from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
import numpy as np
import matplotlib
import matplotlib.pyplot as plt

# Load the dataset
from data_loader import DataLoader
data = DataLoader(64, 64)

# Flattened evaluation data
x_test, y_test = data.get_flatten_data()
print(x_test.shape)

# Generate 10000 training samples of size 64 x 64
x_train, y_train = data.generate(10000, 64, 64)
print(x_train.shape)

# Training Parameters
learning_rate = 0.0001
num_epochs = 100

# Network Parameters
WIDTH = data.WIDTH
HEIGHT = data.HEIGHT
CHANNELS = data.CHANNELS
NUM_INPUTS = WIDTH * HEIGHT * CHANNELS
NUM_OUTPUTS = WIDTH * HEIGHT * CHANNELS

# Flatten the generated training samples to match the dense input size
# (the original reshaped x_test/y_test here, which would overwrite the generated training data)
x_train = x_train.reshape((-1, NUM_INPUTS))
y_train = y_train.reshape((-1, NUM_INPUTS))
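
The snippet stops before the model itself. The continuation below is only a sketch of how the imports and parameters above might be wired together: the layer widths, sigmoid output, MSE loss, and Adam optimizer are assumptions, not part of the original.

# Sketch only: architecture, loss, and optimizer are assumptions
model = Sequential([
    Dense(512, activation = 'relu', input_shape = (NUM_INPUTS,)),
    Dense(NUM_OUTPUTS, activation = 'sigmoid')
])
model.compile(optimizer = keras.optimizers.Adam(learning_rate = learning_rate), loss = 'mse')
model.summary()

history = model.fit(x_train, y_train,
                    epochs = num_epochs,
                    validation_data = (x_test.reshape((-1, NUM_INPUTS)),
                                       y_test.reshape((-1, NUM_INPUTS))))

# Plot the training and validation loss curves
plt.plot(history.history['loss'], label = 'train loss')
plt.plot(history.history['val_loss'], label = 'val loss')
plt.legend()
plt.show()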