def train():
    """Build and fit model 1 on headline data.

    Reads training hyper-parameters from `Settings`, loads a GloVe
    embedding sized by `dictionary_size`, builds the model via
    `Model1Builder`, prepares headline/is_top_submission data through
    `Preprocessor`, and trains with class weighting and the standard
    logging/plotting/saving callbacks.
    """
    settings = Settings()
    batch_size = settings.get_training_parameters('batch_size')
    epochs = settings.get_training_parameters('epochs')
    dictionary_size = settings.get_training_parameters('dictionary_size')
    max_headline_length = settings.get_training_parameters('max_headline_length')

    # Embedding must be loaded before it is handed to builder/preprocessor.
    glove = Glove(dictionary_size)
    glove.load_embedding()

    builder = (Model1Builder()
               .set_input('glove', glove)
               .set_parameter('max_headline_length', max_headline_length))
    model = builder()

    preprocessor = Preprocessor(model)
    preprocessor.set_encoder('glove', glove)
    preprocessor.set_parameter('max_headline_length', max_headline_length)
    preprocessor.load_data(['headline', 'is_top_submission'])

    training_input = [preprocessor.training_data['headline']]
    validation_input = [preprocessor.validation_data['headline']]
    training_output = [preprocessor.training_data['is_top_submission']]
    validation_output = [preprocessor.validation_data['is_top_submission']]

    # Weight classes by their frequency in the training labels, keyed by
    # the model's output layer names.
    class_weights = calculate_class_weights(
        preprocessor.training_data['is_top_submission'],
        [layer.name for layer in model.output_layers])

    callbacks = CallbackBuilder(
        model, [CsvLogger, CsvPlotter, ConfigLogger, ModelSaver])()

    model.fit(training_input,
              training_output,
              batch_size=batch_size,
              epochs=epochs,
              callbacks=callbacks,
              validation_data=(validation_input, validation_output),
              class_weight=class_weights)
def train():
    """Build and fit model 4 on category data.

    Reads `batch_size` and `epochs` from `Settings`, builds the model via
    `Model4Builder` (no extra inputs or parameters), prepares
    category/is_top_submission data through `Preprocessor`, and trains
    with class weighting and the standard logging/plotting/saving
    callbacks.
    """
    settings = Settings()
    batch_size = settings.get_training_parameters('batch_size')
    epochs = settings.get_training_parameters('epochs')

    model = Model4Builder()()

    preprocessor = Preprocessor(model)
    preprocessor.load_data(['category', 'is_top_submission'])

    training_input = [preprocessor.training_data['category']]
    validation_input = [preprocessor.validation_data['category']]
    training_output = [preprocessor.training_data['is_top_submission']]
    validation_output = [preprocessor.validation_data['is_top_submission']]

    # Weight classes by their frequency in the training labels, keyed by
    # the model's output layer names.
    class_weights = calculate_class_weights(
        preprocessor.training_data['is_top_submission'],
        [layer.name for layer in model.output_layers])

    callbacks = CallbackBuilder(
        model, [CsvLogger, CsvPlotter, ConfigLogger, ModelSaver])()

    model.fit(training_input,
              training_output,
              batch_size=batch_size,
              epochs=epochs,
              callbacks=callbacks,
              validation_data=(validation_input, validation_output),
              class_weight=class_weights)
def train():
    """Build and fit model 6 on numeric-log representations.

    Reads training hyper-parameters from `Settings`, creates `NumericLog`
    encoders for headline and article lengths, builds the model via
    `Model6Builder`, prepares headline/article log representations and
    is_top_submission labels through `Preprocessor`, and trains with
    class weighting and the standard logging/plotting/saving callbacks.
    """
    settings = Settings()
    batch_size = settings.get_training_parameters('batch_size')
    epochs = settings.get_training_parameters('epochs')
    max_headline_length = settings.get_training_parameters('max_headline_length')
    max_article_length = settings.get_training_parameters('max_article_length')

    headline_numeric_log = NumericLog(max_headline_length)
    article_numeric_log = NumericLog(max_article_length)

    builder = (Model6Builder()
               .set_input('headline_numeric_log', headline_numeric_log)
               .set_input('article_numeric_log', article_numeric_log))
    model = builder()

    preprocessor = Preprocessor(model)
    preprocessor.set_encoder('headline_numeric_log', headline_numeric_log)
    preprocessor.set_encoder('article_numeric_log', article_numeric_log)
    preprocessor.load_data([
        'headline_log_representation',
        'article_log_representation',
        'is_top_submission',
    ])

    # Two model inputs: headline and article log representations, in the
    # same order for training and validation.
    training_input = [
        preprocessor.training_data['headline_log_representation'],
        preprocessor.training_data['article_log_representation'],
    ]
    validation_input = [
        preprocessor.validation_data['headline_log_representation'],
        preprocessor.validation_data['article_log_representation'],
    ]
    training_output = [preprocessor.training_data['is_top_submission']]
    validation_output = [preprocessor.validation_data['is_top_submission']]

    # Weight classes by their frequency in the training labels, keyed by
    # the model's output layer names.
    class_weights = calculate_class_weights(
        preprocessor.training_data['is_top_submission'],
        [layer.name for layer in model.output_layers])

    callbacks = CallbackBuilder(
        model, [CsvLogger, CsvPlotter, ConfigLogger, ModelSaver])()

    model.fit(training_input,
              training_output,
              batch_size=batch_size,
              epochs=epochs,
              callbacks=callbacks,
              validation_data=(validation_input, validation_output),
              class_weight=class_weights)