import os

# NOTE: the import paths for the project-local modules below are assumed; adjust them to the actual package layout.
import config_holder
import sentiment_dataset
import sentiment_model_cnn


def main(args):
    """Main training method."""
    print("Preparing for training...")
    training_config = config_holder.ConfigHolder(args.config_file).config
    training_config["num_epoch"] = args.num_epoch

    # Each *_dataset is a (features, labels, info) tuple; info carries the batch count.
    train_dataset = sentiment_dataset.train_input_fn(args.train, training_config)
    validation_dataset = sentiment_dataset.validation_input_fn(args.validation, training_config)
    eval_dataset = sentiment_dataset.eval_input_fn(args.eval, training_config)

    model = sentiment_model_cnn.keras_model_fn(None, training_config)

    print("Starting training...")
    model.summary()
    model.fit(
        x=train_dataset[0]['embedding_input'],
        y=train_dataset[1],
        steps_per_epoch=train_dataset[2]["num_batches"],
        epochs=training_config["num_epoch"],
        validation_data=(validation_dataset[0]['embedding_input'], validation_dataset[1]),
        validation_steps=validation_dataset[2]["num_batches"])

    score = model.evaluate(
        eval_dataset[0]['embedding_input'],
        eval_dataset[1],
        steps=eval_dataset[2]["num_batches"],
        verbose=0)
    print("Test loss: {}".format(score[0]))
    print("Test accuracy: {}".format(score[1]))

    sentiment_model_cnn.save_model(
        model, os.path.join(args.model_output_dir, "sentiment_model.h5"))
def main(args):
    """Main training method."""
    print("Preparing for training...")
    training_config = config_holder.ConfigHolder(args.config_file).config
    training_config["num_epoch"] = args.num_epoch

    # The embedding matrix is loaded once and shared across all models.
    embedding_matrix = sentiment_dataset._load_embedding_matrix(training_config)

    models = {}
    for model_name in training_config["models"]:
        # Datasets are rebuilt inside the loop so every model gets fresh input pipelines.
        train_dataset = sentiment_dataset.train_input_fn(args.train, training_config)
        validation_dataset = sentiment_dataset.validation_input_fn(args.validation, training_config)
        eval_dataset = sentiment_dataset.eval_input_fn(args.eval, training_config)

        model = sentiment_model.fit_model(
            model_name,
            embedding_matrix,
            training_config,
            train_dataset,
            validation_dataset,
            eval_dataset)
        models[model_name] = model

        # Build the output path once so the saved file and the log message agree.
        model_path = os.path.join(
            args.model_output_dir, f"sentiment_model_{model_name}_non_stopwords_5.h5")
        sentiment_model.save_model(model, model_path)
        print("{} model saved to {}".format(model_name, model_path))
def main(args):
    """Main training method."""
    print("Preparing for training...")
    training_config = config_holder.ConfigHolder(args.config_file).config
    training_config["num_epoch"] = args.num_epoch

    # args.train is now redundant: the input functions only need the file name,
    # which is resolved against the S3 bucket (see sentiment_dataset).
    train_dataset = sentiment_dataset.train_input_fn(args.train, 'train.json', training_config)
    validation_dataset = sentiment_dataset.validation_input_fn(
        args.validation, 'dev.json', training_config)
    eval_dataset = sentiment_dataset.eval_input_fn(args.eval, 'eval.json', training_config)

    model = sentiment_model_cnn.keras_model_fn(None, training_config)

    print("Starting training...")
    model.fit(
        x=train_dataset[0],
        y=train_dataset[1],
        steps_per_epoch=train_dataset[2]["num_batches"],
        epochs=training_config["num_epoch"],
        validation_data=(validation_dataset[0], validation_dataset[1]),
        validation_steps=validation_dataset[2]["num_batches"])

    score = model.evaluate(
        eval_dataset[0],
        eval_dataset[1],
        steps=eval_dataset[2]["num_batches"],
        verbose=0)
    print("Test loss: {}".format(score[0]))
    print("Test accuracy: {}".format(score[1]))

    sentiment_model_cnn.save_model(
        model, os.path.join(args.model_output_dir, "sentiment_model.h5"))
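# The sketch below is a minimal, assumed CLI entrypoint for the main() variants above.
# It is not part of the original scripts: the argument names simply mirror the attributes
# read from `args` (config_file, num_epoch, train, validation, eval, model_output_dir);
# the defaults and help strings are illustrative only.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Train the sentiment model(s).")
    parser.add_argument("--config-file", dest="config_file", required=True,
                        help="Path to the config consumed by ConfigHolder.")
    parser.add_argument("--num-epoch", dest="num_epoch", type=int, default=5,
                        help="Number of training epochs; overrides the config value.")
    parser.add_argument("--train", required=True, help="Training data location.")
    parser.add_argument("--validation", required=True, help="Validation data location.")
    parser.add_argument("--eval", required=True, help="Evaluation data location.")
    parser.add_argument("--model-output-dir", dest="model_output_dir", required=True,
                        help="Directory where trained model files (.h5) are written.")
    main(parser.parse_args())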