def main(path, is_training, is_predicting, model_weights_file, submission_file):
    """Drive the state-farm pipeline: optional training, then optional prediction.

    Args:
        path: Root data directory; training reads from its 'partition' subdir.
        is_training: When True, train the model and save weights; when False,
            load previously saved weights instead.
        is_predicting: When True, run prediction over `path` and write results.
        model_weights_file: Weight file name resolved via
            DataProvider.get_weight_filepath.
        submission_file: Destination passed through to predict_states.
    """
    print('Starting train_statefarm.py')
    print('* using path: {0}'.format(path))
    print('* training: {0}, predicting: {1}'.format(is_training, is_predicting))

    train_batch_size = 64
    provider = DataProvider(os.path.join(path, 'partition'), train_batch_size)
    features = FeatureProvider(provider)
    training_provider = TrainingDataProvider(provider, features)
    model_builder = ModelBuilder(
        training_provider, dropout=0.6, batch_size=train_batch_size)

    if is_training:
        # Stage-wise training: last layer first, then the dense layers.
        print('Train last layer of dense model with batch normalization.')
        model_builder.train_last_layer()
        print('Train dense layers of model with batch normalization.')
        model_builder.train_dense_layers()

    model = model_builder.build(provider)

    if is_training:
        model.train()
        print('Writing model weights to {0}'.format(model_weights_file))
        model.save_weights(provider.get_weight_filepath(model_weights_file))
    else:
        # Not training: restore weights produced by an earlier run.
        print('Loading model weights from {0}'.format(model_weights_file))
        model.load_weights(provider.get_weight_filepath(model_weights_file))

    if is_predicting:
        print('Writing predictions to {0}'.format(submission_file))
        # Prediction reads from `path` directly with a small batch size.
        predict_batch_size = 2
        prediction_provider = DataProvider(path, predict_batch_size)
        predict_states(
            model, prediction_provider, predict_batch_size, submission_file)