def setup():
    """Parse command-line arguments + YAML config and configure logging.

    Returns:
        The merged config object produced by ``parse_config``.
    """
    parser = get_parser()
    # Add all default command line parsers for the data module
    # (same pattern as the other entry points in this project:
    # make_cli_parser takes the datamodule class, not the config path).
    parser = make_cli_parser(parser, PLDataModuleFromDatasets)
    # Merge CLI args with the yaml config file passed via --config.
    config = parse_config(parser, parser.parse_args().config)
    # Configure logging only AFTER `config` exists — the original called
    # configure_logging before `config` was assigned (NameError).
    configure_logging(f"logs/{config.trainer.experiment_name}")
    return config
train=True)  # (fragment) closes a MNIST(...) call whose opening lines are outside this view
    # Validation split: same transform, train=False selects the test partition of MNIST.
    val = MNIST(download=False, root=".", transform=data_transform, train=False)
    return train, val


if __name__ == "__main__":
    # SETUP ##################################################
    parser = get_parser()
    parser = make_cli_parser(parser, PLDataModuleFromDatasets)
    config = parse_config(parser, parser.parse_args().config)
    # Replace the generic default experiment name with a task-specific one.
    if config.trainer.experiment_name == "experiment":
        config.trainer.experiment_name = "mnist-rnn-classification"
    configure_logging(f"logs/{config.trainer.experiment_name}")
    if config.seed is not None:
        # NOTE(review): missing f-prefix — this logs the literal text
        # "seed={seed}" instead of the actual seed value.
        logger.info("Seeding everything with seed={seed}")
        pl.utilities.seed.seed_everything(seed=config.seed)
    train, test = get_data()
    # Get data and make datamodule ##########################
    ldm = PLDataModuleFromDatasets(train, test=test,
raw_dev, labels_dev, raw_test, labels_test, num_labels,
    )  # (fragment) closes a multi-value return whose enclosing def is outside this view


if __name__ == "__main__":
    parser = get_parser()
    parser = make_cli_parser(parser, PLDataModuleFromCorpus)
    args = parser.parse_args()
    config_file = args.config
    config = parse_config(parser, config_file)
    # Set these by default.
    # The HuggingFace model name is taken from the tokenizer setting;
    # "uncased" in the model name implies the corpus should be lower-cased.
    config.hugging_face_model = config.data.tokenizer
    config.data.add_special_tokens = True
    config.data.lower = "uncased" in config.hugging_face_model
    # Replace the generic default experiment name with a task-specific one.
    if config.trainer.experiment_name == "experiment":
        config.trainer.experiment_name = "finetune-bert-smt"
    configure_logging(f"logs/{config.trainer.experiment_name}")
    if config.seed is not None:
        # NOTE(review): missing f-prefix — this logs the literal text
        # "seed={seed}" instead of the actual seed value.
        logger.info("Seeding everything with seed={seed}")
        pl.utilities.seed.seed_everything(seed=config.seed)
    (