示例#1
0
文件: template.py 项目: georgepar/slp
def setup():
    """Build the CLI/config parser, parse the run config, and set up logging.

    Returns:
        The merged configuration object produced by ``parse_config``.
    """
    parser = get_parser()
    # Add all default command line parsers for the datamodule.
    parser = make_cli_parser(parser, PLDataModuleFromDatasets)
    # BUG FIX: the original called configure_logging/parse_config with
    # `config` before it was assigned (NameError) and passed the swapped
    # arguments to make_cli_parser/parse_config. Order and arguments now
    # match the other entry points in this file: parse the config first,
    # then configure logging from it.
    config = parse_config(parser, parser.parse_args().config)
    configure_logging(f"logs/{config.trainer.experiment_name}")
    return config
示例#2
0
                  root=".",
                  transform=data_transform,
                  train=True)

    val = MNIST(download=False,
                root=".",
                transform=data_transform,
                train=False)

    return train, val


if __name__ == "__main__":
    # SETUP ##################################################
    # Build the argument parser and merge CLI args with the yaml config.
    parser = get_parser()
    parser = make_cli_parser(parser, PLDataModuleFromDatasets)

    config = parse_config(parser, parser.parse_args().config)

    # Replace the placeholder experiment name with a descriptive one.
    if config.trainer.experiment_name == "experiment":
        config.trainer.experiment_name = "mnist-rnn-classification"

    configure_logging(f"logs/{config.trainer.experiment_name}")

    if config.seed is not None:
        # BUG FIX: the original string lacked the f-prefix, so the literal
        # text "{seed}" was logged ({seed} was never in scope anyway);
        # log the actual configured seed.
        logger.info(f"Seeding everything with seed={config.seed}")
        pl.utilities.seed.seed_everything(seed=config.seed)

    # NOTE(review): get_data() above returns (train, val); the second value
    # is bound to `test` here — confirm that is intended.
    train, test = get_data()

    # Get data and make datamodule ##########################
示例#3
0
    config["model"] = {
        "intermediate_hidden":
        tune.choice([16, 32, 64, 100, 128, 256, 300, 512])
    }
    config["optimizer"] = tune.choice(["SGD", "Adam", "AdamW"])
    config["optim"]["lr"] = tune.loguniform(1e-4, 1e-1)
    config["optim"]["weight_decay"] = tune.loguniform(1e-4, 1e-1)
    config["data"]["batch_size"] = tune.choice([16, 32, 64, 128])

    return config


if __name__ == "__main__":
    # SETUP ##################################################
    # Build the argument parser and merge CLI args with the yaml config.
    parser = get_parser()
    parser = make_cli_parser(parser, PLDataModuleFromDatasets)  # type: ignore

    config = parse_config(parser, parser.parse_args().config)

    # Replace the placeholder experiment name with a descriptive one.
    if config.trainer.experiment_name == "experiment":
        config.trainer.experiment_name = "mnist-classification"

    # NOTE(review): called without a log path here, unlike the other entry
    # points which pass f"logs/{experiment_name}" — confirm intended.
    configure_logging()

    if config.seed is not None:
        # BUG FIX: the original string lacked the f-prefix, so the literal
        # text "{seed}" was logged ({seed} was never in scope anyway);
        # log the actual configured seed.
        logger.info(f"Seeding everything with seed={config.seed}")
        pl.utilities.seed.seed_everything(seed=config.seed)

    # These arguments may be provided from the command line or a config file
    # config = OmegaConf.to_container(config)
    # config["tune"] = {
示例#4
0
        num_labels = 2

    return (
        raw_train,
        labels_train,
        raw_dev,
        labels_dev,
        raw_test,
        labels_test,
        num_labels,
    )


if __name__ == "__main__":
    # Build the argument parser and attach the corpus-datamodule options.
    parser = get_parser()
    parser = make_cli_parser(parser, PLDataModuleFromCorpus)

    # The yaml config file path comes from the --config CLI argument.
    args = parser.parse_args()
    config_file = args.config

    # Merge CLI arguments with the yaml config file.
    config = parse_config(parser, config_file)
    # Set these by default.
    # The HuggingFace model is keyed off the tokenizer name, and casing is
    # inferred from it ("uncased" models get lowercased input).
    config.hugging_face_model = config.data.tokenizer
    config.data.add_special_tokens = True
    config.data.lower = "uncased" in config.hugging_face_model

    # Replace the placeholder experiment name with a descriptive one.
    if config.trainer.experiment_name == "experiment":
        config.trainer.experiment_name = "finetune-bert-smt"

    configure_logging(f"logs/{config.trainer.experiment_name}")