Ejemplo n.º 1
0
def main(args):
    """Train a PedalNet model on GPU(s) using the supplied hparams namespace."""
    net = PedalNet(args)
    # NOTE: when training on TPUs in the Colab notebook, construct the trainer
    # with tpu_cores=args.tpu_cores alongside gpus/row_log_interval instead.
    trainer = pl.Trainer(
        max_epochs=args.max_epochs,
        gpus=args.gpus,
        row_log_interval=100,
    )
    trainer.fit(net)
Ejemplo n.º 2
0
def main(args):
    """
    This trains the PedalNet model to match the output data from the input data.

    When you resume training from an existing model, you can override hparams such as
        max_epochs, batch_size, or learning_rate. Note that changing num_channels,
        dilation_depth, num_repeat, or kernel_size will change the shape of the WaveNet
        model and is not advised.

    """
    if args.resume_training != "":
        model = PedalNet.load_from_checkpoint(args.resume_training)
        # Check for any hparams overridden by user on the command line and update
        # the loaded model's hparams accordingly. Flags are expected in the form
        # "--name=value"; anything else is skipped rather than crashing.
        for arg in sys.argv[1:]:
            if not arg.startswith("--") or "=" not in arg:
                continue
            name, _, value = arg[2:].partition("=")
            if name in ("resume_training", "cpu", "tpu_cores"):
                continue
            if name in model.hparams:
                # learning_rate is the only float hparam; the rest are ints.
                model.hparams[name] = float(value) if name == "learning_rate" else int(value)
                # BUGFIX: original line was corrupted ("..."******"=", ...) and
                # would not parse; restored the intended diagnostic print.
                print("Hparam overridden by user: ", name, "=", value, "\n")
        if args.cpu == 0:
            trainer = pl.Trainer(
                resume_from_checkpoint=args.resume_training,
                gpus=args.gpus,
                log_every_n_steps=100,
                max_epochs=args.max_epochs,
            )
        else:
            trainer = pl.Trainer(
                resume_from_checkpoint=args.resume_training,
                log_every_n_steps=100,
                max_epochs=args.max_epochs,
            )
        print("\nHparams for continued model training:\n")
        print(model.hparams, "\n")
    else:
        model = PedalNet(args)
        if args.cpu == 0:
            trainer = pl.Trainer(
                max_epochs=args.max_epochs,
                gpus=args.gpus,
                log_every_n_steps=100,
            )
            # The following line is for use with the Colab notebook when training on TPUs.
            # Comment out the above line and uncomment the below line to use.

            # max_epochs=args.max_epochs, tpu_cores=args.tpu_cores, gpus=args.gpus, log_every_n_steps=100
        else:
            trainer = pl.Trainer(
                max_epochs=args.max_epochs,
                log_every_n_steps=100,
            )
    trainer.fit(model)
Ejemplo n.º 3
0
def main(args):
    """
    This trains the PedalNet model to match the output data from the input data.

    When you resume training from an existing model, you can override hparams such as
        max_epochs, batch_size, or learning_rate. Note that changing num_channels,
        dilation_depth, num_repeat, or kernel_size will change the shape of the WaveNet
        model and is not advised.

    """
    prepare(args)
    net = PedalNet(vars(args))

    # Resume from the saved checkpoint only when explicitly requested.
    checkpoint = args.model if args.resume else None
    # GPUs are disabled whenever CPU-only or TPU training is selected.
    gpu_setting = None if (args.cpu or args.tpu_cores) else args.gpus

    trainer = pl.Trainer(
        resume_from_checkpoint=checkpoint,
        gpus=gpu_setting,
        tpu_cores=args.tpu_cores,
        log_every_n_steps=100,
        max_epochs=args.max_epochs,
    )
    trainer.fit(net)
    trainer.save_checkpoint(args.model)
Ejemplo n.º 4
0
def main(args):
    """Train a PedalNet model on TPU cores with the given hparams namespace."""
    net = PedalNet(args)
    trainer = pl.Trainer(
        max_epochs=args.max_epochs,
        tpu_cores=args.tpu_cores,
        row_log_interval=100,
    )
    trainer.fit(net)
Ejemplo n.º 5
0
def main(args):
    """Train a PedalNet model on GPU(s); hparams are passed as a plain dict."""
    net = PedalNet(vars(args))
    trainer = pl.Trainer(
        max_epochs=args.max_epochs,
        gpus=args.gpus,
        row_log_interval=100,
    )
    trainer.fit(net)