Example #1
def main(cfg):
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    model = HifiGanModel(cfg=cfg.model, trainer=trainer)
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([epoch_time_logger])
    trainer.fit(model)
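These snippets all follow the pattern of NeMo's TTS training scripts: build a pl.Trainer from the Hydra config, attach NeMo's exp_manager for logging and checkpointing, instantiate the model, extend the callbacks, and call fit(). The imports and the Hydra entry point are omitted above; a minimal sketch of what such a script typically assumes is shown below (module paths and the config name are assumptions and may differ between NeMo versions):

import pytorch_lightning as pl

from nemo.collections.common.callbacks import LogEpochTimeCallback
from nemo.collections.tts.models import HifiGanModel
from nemo.core.config import hydra_runner
from nemo.utils.exp_manager import exp_manager


# "conf"/"hifigan" are placeholders; the real script points at its own YAML config.
@hydra_runner(config_path="conf", config_name="hifigan")
def main(cfg):
    trainer = pl.Trainer(**cfg.trainer)                  # trainer arguments come from cfg.trainer
    exp_manager(trainer, cfg.get("exp_manager", None))   # logging and checkpointing setup
    model = HifiGanModel(cfg=cfg.model, trainer=trainer)
    trainer.callbacks.extend([LogEpochTimeCallback()])   # log wall-clock time per epoch
    trainer.fit(model)


if __name__ == "__main__":
    main()  # hydra_runner injects cfg from the YAML file plus any command-line overrides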
Example #2
def main(cfg):
    trainer = pl.Trainer(plugins=[DDPPlugin(find_unused_parameters=True)], **cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    model = MelGanModel(cfg=cfg.model, trainer=trainer)
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([epoch_time_logger])
    trainer.fit(model)
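Example #2 is the only vocoder script above that configures DDP explicitly: find_unused_parameters=True lets DistributedDataParallel tolerate parameters that receive no gradient on a given step, which happens when generator and discriminator are updated in alternation. In the PyTorch Lightning releases these scripts were written against, the plugin is imported from pytorch_lightning.plugins (newer releases renamed it to DDPStrategy and pass it via strategy=). A hedged sketch, with the distributed settings hard-coded here instead of coming from cfg.trainer:

import pytorch_lightning as pl
from pytorch_lightning.plugins import DDPPlugin  # renamed to DDPStrategy in newer PL releases

# Tolerate parameters that receive no gradient in a given step, at the cost of an
# extra bookkeeping pass per iteration.
trainer = pl.Trainer(
    gpus=2,
    accelerator="ddp",
    plugins=[DDPPlugin(find_unused_parameters=True)],
)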
Example #3
def main(cfg):
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get('exp_manager', None))
    model = TalkNetDursModel(cfg=cfg.model, trainer=trainer)
    lr_logger = pl.callbacks.LearningRateMonitor()
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([lr_logger, epoch_time_logger])
    trainer.fit(model)
Example #4
def main(cfg):
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    model = FastPitchModel(cfg=cfg.model, trainer=trainer)
    lr_logger = pl.callbacks.LearningRateMonitor()
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([lr_logger, epoch_time_logger])
    trainer.fit(model)
Example #5
def main(cfg):
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get('exp_manager', None))
    model = TalkNetPitchModel(cfg=cfg.model, trainer=trainer)
    trainer.callbacks.extend(
        [pl.callbacks.LearningRateMonitor(),
         LogEpochTimeCallback()])  # noqa
    trainer.fit(model)
Example #6
def main(cfg):
    preprocess_linear_specs_dataset(**cfg.model.preprocessor)
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    model = EDMel2SpecModel(cfg=cfg.model, trainer=trainer)
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([epoch_time_logger])
    trainer.fit(model)
Example #7
def main(cfg):
    trainer = pl.Trainer(plugins=[DDPPlugin(find_unused_parameters=True)],
                         **cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    model = FastPitchHifiGanE2EModel(cfg=cfg.model, trainer=trainer)
    lr_logger = pl.callbacks.LearningRateMonitor()
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([lr_logger, epoch_time_logger])
    trainer.fit(model)
Example #8
def main(cfg):
    # Define the Lightning trainer
    trainer = pl.Trainer(**cfg.trainer)
    # exp_manager is a NeMo construct that helps with logging and checkpointing
    exp_manager(trainer, cfg.get("exp_manager", None))
    # Define the Tacotron 2 model; this will construct the model as well as
    # define the training and validation dataloaders
    model = Tacotron2Model(cfg=cfg.model, trainer=trainer)
    # Let's add a few more callbacks
    lr_logger = pl.callbacks.LearningRateMonitor()
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([lr_logger, epoch_time_logger])
    # Call lightning trainer's fit() to train the model
    trainer.fit(model)
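Because main is wrapped by NeMo's hydra_runner, every field these scripts read (cfg.trainer, cfg.exp_manager, cfg.model) comes from a YAML config resolved by Hydra/OmegaConf. Below is a minimal, hypothetical stand-in built in code; the real configs shipped with NeMo (e.g. under examples/tts/conf) carry many more model and dataset fields, and the exact trainer keys depend on the PyTorch Lightning version:

from omegaconf import OmegaConf

# Illustrative values only; not a working Tacotron 2 configuration.
cfg = OmegaConf.create(
    {
        "trainer": {"gpus": 1, "max_epochs": 10},                # forwarded to pl.Trainer(**cfg.trainer)
        "exp_manager": {"exp_dir": "exp", "name": "tacotron2"},  # consumed by exp_manager()
        "model": {},                                             # architecture-specific; omitted here
    }
)

print(OmegaConf.to_yaml(cfg))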
Example #9
def main(cfg):
    if hasattr(cfg.model.optim, 'sched'):
        logging.warning(
            "You are using an optimizer scheduler while finetuning. Are you sure this is intended?"
        )
    if cfg.model.optim.lr > 1e-3 or cfg.model.optim.lr < 1e-5:
        logging.warning("The recommended learning rate for finetuning is 2e-4")
    trainer = pl.Trainer(**cfg.trainer)
    exp_manager(trainer, cfg.get("exp_manager", None))
    model = FastPitchModel(cfg=cfg.model, trainer=trainer)
    model.maybe_init_from_pretrained_checkpoint(cfg=cfg)
    lr_logger = pl.callbacks.LearningRateMonitor()
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([lr_logger, epoch_time_logger])
    trainer.fit(model)
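Example #9 is a finetuning script: maybe_init_from_pretrained_checkpoint inspects the top level of cfg for an init_from_* entry and, if one is present, loads those weights before training starts. The key names and the pretrained model name below are assumptions based on NeMo's ModelPT API and may differ between versions; a hedged sketch of the relevant config fragment:

from omegaconf import OmegaConf

# Hypothetical finetuning fragment; normally exactly one init_from_* key is set.
cfg = OmegaConf.create(
    {
        "init_from_pretrained_model": "tts_en_fastpitch",  # NGC model name; assumption
        # "init_from_nemo_model": "/path/to/model.nemo",   # alternative: a local .nemo file
        "model": {"optim": {"name": "adam", "lr": 2e-4}},   # 2e-4 matches the warning above
    }
)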
Example #10
def main(cfg):
    trainer = pl.Trainer(**cfg.trainer)
    epoch_time_logger = LogEpochTimeCallback()
    trainer.callbacks.extend([epoch_time_logger])
    exp_manager(trainer, cfg.get("exp_manager", None))

    if cfg.resume_from_ckpt is None:
        logging.info("Training UniGlow from scratch")
        model = UniGlowModel(cfg=cfg.model, trainer=trainer)
    else:
        logging.info("Fine-tuning UniGlow from {cfg.resume_from_ckpt}")
        model = UniGlowModel.restore_from(cfg.resume_from_ckpt)
        model.setup_training_data(cfg.model.train_ds)
        model.setup_validation_data(cfg.model.validation_ds)

    trainer.fit(model)