# Example 1
def main():
    """Entry point: read the YAML config, build the pipeline, and run training."""
    args = get_args()

    # Parse experiment hyperparameters from the config file given on the CLI.
    with open(args.config_path) as config_file:
        hparams = yaml.safe_load(config_file)

    logger = NeptuneLogger(
        api_key=os.environ["NEPTUNE_API_TOKEN"],
        project_name=hparams["project_name"],
        experiment_name=f"{hparams['experiment_name']}",  # Optional,
        tags=["pytorch-lightning", "mlp"],  # Optional,
        upload_source_files=[],
    )

    pipeline = RetinaFaceMask(hparams)

    # Ensure the checkpoint directory exists before the trainer tries to write to it.
    checkpoint_dir = Path(hparams["checkpoint_callback"]["filepath"])
    checkpoint_dir.mkdir(exist_ok=True, parents=True)

    trainer = object_from_dict(
        hparams["trainer"],
        logger=logger,
        checkpoint_callback=object_from_dict(hparams["checkpoint_callback"]),
    )

    trainer.fit(pipeline)
# Example 2
def test_neptune_pickle(tmpdir):
    """Verify that a Trainer configured with a Neptune logger survives pickling."""
    tutils.reset_seed()

    from pytorch_lightning.logging import NeptuneLogger

    neptune = NeptuneLogger(offline_mode=True)

    trainer = Trainer(
        default_save_path=tmpdir,
        max_epochs=1,
        logger=neptune,
    )

    # Round-trip the trainer through pickle; the restored logger must still log.
    restored = pickle.loads(pickle.dumps(trainer))
    restored.logger.log_metrics({"acc": 1.0})
# Example 3
def test_neptune_logger(tmpdir):
    """Verify that basic functionality of neptune logger works."""
    tutils.reset_seed()

    from pytorch_lightning.logging import NeptuneLogger

    hparams = tutils.get_hparams()
    model = LightningTestModel(hparams)
    neptune = NeptuneLogger(offline_mode=True)

    # Run a tiny one-epoch fit (1% of the training data) against the logger.
    trainer = Trainer(
        default_save_path=tmpdir,
        max_epochs=1,
        train_percent_check=0.01,
        logger=neptune,
    )
    result = trainer.fit(model)

    print('result finished')
    assert result == 1, "Training failed"
# Example 4
    def configure_optimizers(self):
        """Return an Adam optimizer over all model parameters (library defaults)."""
        optimizer = torch.optim.Adam(self.parameters())
        return optimizer

    def train_dataloader(self):
        """Build the shuffled training DataLoader over the raw math training set."""
        # Keep the dataset on self so its collate_fn stays reachable after return.
        self.dset = MathDataset("data/raw/math.train")
        return DataLoader(
            self.dset,
            batch_size=64,
            shuffle=True,
            collate_fn=self.dset.collate,
        )

    def val_dataloader(self):
        """Build the (unshuffled) validation DataLoader over the raw math val set."""
        validation_set = MathDataset("data/raw/math.val")
        # Stash the dataset so callers can inspect it after dataloader creation.
        self.val_dset = validation_set
        return DataLoader(
            validation_set,
            batch_size=64,
            shuffle=False,
            collate_fn=validation_set.collate,
        )

if __name__ == "__main__":

    # Train the seq2seq model on GPU 0 with gradient clipping, logging to Neptune.
    seq2seq = SimpleSeq2Seq()
    trainer = pl.Trainer(
        gpus=[0],
        gradient_clip_val=1.0,
        max_epochs=50,
        fast_dev_run=False,
        logger=NeptuneLogger(project_name="mbednarski/seq2seq"),
    )
    trainer.fit(seq2seq)