Example #1
# Assumes torch and the project-level cfg, model, loss, data, and checkpoint
# helpers are already imported; only main() is shown in each snapshot.
def main():
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    model = PV_RCNN(cfg, preprocessor).cuda()
    dataloader_train = build_train_dataloader(cfg)
    parameters = get_proposal_parameters(model)
    optimizer = torch.optim.Adam(parameters, lr=cfg.TRAIN.LR)
    start_epoch = load_ckpt('./epoch_0.pth', model, optimizer)
    train_model(model, dataloader_train, optimizer, loss_fn, cfg.TRAIN.EPOCHS,
                start_epoch)
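Example #1 optimizes only the proposal stage, so Adam receives a filtered
parameter list. A minimal sketch of what get_proposal_parameters could look
like, assuming the second-stage refinement head can be recognized by a
parameter-name substring (the project's actual helper may select submodules
directly):

def get_proposal_parameters(model):
    """Sketch: keep only first-stage (proposal) parameters.

    The name-based filter is an assumption about how the two stages are
    organized inside the detector.
    """
    return [param for name, param in model.named_parameters()
            if 'refinement' not in name and param.requires_grad]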
Example #2
def main():
    """TODO: Trainer class to manage objects."""
    model = Second(cfg).cuda()
    parameters = model.parameters()
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    optimizer = torch.optim.Adam(parameters, lr=0.01)
    start_epoch = load_ckpt('./ckpts/epoch_10.pth', model, optimizer)
    scheduler = build_lr_scheduler(optimizer, cfg, start_epoch,
                                   len(dataloader))
    train_model(model, dataloader, optimizer, scheduler, loss_fn,
                cfg.TRAIN.EPOCHS, start_epoch)
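build_lr_scheduler itself is not shown in these snapshots. A plausible sketch,
assuming it wraps the same one-cycle policy that Example #4 later builds
inline; the max_lr choice and the fast-forward for resumed runs are likewise
assumptions:

import torch

def build_lr_scheduler(optimizer, cfg, start_epoch, steps_per_epoch):
    """Sketch: one-cycle schedule spanning the full training run."""
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer,
        max_lr=cfg.TRAIN.LR,  # Example #4 hard-codes 3e-3 instead
        steps_per_epoch=steps_per_epoch,
        epochs=cfg.TRAIN.EPOCHS)
    # Skip the steps already taken before the checkpoint was written.
    for _ in range(start_epoch * steps_per_epoch):
        scheduler.step()
    return scheduler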
Example #3
def main():
    """TODO: Trainer class to manage objects."""
    # model = Second(cfg).cuda()
    model = PV_RCNN(cfg).cuda()
    print("Number parameters: ", sum(p.numel() for p in model.parameters() if p.requires_grad))
    parameters = model.parameters()
    loss_fn = ProposalLoss(cfg)
    # loss_fn = OverallLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    optimizer = torch.optim.Adam(parameters, lr=0.01)
    start_epoch = load_ckpt('./ckpts/epoch_10.pth', model, optimizer)
    scheduler = build_lr_scheduler(optimizer, cfg, start_epoch, len(dataloader))
    train_model(model, dataloader, optimizer, scheduler, loss_fn,
                cfg.TRAIN.EPOCHS, start_epoch)
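Every snapshot resumes with load_ckpt(path, model, optimizer) and uses the
return value as the starting epoch. A sketch of such a helper; the checkpoint
keys and the fall-back to epoch 0 when the file is missing are assumptions:

import os
import torch

def load_ckpt(path, model, optimizer):
    """Sketch: restore training state and return the epoch to resume from."""
    if not os.path.isfile(path):
        return 0  # no checkpoint yet: start from scratch
    ckpt = torch.load(path)
    model.load_state_dict(ckpt['state_dict'])
    optimizer.load_state_dict(ckpt['optimizer'])
    return ckpt['epoch'] + 1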
Example #4
def main():
    """TODO: Trainer class to manage objects."""
    model = PV_RCNN(cfg).cuda()
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    parameters = get_proposal_parameters(model)
    optimizer = torch.optim.Adam(parameters, lr=cfg.TRAIN.LR)
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer,
        max_lr=3e-3,
        steps_per_epoch=len(dataloader),
        epochs=cfg.TRAIN.EPOCHS)
    start_epoch = load_ckpt('./ckpts/epoch_8.pth', model, optimizer)
    train_model(model, dataloader, optimizer, scheduler, loss_fn,
                cfg.TRAIN.EPOCHS, start_epoch)
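For reference, a minimal sketch of the train_model loop these scripts drive,
matching the signature used from Example #2 onward; the batch handling, the
loss interface, and the saved checkpoint layout (mirroring the
'./ckpts/epoch_N.pth' paths above) are assumptions:

import torch

def train_model(model, dataloader, optimizer, scheduler, loss_fn,
                epochs, start_epoch):
    """Sketch: per-batch optimization with a step-wise LR schedule."""
    model.train()
    for epoch in range(start_epoch, epochs):
        for batch in dataloader:
            optimizer.zero_grad()
            loss = loss_fn(model(batch))['loss']  # 'loss' key is an assumption
            loss.backward()
            optimizer.step()
            scheduler.step()  # one-cycle schedules advance once per batch
        torch.save(dict(state_dict=model.state_dict(),
                        optimizer=optimizer.state_dict(),
                        epoch=epoch),
                   f'./ckpts/epoch_{epoch}.pth')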