def get_model(cfg):
    cfg.merge_from_file('../configs/second/car.yaml')
    anchors = AnchorGenerator(cfg).anchors
    preprocessor = Preprocessor(cfg)
    model = Second(cfg).cuda().eval()
    ckpt = torch.load('../pvrcnn/ckpts/epoch_12.pth')['state_dict']
    model.load_state_dict(ckpt, strict=True)
    return model, preprocessor, anchors
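# Usage sketch (assumption, not part of the original file): get_model wires up the
# config, anchors, preprocessor, and a checkpointed Second network for inference.
# Assumes cfg is the repo-wide yacs config and that the checkpoint path above exists.
model, preprocessor, anchors = get_model(cfg)
assert not model.training  # .eval() was applied inside get_model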
    return scheduler


def main():
    """TODO: Trainer class to manage objects."""
    # model = Second(cfg).cuda()
    model = PV_RCNN(cfg).cuda()
    print("Number of parameters: ", sum(p.numel() for p in model.parameters() if p.requires_grad))
    parameters = model.parameters()
    loss_fn = ProposalLoss(cfg)
    # loss_fn = OverallLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    optimizer = torch.optim.Adam(parameters, lr=0.01)
    start_epoch = load_ckpt('./ckpts/epoch_10.pth', model, optimizer)
    scheduler = build_lr_scheduler(optimizer, cfg, start_epoch, len(dataloader))
    train_model(model, dataloader, optimizer, scheduler, loss_fn, cfg.TRAIN.EPOCHS, start_epoch)


if __name__ == '__main__':
    try:
        multiprocessing.set_start_method('spawn')
    except RuntimeError:
        pass
    global plotter
    plotter = VisdomLinePlotter(env='pvrcnn_testing')
    # cfg.merge_from_file('../configs/second/car.yaml')
    cfg.merge_from_file('../configs/pvrcnn/car.yaml')
    main()
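# Sketch of what load_ckpt is assumed to do, inferred from its call sites
# (path, model, optimizer) -> start_epoch. The 'optimizer' and 'epoch' checkpoint
# keys are assumptions; only 'state_dict' is confirmed by get_model above.
import os
import torch

def load_ckpt(fpath, model, optimizer):
    if not os.path.isfile(fpath):
        return 0  # nothing to resume from
    ckpt = torch.load(fpath)
    model.load_state_dict(ckpt['state_dict'])
    optimizer.load_state_dict(ckpt['optimizer'])
    return ckpt['epoch'] + 1  # resume at the epoch after the saved one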
    return model.parameters()


def main():
    """TODO: Trainer class to manage objects."""
    model = Second(cfg).cuda()
    parameters = model.parameters()
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    optimizer = torch.optim.Adam(parameters, lr=0.01)
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer, max_lr=0.01, steps_per_epoch=len(dataloader), epochs=cfg.TRAIN.EPOCHS)
    start_epoch = load_ckpt('./ckpts/epoch_5.pth', model, optimizer)
    train_model(model, dataloader, optimizer, scheduler, loss_fn, cfg.TRAIN.EPOCHS, start_epoch)


if __name__ == '__main__':
    try:
        multiprocessing.set_start_method('spawn')
    except RuntimeError:
        pass
    global plotter
    plotter = VisdomLinePlotter(env='second')
    cfg.merge_from_file('../configs/second/car.yaml')
    main()
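# Note: this variant rebuilds OneCycleLR from scratch, so a resumed run restarts the
# LR schedule at step 0 even when start_epoch > 0. A hedged sketch of a checkpoint
# writer that would also let the schedule resume (key names are assumptions; only
# 'state_dict' is confirmed elsewhere in these scripts):
import torch

def save_ckpt(fpath, epoch, model, optimizer, scheduler):
    torch.save({
        'epoch': epoch,
        'state_dict': model.state_dict(),
        'optimizer': optimizer.state_dict(),
        'scheduler': scheduler.state_dict(),  # restore via scheduler.load_state_dict on resume
    }, fpath)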
def main():
    """TODO: Trainer class to manage objects."""
    model = PV_RCNN(cfg).cuda()
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    parameters = get_proposal_parameters(model)
    optimizer = torch.optim.Adam(parameters, lr=cfg.TRAIN.LR)
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer, max_lr=3e-3, steps_per_epoch=len(dataloader), epochs=cfg.TRAIN.EPOCHS)
    start_epoch = load_ckpt('./ckpts/epoch_8.pth', model, optimizer)
    train_model(model, dataloader, optimizer, scheduler, loss_fn, cfg.TRAIN.EPOCHS, start_epoch)


from multiprocessing import set_start_method

if __name__ == '__main__':
    try:
        set_start_method('spawn')
    except RuntimeError:
        pass
    global plotter
    plotter = VisdomLinePlotter(env='training')
    cfg.merge_from_file('../configs/car_lite.yaml')
    main()
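# Sketch of get_proposal_parameters (assumption): the fragment in the last variant
# below suggests it freezes the refinement head so only the proposal stage trains;
# frozen parameters never receive gradients, so Adam simply skips them.
def get_proposal_parameters(model):
    for p in model.refinement_layer.parameters():
        p.requires_grad = False
    return model.parameters()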
        steps_per_epoch=N, epochs=cfg.TRAIN.EPOCHS, last_epoch=last_epoch)
    return scheduler


def main():
    """TODO: Trainer class to manage objects."""
    model = Second(cfg).cuda()
    parameters = model.parameters()
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    optimizer = torch.optim.Adam(parameters, lr=0.01)
    start_epoch = load_ckpt('./ckpts/carla/epoch_10.pth', model, optimizer)
    scheduler = build_lr_scheduler(optimizer, cfg, start_epoch, len(dataloader))
    train_model(model, dataloader, optimizer, scheduler, loss_fn, cfg.TRAIN.EPOCHS, start_epoch)


if __name__ == '__main__':
    try:
        multiprocessing.set_start_method('spawn')
    except RuntimeError:
        pass
    global plotter
    plotter = VisdomLinePlotter(env='carla')
    cfg.merge_from_file('../configs/carla/car.yaml')
    main()
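# Possible shape of build_lr_scheduler, reconstructed around the tail visible above.
# The max_lr value and the last_epoch computation are assumptions; passing a
# non-default last_epoch relies on load_ckpt having already restored the optimizer
# state (including the keys a previous OneCycleLR wrote into its param groups).
import torch

def build_lr_scheduler(optimizer, cfg, start_epoch, N):
    last_epoch = start_epoch * N - 1  # -1 means a fresh schedule when start_epoch == 0
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer, max_lr=cfg.TRAIN.LR, steps_per_epoch=N,
        epochs=cfg.TRAIN.EPOCHS, last_epoch=last_epoch)
    return scheduler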
    for p in model.refinement_layer.parameters():
        p.requires_grad = False
    return model.parameters()


def main():
    """TODO: Trainer class to manage objects."""
    model = PV_RCNN(cfg).cuda()
    loss_fn = ProposalLoss(cfg)
    preprocessor = TrainPreprocessor(cfg)
    dataloader = build_train_dataloader(cfg, preprocessor)
    parameters = get_proposal_parameters(model)
    optimizer = torch.optim.Adam(parameters, lr=cfg.TRAIN.LR, weight_decay=1e-3)
    scheduler = torch.optim.lr_scheduler.OneCycleLR(
        optimizer, max_lr=3e-3, steps_per_epoch=len(dataloader), epochs=cfg.TRAIN.EPOCHS)
    start_epoch = load_ckpt('./ckpts/epoch_31.pth', model, optimizer)
    train_model(model, dataloader, optimizer, scheduler, loss_fn, cfg.TRAIN.EPOCHS, start_epoch)


if __name__ == '__main__':
    global plotter
    plotter = VisdomLinePlotter(env='training')
    cfg.merge_from_file('../configs/all.yaml')
    main()
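# Sketch of the VisdomLinePlotter helper these scripts assume (its definition is not
# shown above); a common minimal wrapper over the visdom line API, with the plot()
# signature being an assumption.
import numpy as np
import visdom

class VisdomLinePlotter:
    def __init__(self, env='main'):
        self.viz = visdom.Visdom()
        self.env = env
        self.plots = {}

    def plot(self, var_name, split_name, title, x, y):
        # Create the window on first use, then append points to the existing trace.
        if var_name not in self.plots:
            self.plots[var_name] = self.viz.line(
                X=np.array([x]), Y=np.array([y]), env=self.env,
                opts=dict(legend=[split_name], title=title, xlabel='step', ylabel=var_name))
        else:
            self.viz.line(X=np.array([x]), Y=np.array([y]), env=self.env,
                          win=self.plots[var_name], name=split_name, update='append')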