# --- Training-setup fragment (collapsed onto one line; original indentation lost) ---
# Builds the BN-momentum scheduler and initial counters, then (in an `else:` branch
# whose matching `if` is outside this view — presumably "if args.checkpoint is None")
# resumes epoch/loss state from a checkpoint and rebuilds both schedulers with
# last_epoch=start_epoch, before constructing the pt_utils.Trainer.
# NOTE(review): the Trainer(...) call is cut off here — its closing paren is not in view.
# NOTE(review): checkpoint paths are hard-coded absolute paths and contain apparent
# typos ("pointnet2_smeseg", "poitnet2_semseg_best") — confirm intended filenames
# before relying on these checkpoints being found on resume.
bnm_scheduler = pt_utils.BNMomentumScheduler(model, bnm_lmbd) start_epoch = 1 best_prec = 0 best_loss = 1e10 else: start_epoch, best_loss = pt_utils.load_checkpoint( model, optimizer, filename=args.checkpoint.split(".")[0]) lr_scheduler = lr_sched.LambdaLR(optimizer, lr_lbmd, last_epoch=start_epoch) bnm_scheduler = pt_utils.BNMomentumScheduler(model, bnm_lmbd, last_epoch=start_epoch) model_fn = model_fn_decorator(nn.CrossEntropyLoss()) # viz = pt_utils.VisdomViz(port=args.visdom_port) # viz.text(str(vars(args))) trainer = pt_utils.Trainer( model, model_fn, optimizer, checkpoint_name= "/kitti_semantic/Pointnet2_PyTorch-master/pointnet2/train/checkpoints/pointnet2_smeseg", best_name= "/kitti_semantic/Pointnet2_PyTorch-master/pointnet2/train/checkpoints/poitnet2_semseg_best", lr_scheduler=lr_scheduler, bnm_scheduler=bnm_scheduler # viz=viz
def test_xyz():
    """Smoke-test Pointnet2MSG on xyz input via the shared semseg test helper."""
    net = Pointnet2MSG(3, input_channels=3)
    loss_model_fn = model_fn_decorator(nn.CrossEntropyLoss())
    pytest.helpers.semseg_test_xyz(net, loss_model_fn)
# --- Training-setup fragment (collapsed onto one line; original indentation lost) ---
# Begins mid-expression: these are the trailing arguments of a
# pt_utils.load_checkpoint( ... ) call whose opening is outside this view.
# On a successful resume, unpacks (it, start_epoch, best_loss) and rebuilds both
# schedulers from the restored iteration counter; then builds a class-weighted
# CrossEntropyLoss from train_set.get_weights() and selects a visualizer.
# NOTE(review): `args.visdom = False` unconditionally overrides the CLI flag, so the
# VisdomViz branch below is dead code — confirm this was a deliberate debug change.
# NOTE(review): the trailing pt_utils.Trainer( call is cut off — closing paren not in view.
model, optimizer, filename=args.checkpoint.split(".")[0]) if checkpoint_status is not None: it, start_epoch, best_loss = checkpoint_status lr_scheduler = lr_sched.LambdaLR(optimizer, lr_lambda=lr_lbmd, last_epoch=it) bnm_scheduler = pt_utils.BNMomentumScheduler(model, bn_lambda=bnm_lmbd, last_epoch=it) print("Defined even more") it = max(it, 0) # for the initialize value of `trainer.train` weights = train_set.get_weights() model_fn = model_fn_decorator( nn.CrossEntropyLoss(weight=weights.float()).cuda()) args.visdom = False if args.visdom: viz = pt_utils.VisdomViz(port=args.visdom_port) else: viz = pt_utils.CmdLineViz() print("initialized visdom (not)") viz.text(pprint.pformat(vars(args))) if not osp.isdir("checkpoints"): os.makedirs("checkpoints") trainer = pt_utils.Trainer( model, model_fn, optimizer,
# --- Training-setup fragment (collapsed onto one line; original indentation lost) ---
# Resumes (it, start_epoch, best_loss) from an optional checkpoint and rebuilds the
# LR / BN-momentum schedulers at that iteration. Loss selection: if args.weights names
# a file, loads per-class weights via np.loadtxt into a weighted CrossEntropyLoss on
# the GPU; otherwise uses CrossEntropyLoss(ignore_index=26).
# NOTE(review): 26 is presumably an "unlabeled/ignore" class id for this dataset —
# confirm against the label map; it is not derivable from this fragment.
# NOTE(review): the trailing pt_utils.Trainer( call is cut off — closing paren not in view.
if args.checkpoint is not None: checkpoint_status = pt_utils.load_checkpoint( model, optimizer, filename=args.checkpoint.split(".")[0] ) if checkpoint_status is not None: it, start_epoch, best_loss = checkpoint_status lr_scheduler = lr_sched.LambdaLR(optimizer, lr_lambda=lr_lbmd, last_epoch=it) bnm_scheduler = pt_utils.BNMomentumScheduler( model, bn_lambda=bnm_lmbd, last_epoch=it ) it = max(it, 0) # for the initialize value of `trainer.train` if args.weights!="": weights=torch.from_numpy(np.loadtxt(args.weights)).float().cuda() model_fn = model_fn_decorator(nn.CrossEntropyLoss(weight=weights)) else: model_fn = model_fn_decorator(nn.CrossEntropyLoss(ignore_index=26)) if args.visdom: viz = pt_utils.VisdomViz(port=args.visdom_port) else: viz = pt_utils.CmdLineViz() viz.text(pprint.pformat(vars(args))) if not osp.isdir("checkpoints"): os.makedirs("checkpoints") trainer = pt_utils.Trainer( model,
# --- Training-setup fragment (collapsed onto one line; original indentation lost) ---
# Same resume pattern as the other variants: restore (it, start_epoch, best_loss)
# from the checkpoint and rebuild both schedulers at iteration `it`. The plain
# CrossEntropyLoss is commented out in favor of a 2-class FocalLoss with per-class
# alpha [1.0, 3.0] and gamma=2.
# NOTE(review): alpha=[1.0, 3.0] up-weights class 1 — presumably to counter class
# imbalance; confirm against the actual class frequencies.
# NOTE(review): the trailing pt_utils.Trainer( call is cut off — closing paren not in view.
checkpoint_status = pt_utils.load_checkpoint( model, optimizer, filename=args.checkpoint.split(".")[0]) if checkpoint_status is not None: it, start_epoch, best_loss = checkpoint_status lr_scheduler = lr_sched.LambdaLR(optimizer, lr_lambda=lr_lbmd, last_epoch=it) bnm_scheduler = pt_utils.BNMomentumScheduler(model, bn_lambda=bnm_lmbd, last_epoch=it) it = max(it, 0) # for the initialize value of `trainer.train` # model_fn = model_fn_decorator(nn.CrossEntropyLoss()) model_fn = model_fn_decorator(focal_loss.FocalLoss(class_num=2, alpha=torch.cuda.FloatTensor([1.0, 3.0]), \ gamma=2)) if args.visdom: viz = pt_utils.VisdomViz(port=args.visdom_port) else: viz = pt_utils.CmdLineViz() viz.text(pprint.pformat(vars(args))) if not osp.isdir("checkpoints"): os.makedirs("checkpoints") trainer = pt_utils.Trainer( model, model_fn, optimizer,