# NOTE(review): this line looks like a whitespace-mangled, TRUNCATED duplicate
# of the training-setup code on the next line — its first statement is cut
# mid-`parser.add_argument(...)` call, so it is not valid Python on its own.
# It also diverges from the line below (logs `args` instead of `args.message`,
# defines `eval_history`, and lacks the CenterLoss block) — presumably an older
# revision left behind by a bad merge/paste; confirm which variant is current
# before deleting. Kept byte-identical here.
type=str, help='pretrained model checkpoint') parser.add_argument('--message', default='message', type=str, help='pretrained model checkpoint') parser.add_argument('--epochs', default=101, type=int, help='train epochs') parser.add_argument('--train', default=True, type=bool, help='train') args = parser.parse_args() save_path = args.save_path + f'{args.message}_{time_str}' if not os.path.exists(save_path): os.mkdir(save_path) logger = Logger(f'{save_path}/log.log') logger.Print(args) train_data, val_data, test_data = load_cisia_surf(train_size=args.batch_size, test_size=args.test_size) model = Model(pretrained=False, num_classes=2) criterion = nn.CrossEntropyLoss() optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9, weight_decay=5e-4) scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.95) if use_cuda: model = model.cuda() criterion = criterion.cuda() eval_history = []
# --- Training setup: CLI arguments, logging, data, model, losses, optimizers ---
# NOTE(review): `time_str`, `Logger`, `load_cisia_surf`, `Model`, `CenterLoss`
# and `use_cuda` are defined elsewhere in this file/project — TODO confirm.


def _str2bool(value):
    """Parse a command-line boolean flag value.

    Fixes the original ``type=bool`` bug: ``bool("False")`` is ``True``
    because any non-empty string is truthy, so ``--train False`` used to
    silently enable training.

    Accepts true/false, yes/no, t/f, y/n, 1/0 (case-insensitive).
    Raises argparse.ArgumentTypeError for anything else.
    """
    if isinstance(value, bool):
        return value
    lowered = str(value).strip().lower()
    if lowered in ('true', 'yes', 't', 'y', '1'):
        return True
    if lowered in ('false', 'no', 'f', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError(f'invalid boolean value: {value!r}')


parser = argparse.ArgumentParser(description='face anti-spoofing')
# int defaults written as ints (the original string defaults '128'/'64' were
# converted by argparse via `type=int` anyway; plain ints are clearer).
parser.add_argument('--batch-size', default=128, type=int, help='train batch size')
parser.add_argument('--test-size', default=64, type=int, help='test batch size')
parser.add_argument('--save-path', default='./logs/', type=str, help='log save path')
parser.add_argument('--checkpoint', default='model.pth', type=str, help='pretrained model checkpoint')
parser.add_argument('--message', default='message', type=str, help='run tag appended to the log directory name')
parser.add_argument('--epochs', default=101, type=int, help='train epochs')
# was `type=bool` (any non-empty string -> True); default behavior unchanged.
parser.add_argument('--train', default=True, type=_str2bool, help='train (true/false)')
args = parser.parse_args()

# Per-run log directory: <save-path>/<message>_<timestamp>.
save_path = args.save_path + f'{args.message}_{time_str}'
# makedirs(exist_ok=True) replaces the race-prone exists()+mkdir pair and also
# creates the parent ./logs/ directory when it is missing.
os.makedirs(save_path, exist_ok=True)

logger = Logger(f'{save_path}/log.log')
logger.Print(args.message)

# Data loaders sized from the CLI arguments.
train_data, val_data, test_data = load_cisia_surf(train_size=args.batch_size,
                                                  test_size=args.test_size)

# Binary (real vs. spoof) classifier trained from scratch.
model = Model(pretrained=False, num_classes=2)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.9, weight_decay=5e-4)
scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.95)

# Auxiliary center loss with its own optimizer/scheduler (its class centers are
# learnable parameters). NOTE(review): the misspelled names `optimzer4ct` /
# `scheduler4ct` are kept — the training loop elsewhere presumably refers to
# them; rename everywhere at once or not at all.
ct_loss = CenterLoss(num_classes=2, feat_dim=512)
optimzer4ct = optim.SGD(ct_loss.parameters(), lr=0.01, momentum=0.9, weight_decay=5e-4)
scheduler4ct = lr_scheduler.ExponentialLR(optimzer4ct, gamma=0.95)

if use_cuda:
    model = model.cuda()
    criterion = criterion.cuda()