Code example #1
A factory function that returns the network matching model_name (VGG, ResNet, DenseNet, MobileNetV3, or EfficientNet) with the requested number of output classes.
# The snippet references these local model-definition modules but does not show
# their import; the module names below are assumed from the attribute accesses.
import vgg
import resnet
import densenet
import mobilenetv3
import efficientnet


def Modellist(model_name, num_classes=1000):
    if model_name == 'vgg11':
        return vgg.VGG11(num_classes)
    elif model_name == 'vgg13':
        return vgg.VGG13(num_classes)
    elif model_name == 'vgg16':
        return vgg.VGG16(num_classes)
    elif model_name == 'vgg19':
        return vgg.VGG19(num_classes)
    elif model_name == 'resnet18':
        return resnet.ResNet18(num_classes)
    elif model_name == 'resnet34':
        return resnet.ResNet34(num_classes)
    elif model_name == 'resnet50':
        return resnet.ResNet50(num_classes)
    elif model_name == 'resnet101':
        return resnet.ResNet101(num_classes)
    elif model_name == 'resnet152':
        return resnet.ResNet152(num_classes)
    elif model_name == 'densenet121':
        return densenet.DenseNet121(num_classes)
    elif model_name == 'densenet169':
        return densenet.DenseNet169(num_classes)
    elif model_name == 'densenet201':
        return densenet.DenseNet201(num_classes)
    elif model_name == 'densenet161':
        return densenet.DenseNet161(num_classes)
    elif model_name == 'mobilenetv3_small':
        return mobilenetv3.mobilenetv3_small(num_classes)
    elif model_name == 'mobilenetv3_large':
        return mobilenetv3.mobilenetv3_large(num_classes)
    elif model_name == 'efficientnet_b0':
        return efficientnet.efficientnet_b0(num_classes)
    elif model_name == 'efficientnet_b1':
        return efficientnet.efficientnet_b1(num_classes)
    elif model_name == 'efficientnet_b2':
        return efficientnet.efficientnet_b2(num_classes)
    elif model_name == 'efficientnet_b3':
        return efficientnet.efficientnet_b3(num_classes)
    elif model_name == 'efficientnet_b4':
        return efficientnet.efficientnet_b4(num_classes)
    elif model_name == 'efficientnet_b5':
        return efficientnet.efficientnet_b5(num_classes)
    elif model_name == 'efficientnet_b6':
        return efficientnet.efficientnet_b6(num_classes)
    else:
        raise ValueError("The model_name does not exist.")
Code example #2
The constructor of a binary touching / non-touching classifier: it builds a MobileNetV3-Large backbone, loads weights from model_path, and prepares the preprocessing pipeline and the temporal bookkeeping used at inference time.
# Imports implied by the snippet (torchvision.transforms is used as T).
import numpy as np
import torch
import torchvision.transforms as T

from mobilenetv3 import mobilenetv3_large  # assumed import path


class TouchClassifier:  # hypothetical class name; the original shows only __init__
    def __init__(self, model_path, temporal_batch_size=3, img_size=224):

        ### Binary-class
        self.classes = ['non-touching', 'touching']
        self.touching_actions = ['touching']

        # expected input layout: (batch, channels, width, height)
        self.model = mobilenetv3_large()

        self.device = 'cuda' if torch.cuda.is_available() else 'cpu'
        self.model.to(self.device)
        self.model.eval()

        if self.device == 'cuda':  # '==' for string comparison, not 'is'
            state_dict = torch.load(model_path, map_location='cuda:0')
        else:
            state_dict = torch.load(model_path,
                                    map_location=torch.device('cpu'))

        self.model.load_state_dict(state_dict)

        self.temporal_batch_size = temporal_batch_size
        # buffer initialised as a single (img_size, img_size, 3) frame
        self.temporal_batch = np.zeros((img_size, img_size, 3))
        self.transforms = T.Compose([
            T.Resize((img_size, img_size)),
            T.ToTensor(),
            T.Normalize([0.5, 0.5, 0.5], [0.5, 0.5, 0.5])
        ])

        self.pred = None
        self.cnt = 0

        self.temporal_stride = 5
        self.temporal_cnt = 0

        self.tmp = 0
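The snippet above shows only initialisation. Below is a minimal sketch of how a single frame could be classified with these attributes, assuming the hypothetical TouchClassifier wrapper used here and a PIL image as input; the original prediction/update logic is not shown.

import torch
from PIL import Image

clf = TouchClassifier('weights/touch_mbv3_large.pth')   # hypothetical checkpoint path

frame = Image.open('frame_0001.jpg').convert('RGB')     # hypothetical input frame
x = clf.transforms(frame).unsqueeze(0).to(clf.device)   # shape: (1, 3, img_size, img_size)

with torch.no_grad():
    logits = clf.model(x)
    pred = logits.argmax(dim=1).item()

print(clf.classes[pred])  # 'non-touching' or 'touching'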
Code example #3
A training entry point: it selects the dataset and model from args, optionally resumes from a checkpoint, then runs the train/eval loop with SGD, a MultiStepLR schedule, TensorBoard logging, and per-epoch checkpointing of the best top-1 model.
import os

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.tensorboard import SummaryWriter  # tensorboardX.SummaryWriter would also fit
from torchvision import models

# Project-local helpers (load_data, train, eval, FocalLoss, get_model_parameters,
# save_checkpoint and the model constructors) are assumed to be imported elsewhere.


def main(args, logger):
    writer = SummaryWriter(
        log_dir=os.path.join('logs', args.dataset, args.model_name, args.loss))

    train_loader, test_loader = load_data(args)
    if args.dataset == 'CIFAR10':
        num_classes = 10
    elif args.dataset == 'CIFAR100':
        num_classes = 100
    elif args.dataset == 'TINY_IMAGENET':
        num_classes = 200
    elif args.dataset == 'IMAGENET':
        num_classes = 1000
    else:
        # Without this branch, num_classes would be undefined for an unknown dataset.
        raise ValueError('Unknown dataset: {}'.format(args.dataset))

    print('Model name :: {}, Dataset :: {}, Num classes :: {}'.format(
        args.model_name, args.dataset, num_classes))
    if args.model_name == 'mixnet_s':
        model = mixnet_s(num_classes=num_classes, dataset=args.dataset)
        # model = mixnet_s(num_classes=num_classes)
    elif args.model_name == 'mixnet_m':
        model = mixnet_m(num_classes=num_classes, dataset=args.dataset)
    elif args.model_name == 'mixnet_l':
        model = mixnet_l(num_classes=num_classes, dataset=args.dataset)
    elif args.model_name == 'ghostnet':
        model = ghostnet(num_classes=num_classes)
    elif args.model_name == 'ghostmishnet':
        model = ghostmishnet(num_classes=num_classes)
    elif args.model_name == 'ghosthmishnet':
        model = ghosthmishnet(num_classes=num_classes)
    elif args.model_name == 'ghostsharkfinnet':
        model = ghostsharkfinnet(num_classes=num_classes)
    elif args.model_name == 'mobilenetv2':
        model = models.mobilenet_v2(num_classes=num_classes)
    elif args.model_name == 'mobilenetv3_s':
        model = mobilenetv3_small(num_classes=num_classes)
    elif args.model_name == 'mobilenetv3_l':
        model = mobilenetv3_large(num_classes=num_classes)
    else:
        raise NotImplementedError

    if args.pretrained_model:
        filename = 'best_model_' + str(args.dataset) + '_' + str(
            args.model_name) + '_ckpt.tar'
        print('filename :: ', filename)
        file_path = os.path.join('./checkpoint', filename)
        checkpoint = torch.load(file_path)

        model.load_state_dict(checkpoint['state_dict'])
        start_epoch = checkpoint['epoch']
        best_acc1 = checkpoint['best_acc1']
        best_acc5 = checkpoint['best_acc5']
        model_parameters = checkpoint['parameters']
        print(
            'Load model, Parameters: {0}, Start_epoch: {1}, Acc1: {2}, Acc5: {3}'
            .format(model_parameters, start_epoch, best_acc1, best_acc5))
        logger.info(
            'Load model, Parameters: {0}, Start_epoch: {1}, Acc1: {2}, Acc5: {3}'
            .format(model_parameters, start_epoch, best_acc1, best_acc5))
    else:
        start_epoch = 1
        best_acc1 = 0.0
        best_acc5 = 0.0

    if args.cuda:
        if torch.cuda.device_count() > 1:
            model = nn.DataParallel(model)
        model = model.cuda()

    print("Number of model parameters: ", get_model_parameters(model))
    logger.info("Number of model parameters: {0}".format(
        get_model_parameters(model)))

    if args.loss == 'ce':
        criterion = nn.CrossEntropyLoss()
    elif args.loss == 'focal':
        criterion = FocalLoss()
    else:
        raise NotImplementedError
    optimizer = optim.SGD(model.parameters(),
                          lr=args.lr,
                          momentum=args.momentum,
                          weight_decay=args.weight_decay)
    # lr_scheduler = optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer, T_0=10, T_mult=2, eta_min=0.001)
    lr_scheduler = optim.lr_scheduler.MultiStepLR(
        optimizer, milestones=[30, 60], gamma=0.1)  # learning rate decay

    for epoch in range(start_epoch, args.epochs + 1):
        # adjust_learning_rate(optimizer, epoch, args)
        train(model, train_loader, optimizer, criterion, epoch, args, logger,
              writer)
        acc1, acc5 = eval(model, test_loader, criterion, args)
        lr_scheduler.step()

        is_best = acc1 > best_acc1
        best_acc1 = max(acc1, best_acc1)

        if is_best:
            best_acc5 = acc5

        if not os.path.isdir('checkpoint'):
            os.mkdir('checkpoint')
        filename = 'model_' + str(args.dataset) + '_' + str(
            args.model_name) + '_ckpt.tar'
        print('filename :: ', filename)

        parameters = get_model_parameters(model)

        if torch.cuda.device_count() > 1:
            save_checkpoint(
                {
                    'epoch': epoch,
                    'arch': args.model_name,
                    'state_dict': model.module.state_dict(),
                    'best_acc1': best_acc1,
                    'best_acc5': best_acc5,
                    'optimizer': optimizer.state_dict(),
                    'parameters': parameters,
                }, is_best, filename)
        else:
            save_checkpoint(
                {
                    'epoch': epoch,
                    'arch': args.model_name,
                    'state_dict': model.state_dict(),
                    'best_acc1': best_acc1,
                    'best_acc5': best_acc5,
                    'optimizer': optimizer.state_dict(),
                    'parameters': parameters,
                }, is_best, filename)
        writer.add_scalar('Test/Acc1', acc1, epoch)
        writer.add_scalar('Test/Acc5', acc5, epoch)

        print(" Test best acc1:", best_acc1, " acc1: ", acc1, " acc5: ", acc5)
    writer.close()
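main() above reads its configuration from args. The following is a minimal argparse sketch of that interface, inferred from the attributes accessed in the function; the default values are illustrative and not taken from the original code, and train() and eval() may read additional attributes that are not visible in this snippet.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--dataset', default='CIFAR10',
                    choices=['CIFAR10', 'CIFAR100', 'TINY_IMAGENET', 'IMAGENET'])
parser.add_argument('--model_name', default='mixnet_s')
parser.add_argument('--loss', default='ce', choices=['ce', 'focal'])
parser.add_argument('--pretrained_model', action='store_true')   # resume from best checkpoint
parser.add_argument('--cuda', action='store_true')
parser.add_argument('--lr', type=float, default=0.1)             # illustrative default
parser.add_argument('--momentum', type=float, default=0.9)       # illustrative default
parser.add_argument('--weight_decay', type=float, default=1e-4)  # illustrative default
parser.add_argument('--epochs', type=int, default=90)            # illustrative default
args = parser.parse_args()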