Example #1
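# Assumed context (not shown in the snippet): `import json` and
# `from torch.utils import data`, plus the project-local helpers
# `load_imagenet` and `give_me_acc`.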
def generate_metrics(srcnn, classifier, test_batch_size, name, dataset):
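    """Evaluate the enhancer (srcnn) + classifier pipeline on the clean, train,
    and val splits of the chosen dataset, then dump the accuracies to
    saved_models/{name}/best_acc.json."""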
    if dataset == "ilsvrc":
        cln_dataloader = data.DataLoader(
            load_imagenet(crpMode="clean", tsfrmMode="eval"),
            batch_size=test_batch_size,
            shuffle=False,
            num_workers=16,
            pin_memory=True,
            drop_last=False,
        )

        tng_dataloader = data.DataLoader(
            load_imagenet(crpMode="train", tsfrmMode="eval"),
            batch_size=test_batch_size,
            shuffle=False,
            num_workers=16,
            pin_memory=True,
            drop_last=False,
        )

        val_dataloader = data.DataLoader(
            load_imagenet(crpMode="val", tsfrmMode="eval"),
            batch_size=test_batch_size,
            shuffle=False,
            num_workers=16,
            pin_memory=True,
            drop_last=False,
        )
    else:
        raise NotImplementedError

    print("... validating cln_dataloader")
    cln_acc = give_me_acc(srcnn, classifier, cln_dataloader)
    print("... validating tng_dataloader")
    tng_acc, tng_acc_dict = give_me_acc(srcnn,
                                        classifier,
                                        tng_dataloader,
                                        plot_acc=True)
    print("... validating val_dataloader")
    val_acc, val_acc_dict = give_me_acc(srcnn,
                                        classifier,
                                        val_dataloader,
                                        plot_acc=True)

    result_dict = {
        "cln_acc": cln_acc,
        "tng_acc": tng_acc,
        "val_acc": val_acc,
    }

    with open(f"saved_models/{name}/best_acc.json", "w") as fp:
        json.dump(result_dict, fp)
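
# A minimal, hypothetical invocation sketch. SKUNet, the checkpoint path, and
# the experiment name "exp0" are assumptions for illustration, not part of the
# original snippet.
srcnn = SKUNet()
srcnn.load_state_dict(torch.load("saved_models/exp0/srcnn.ckpt.pt"))
srcnn = srcnn.cuda().eval()

classifier = models.resnet50(pretrained=True).cuda().eval()
generate_metrics(srcnn, classifier, test_batch_size=64, name="exp0", dataset="ilsvrc")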
Example #2
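# Assumed context (not shown in the snippet): `import json`, `import torch`,
# `from torch import nn`, `from torch.utils import data`, and
# `from torchvision import models`, plus the project-local `load_cub`,
# `load_imagenet`, `SKUNet`, and `validate`.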
def main(args):
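    """Build eval dataloaders for the chosen dataset, load the pretrained
    enhancer (srcnn) and classifier, and report accuracy on each split."""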
    if args.dataset == "cub":
        cln_dataloader = data.DataLoader(
            load_cub(crpMode="clean", tsfrmMode="eval", ten_crop_eval=False),
            batch_size=args.test_batch_size,
            shuffle=False,
            num_workers=32,
            pin_memory=True,
            drop_last=False,
        )

        tng_dataloader = data.DataLoader(
            load_cub(crpMode="train", tsfrmMode="eval", ten_crop_eval=False),
            batch_size=args.test_batch_size,
            shuffle=False,
            num_workers=32,
            pin_memory=True,
            drop_last=False,
        )

        val_dataloader = data.DataLoader(
            load_cub(crpMode="test", tsfrmMode="eval", ten_crop_eval=False),
            batch_size=args.test_batch_size,
            shuffle=False,
            num_workers=32,
            pin_memory=True,
            drop_last=False,
        )
    else:
        cln_dataloader = data.DataLoader(
            load_imagenet(crpMode="clean", tsfrmMode="eval"),
            batch_size=args.test_batch_size,
            shuffle=False,
            num_workers=32,
            pin_memory=True,
            drop_last=False,
        )

        tng_dataloader = data.DataLoader(
            load_imagenet(crpMode="train", tsfrmMode="eval"),
            batch_size=args.test_batch_size,
            shuffle=False,
            num_workers=32,
            pin_memory=True,
            drop_last=False,
        )

        val_dataloader = data.DataLoader(
            load_imagenet(crpMode="test", tsfrmMode="eval"),
            batch_size=args.test_batch_size,
            shuffle=False,
            num_workers=32,
            pin_memory=True,
            drop_last=False,
        )
    print(f"cln_dataloader load complete with ({len(cln_dataloader)}/{len(cln_dataloader.dataset)})")
    print(f"tng_dataloader load complete with ({len(tng_dataloader)}/{len(tng_dataloader.dataset)})")
    print(f"val_dataloader load complete with ({len(val_dataloader)}/{len(val_dataloader.dataset)})")

    new_weights = {}
    if args.enhancer in ("ours", "mse"):
        srcnn = SKUNet()
        weights = torch.load(args.srcnn_pretrained_path)

    elif args.enhancer == "owan":
        from owan_model import Network
        srcnn = Network(16, 10, nn.L1Loss())
        weights = torch.load(args.srcnn_pretrained_path)

    else:
        # Without this branch, `weights` would be undefined below for an
        # unrecognized enhancer.
        raise NotImplementedError

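    # Checkpoints saved from an nn.DataParallel model prefix every key with
    # "module."; strip the prefix so the weights load into the bare model.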
    for k, v in weights.items():
        if "module." in k:
            new_weights[k.replace("module.", "")] = v
        else:
            new_weights[k] = v
    srcnn.load_state_dict(new_weights, strict=True)

    if args.mgpu:
        srcnn = nn.DataParallel(srcnn)
    srcnn.eval()
    srcnn.cuda()

    if args.recog == "r50":
        classifier = models.resnet50(pretrained=True)
    elif args.recog == "r101":
        classifier = models.resnet101(pretrained=True)
    elif args.recog == "v16":
        classifier = models.vgg16(pretrained=True)
    else:
        raise NotImplementedError

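    # For CUB-200, swap the ImageNet head for a 200-way classifier and load the
    # fine-tuned weights; any "model." prefix in the checkpoint keys is stripped
    # below before loading.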
    if args.dataset == "cub":
        if args.recog == "v16":
            classifier.classifier[6] = nn.Linear(4096, 200)
            weights = torch.load("saved_models/base_models/vgg16_on_clean.ckpt.pt")

        elif args.recog == "r50":
            classifier.fc = nn.Linear(2048, 200)
            weights = torch.load("saved_models/base_models/resnet50_on_clean.ckpt.pt")

        elif args.recog == "r101":
            classifier.fc = nn.Linear(2048, 200)
            weights = torch.load("saved_models/base_models/resnet101_on_clean.ckpt.pt")

        new_weights = {}
        for k, v in weights.items():
            if "model." in k:
                new_weights[k.replace("model.", "")] = v
            else:
                new_weights[k] = v
        classifier.load_state_dict(new_weights, strict=True)

    if args.mgpu:
        classifier = nn.DataParallel(classifier)
    classifier.cuda()
    classifier.eval()

    print("... validating cln_dataloader")
    cln_acc = validate(srcnn, classifier, cln_dataloader, mode=args.dataset)
    print("... validating tng_dataloader")
    tng_acc = validate(srcnn, classifier, tng_dataloader, mode=args.dataset, vis=args.vis)
    print("... validating val_dataloader")
    val_acc = validate(srcnn, classifier, val_dataloader, mode=args.dataset)
    result_dict = {
        "cln_acc": cln_acc,
        "tng_acc": tng_acc,
        "val_acc": val_acc,
    }
    print(result_dict)

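    # Tag the metrics file with the checkpoint's parent directory name.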
    name = args.srcnn_pretrained_path.split("/")[-2]
    with open(f"metrics/{name}_{args.dataset}.json", "w") as fp:
        json.dump(result_dict, fp)
Example #3
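# Assumed context (not shown in the snippet): `parser = argparse.ArgumentParser()`
# with the remaining options (seed, batch_size, test_batch_size, ...) already
# registered, plus `import torch`, `from torch.utils import data`, and the
# project-local `load_imagenet`.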
parser.add_argument("--dataset", type=str, required=True, choices=["ilsvrc"])
parser.add_argument("--backbone", type=str, choices=["r18", "r50"])

parser.set_defaults(tanh=False)
parser.set_defaults(e2e=False)
parser.set_defaults(residual=False)
parser.set_defaults(classifier_tuning=False)
parser.set_defaults(load_classifier=False)
opt = parser.parse_args()

torch.manual_seed(opt.seed)
torch.cuda.manual_seed(opt.seed)

if opt.dataset == "ilsvrc":
    tng_dataloader = data.DataLoader(
        load_imagenet(crpMode="train", tsfrmMode="train"),
        batch_size=opt.batch_size,
        shuffle=True,
        num_workers=36,
        pin_memory=True,
        drop_last=False,
    )

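    # Note: validation here reuses the "train" corruption split, switching only
    # the transforms to eval mode.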
    val_dataloader = data.DataLoader(
        load_imagenet(crpMode="train", tsfrmMode="eval"),
        batch_size=opt.test_batch_size,
        shuffle=False,
        num_workers=36,
        pin_memory=True,
        drop_last=False,
    )