Example #1
def get_model(dataset, arch):
    # Build the network for the given dataset/architecture and list the
    # activation layers whose outputs are tracked for coverage.
    model = get_net(name=arch, dt_name=dataset)

    if arch == "convmnist":
        layer_names = ["0/relu1", "1/relu2", "2/relu3"]
        num_layer = 3
    elif arch == "convcifar10":
        layer_names = [
            "0/relu1", "1/relu2", "2/relu3", "3/relu4", "4/relu5", "5/relu6"
        ]
        num_layer = 6
    elif arch == "alexnet":
        layer_names = [
            '0/features/1', '1/features/4', '2/features/7', '3/features/9',
            '4/features/11'
        ]
        num_layer = 5
    elif arch == "vgg":
        layer_names = [
            '0/relu1', '1/relu2', '2/relu3', '3/relu4', '4/relu5', '5/relu6',
            '6/relu7', '7/relu8', '8/relu9', '9/relu10'
        ]
        num_layer = 10
    elif arch == "vgg16_bn":
        layer_names = [
            '0/features/2', '1/features/5', '2/features/9', '3/features/12',
            '4/features/16', '5/features/19', '6/features/22', '7/features/26',
            '8/features/29', '9/features/32'
        ]
        num_layer = 10
    else:
        # avoid an UnboundLocalError on layer_names/num_layer for unknown archs
        raise ValueError(f"unsupported arch: {arch}")
    return model, layer_names, num_layer
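A minimal usage sketch, assuming `get_net` returns an ordinary `torch.nn.Module` (the `device` setup below is an assumption, not part of the snippet):

import torch

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model, layer_names, num_layer = get_model("mnist", "convmnist")
model = model.to(device).eval()
assert num_layer == len(layer_names)  # num_layer mirrors the tracked-layer list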
Example #2
def test_get_net():
    name_list = [
        "convmnist",
        # "convcifar10",
        # "vgg",
        # "alexnet",
        # "alexnet",
    ]

    for name in name_list:
        model = get_net(name=name)
        assert model is not None
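Only "convmnist" is exercised; the other architectures are commented out. A parametrized variant makes re-enabling them a one-line change (a sketch; `pytest` as the runner is an assumption):

import pytest

@pytest.mark.parametrize("name", ["convmnist"])  # add "convcifar10", "vgg", "alexnet" to widen coverage
def test_get_net_param(name):
    model = get_net(name=name)
    assert model is not None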
Example #3
def load_npc_lnpc(fileid):
    file_info = gdrive_fileids.rq3_fileid_list[fileid]

    model_name = file_info["arch"]
    mode = file_info["attack"]
    dataset = file_info["dataset"]

    sample_threshold, cluster_threshold, cluster_num = calc_sadl_utils_data.get_cluster_para(
        dataset, model_name)
    intra = {}
    layer_intra = {}
    m_name = f"{model_name},{mode}"

    nn_model = get_net(name=model_name)
    test_set = dataloader.get_dataloader(fileid)

    time_collect_list = []

    for index, datax in enumerate(test_set):
        covered_10 = set()
        covered_100 = set()
        total_100 = total_10 = 0

        keys = datax["key"]
        x_test = datax["your_adv"]
        # placeholder labels: TensorDataset needs a label tensor of matching length
        y_test = torch.rand(x_test.shape[0])

        test_loader1 = torch.utils.data.DataLoader(
            torch.utils.data.TensorDataset(x_test, y_test),
            batch_size=BATCH_SIZE)

        for step, (x, y) in enumerate(test_loader1):
            x = x.to(device)

            # a fresh Coverager is built for every batch
            cover = Coverager(nn_model,
                              model_name,
                              cluster_threshold,
                              num_classes=num_classes,
                              num_cluster=cluster_num)

            # timestamps bracket the two coverage calls so their durations
            # can be recovered later from the collected tuples
            start_time1 = time.time()
            covered1, total1 = cover.Intra_NPC(x,
                                               y,
                                               bucket_m,
                                               sample_threshold,
                                               mode=mode,
                                               simi_soft=False,
                                               arc=model_name)
            start_time2 = time.time()
            covered2, total2 = cover.Layer_Intra_NPC(x,
                                                     y,
                                                     bucket_m,
                                                     sample_threshold,
                                                     mode=mode,
                                                     simi_soft=False,
                                                     useOldPaths_X=True,
                                                     arc=model_name)
            start_time3 = time.time()

            total_10 += total1
            total_100 += total2
            covered_10 = covered_10 | covered1
            covered_100 = covered_100 | covered2
            time_collect_list.append((start_time1, start_time2, start_time3))

        # NOTE: divides by a single batch's total; this assumes total1/total2
        # (the coverage universe sizes) are identical across batches
        intra[keys] = round(len(covered_10) / total1, 5)
        layer_intra[keys] = round(len(covered_100) / total2, 5)

    return intra, layer_intra, time_collect_list
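The function also depends on several module-level globals (`device`, `BATCH_SIZE`, `bucket_m`, `num_classes`) and the imported helpers. A minimal calling sketch; the file-id string below is hypothetical:

intra, layer_intra, times = load_npc_lnpc("rq3_fileid_0")
for key in intra:
    print(key, "NPC:", intra[key], "LNPC:", layer_intra[key])
# per-batch durations of the two coverage passes
durations = [(t2 - t1, t3 - t2) for (t1, t2, t3) in times]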
Example #4
    ])
    dataset = torchvision.datasets.MNIST(root='~/.torch/',
                                         train=args.data_train,
                                         download=True,
                                         transform=transform_test)
else:
    pass

if args.dataset == "mnist":
    # load the original network and its masked counterpart
    ori_model = get_net(name="mnist")
    net = get_masked_net(name="mnist")

ori_model = ori_model.cuda()
ori_model.eval()
net = net.cuda()
net.eval()

grids = args.grids

# per-grid accumulators, one slot per bin
right_count = [0] * grids
prob_count = [0] * grids
remedy_count = [0] * grids
num = [0] * grids
results = []
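The counters suggest per-confidence-bin statistics. A hypothetical sketch of the kind of loop that typically fills them, bucketing predictions by max softmax probability; the loop body and `data_loader` are assumptions, since the original script's loop is not shown:

import torch

with torch.no_grad():
    for x, y in data_loader:  # assumed: a DataLoader over the dataset built above
        probs = torch.softmax(ori_model(x.cuda()), dim=1)
        conf, pred = probs.max(dim=1)
        bins = (conf * grids).long().clamp(max=grids - 1)  # map confidence to a grid cell
        for b, p, t, c in zip(bins.tolist(), pred.tolist(), y.tolist(), conf.tolist()):
            num[b] += 1                    # samples falling in this bin
            prob_count[b] += c             # summed confidence in this bin
            right_count[b] += int(p == t)  # correct predictions in this bin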