def main():
    # forward_model = load_model()
    forward_model = create_fmodel()
    backward_model = create_fmodel()

    model = foolbox.models.CompositeModel(
        forward_model=forward_model,
        backward_model=backward_model)

    # input_dir = '/home/hongyang/data/tiny-imagenet-200-aug/tiny-imagenet-200/train'
    # Images, Labels = read_train_images(input_dir)
    # print("Images.shape: ", Images.shape)
    image, _ = foolbox.utils.imagenet_example((64, 64))
    label = np.argmax(model.predictions(image))  # just for debugging

    for idx in range(1):
        # image is a numpy array with shape (64, 64, 3)
        # and dtype float32, with values between 0 and 255;
        # label is the original label (for untargeted
        # attacks) or the target label (for targeted attacks)
        # adversarial = run_attack(model, image, label)
        # store_adversarial(file_name, adversarial)
        adversarial = run_attack(model=model, image=image, label=label)
        if adversarial is None:
            print('attack failed')
        else:
            print('attack found adversarial')
Example #2
def create_fmodel_cfg(cfg_path):
    _old = os.environ.get("FMODEL_MODEL_CFG", None)
    os.environ["FMODEL_MODEL_CFG"] = cfg_path
    fmodel = create_fmodel()
    if _old is None:
        os.environ.pop("FMODEL_MODEL_CFG")
    else:
        os.environ["FMODEL_MODEL_CFG"] = _old
    return fmodel
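A minimal usage sketch for the helper above, assuming two hypothetical config paths: because create_fmodel_cfg restores the previous FMODEL_MODEL_CFG value after each call, consecutive loads do not interfere with each other.

def load_two_model_variants():
    # The config paths below are placeholders, not files from the original repo.
    model_a = create_fmodel_cfg("configs/model_a.yaml")
    model_b = create_fmodel_cfg("configs/model_b.yaml")
    return model_a, model_b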
Example #3
def main():
    forward_model = create_fmodel()
    backward_model = create_bmodel()

    model = foolbox.models.CompositeModel(forward_model=forward_model,
                                          backward_model=backward_model)

    print("\n\nStart Test...")
    test(model, run_attack_curls_whey, "Curls & Whey")
Example #4
def main():
    # instantiate blackbox and substitute model
    forward_model = load_model()
    backward_model = create_fmodel()

    # instantiate differentiable composite model
    # (predictions from blackbox, gradients from substitute)
    model = CompositeModel(forward_model=forward_model,
                           backward_model=backward_model)
    for (file_name, image, label) in read_images():
        adversarial = run_attack(model, image, label)
        store_adversarial(file_name, adversarial)
    # Announce that the attack is complete
    # NOTE: In the absence of this call, your submission will timeout
    # while being graded.
    attack_complete()
Example #5
def test_imagenet(model,
                  white_model_name,
                  attack_func,
                  bbmodel_names,
                  method,
                  att_type,
                  data_dir,
                  eplison=np.array([0.5, 51]),
                  blur_strategy=None,
                  step_size=5,
                  numSP=-1,
                  mask_att_l1=2.0,
                  direction=None,
                  deblurred=None,
                  gpuid=0):

    method_name = white_model_name + "_" + white_model_name + "_" + method

    result_root = ''  # set this to the results root directory before running

    if method[0:5] == "mbAdv":
        pert_type = "Blur"
        step_size = int(step_size)
    else:
        pert_type = "Add"

    if not os.path.exists(result_root):
        os.mkdir(result_root)

    result_root = result_root + '/' + method_name + '/'

    if not os.path.exists(result_root):
        os.mkdir(result_root)

    valdir = os.path.join(data_dir)

    batch_size = 1

    print("eplison:{}".format(eplison))

    if att_type == 'TA':
        file_att_type = att_type
    else:
        file_att_type = ''

    if len(eplison) == 2:
        if numSP == -1 or blur_strategy not in [
                "bg_obj_att", "obj_att", "att"
        ]:
            file_name = '/{}eplison_{}_{}'.format(file_att_type, eplison[0],eplison[1])+'_stepsize_{}'.format(step_size)\
                        +'_blur_strategy_{}/'.format(blur_strategy)
        elif numSP == -3:
            file_name = '/{}eplison_{}_{}'.format(file_att_type, eplison[0],eplison[1])+'_stepsize_{}'.format(step_size)\
                        +'_blur_strategy_{}'.format(blur_strategy)+'_mask_att_l1_{}/'.format(mask_att_l1)

        if not os.path.exists(result_root + file_name):
            os.mkdir(result_root + file_name)

    elif len(eplison) == 1:

        eplison[0] = np.round(eplison[0], 4)
        step_size = np.round(step_size, 4)
        file_name = '/{}eplison_{}'.format(
            file_att_type, eplison[0]) + '_stepsize_{}'.format(
                step_size) + '_blur_strategy_{}/'.format(blur_strategy)

        if not os.path.exists(result_root + file_name):
            os.mkdir(result_root + file_name)

    print(file_name)

    if direction is not None:
        file_name = '/eplison_{}_{}'.format(
            eplison[0], eplison[1]) + '_stepsize_{}'.format(
                step_size) + '_direction_{}_{}'.format(
                    direction[0],
                    direction[1]) + '_blur_strategy_{}/'.format(blur_strategy)
        if not os.path.exists(result_root + file_name):
            os.mkdir(result_root + file_name)

    print("savename:{}".format(file_name))

    if isinstance(eplison, np.ndarray) and len(eplison) == 1:
        eplison = eplison[0]

    # define the dataloader
    #------------------------------------------------------------------------------------------------------------------#
    workers = 4
    slt_num = 1000
    slt_name = result_root + "results/imagenet_slt_" + str(slt_num) + ".npy"

    slt_num_saved = 10000
    slt_name_saved = result_root + "results/imagenet_slt_" + str(
        slt_num_saved) + ".npy"

    if os.path.exists(slt_name):
        sltIdx = np.load(slt_name)
        sltIdx.sort(axis=0)
    else:
        if os.path.exists(slt_name_saved) and slt_num_saved >= slt_num:
            sltIdx_saved = np.load(slt_name_saved)
            sltIdx = slt_images(model, valdir, slt_num, sltIdx_saved)
            sltIdx.sort(axis=0)
            np.save(slt_name, sltIdx)
        else:
            # select images from ImageNet via slt_images
            sltIdx = slt_images(model, valdir, slt_num)
            #sltIdx = np.random.choice(50000,slt_num,replace=False)
            sltIdx.sort(axis=0)
            np.save(slt_name, sltIdx)

    valid_sampler = torch.utils.data.SubsetRandomSampler(sltIdx.tolist())
    val_loader = torch.utils.data.DataLoader(datasets.ImageFolder(
        valdir,
        transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
        ])),
                                             batch_size=batch_size,
                                             shuffle=False,
                                             num_workers=workers,
                                             pin_memory=True,
                                             sampler=valid_sampler)
    #------------------------------------------------------------------------------------------------------------------#
    success_status = np.ones([slt_num]) * -1.
    success_status_fmodels = []
    fb_models = []
    checkexist = True

    for forward_model_name in bbmodel_names:
        success_status_fmodels.append(np.ones([slt_num]) * -1.)
        forward_model = create_fmodel("imagenet",
                                      model_name=forward_model_name,
                                      gpu=gpuid)
        fb_models.append(forward_model)

    for i, (images, true_labels, target_labels, index,
            sample_path) in enumerate(tqdm(val_loader)):

        file_path, file_full_name = os.path.split(sample_path[0])
        image_name, ext = os.path.splitext(file_full_name)
        file_name_ = file_name + image_name
        index = index.numpy()[0]

        if os.path.exists(os.path.join(result_root,
                                       file_name_ + ".png")) and checkexist:
            success_status[index], original, adversarial = load_adversarial(
                file_name_, images)
            print(file_name_ + " exists!")

        if att_type == "TA":
            label_or_target_class = target_labels.numpy()
        else:
            label_or_target_class = true_labels.numpy()

        if torch.is_tensor(images):
            images = images.numpy()

        adversarial, success_status[index] = attack_func(
            model,
            images,
            label_or_target_class,
            pert_type,
            os.path.join(valdir + "_saliency", image_name + "_saliency.jpg"),
            eplison,
            blur_strategy,
            step_size,
            numSP=numSP,
            mask_att_l1=mask_att_l1,
            direction=direction)

        save_adversarial(result_root + file_name_, adversarial)

    np.save(
        result_root + file_name + "/{}_{}_succ_rate{}.npy".format(
            white_model_name, white_model_name, slt_num), success_status)
    k = 0
    for forward_model_name in bbmodel_names:
        np.save(
            result_root + file_name + "/{}_{}_succ_rate{}.npy".format(
                white_model_name, forward_model_name, slt_num),
            success_status_fmodels[k])
        k += 1

    print("\n", method_name, "\n")
Example #6
def test_real(model,
              white_model_name,
              attack_func,
              bbmodel_names,
              method,
              att_type,
              data_dir,
              eplison=np.array([0.5, 51]),
              blur_strategy=None,
              step_size=5,
              numSP=-1,
              mask_att_l1=2.0,
              direction=None,
              deblurred=None,
              gpuid=0):

    method_name = white_model_name + "_" + white_model_name + "_" + method
    result_root = "put project path here"

    if method[0:5] == "mbAdv":
        pert_type = "Blur"
        step_size = int(step_size)
    else:
        pert_type = "Add"

    if not os.path.exists(result_root + "results/real/"):
        os.mkdir(result_root + "results/real/")

    if not os.path.exists(result_root + "results/real/" + method_name):
        os.mkdir(result_root + "results/real/" + method_name)

    valdir = os.path.join(data_dir)

    batch_size = 1

    if len(eplison) == 2:
        if numSP == -1 or blur_strategy not in [
                "bg_obj_att", "obj_att", "att"
        ]:
            file_name = 'real/' + method_name + '/eplison_{}_{}'.format(eplison[0],eplison[1])+'_stepsize_{}'.format(step_size)\
                        +'_blur_strategy_{}/'.format(blur_strategy)
        elif numSP == -3:
            file_name = 'real/' + method_name + '/eplison_{}_{}'.format(eplison[0],eplison[1])+'_stepsize_{}'.format(step_size)\
                        +'_blur_strategy_{}'.format(blur_strategy)+'_mask_att_l1_{}/'.format(mask_att_l1)

        if not os.path.exists(result_root + "results/" + file_name):
            os.mkdir(result_root + "results/" + file_name)

    elif len(eplison) == 1:
        file_name = 'real/' + method_name + '/eplison_{}'.format(
            eplison[0]) + '_stepsize_{}'.format(
                step_size) + '_blur_strategy_{}/'.format(blur_strategy)
        if not os.path.exists(result_root + "results/" + file_name):
            os.mkdir(result_root + "results/" + file_name)

    if direction is not None:
        file_name = 'real/' + method_name + '/eplison_{}_{}'.format(
            eplison[0], eplison[1]) + '_stepsize_{}'.format(
                step_size) + '_direction_{}_{}'.format(
                    direction[0],
                    direction[1]) + '_blur_strategy_{}/'.format(blur_strategy)
        if not os.path.exists(result_root + "results/" + file_name):
            os.mkdir(result_root + "results/" + file_name)

    print("savename:{}".format(file_name))

    if isinstance(eplison, np.ndarray) and len(eplison) == 1:
        eplison = eplison[0]

    slt_num = 7
    val_loader = torch.utils.data.DataLoader(datasets.Real(
        valdir,
        transform=transforms.Compose([
            transforms.CenterCrop([1080, 1080]),
            transforms.Resize(299),
            transforms.ToTensor()
        ])),
                                             batch_size=batch_size,
                                             shuffle=False)
    piltransform = transforms.Compose([
        transforms.CenterCrop([1080, 1080]),
        transforms.Resize(299),
        transforms.ToTensor()
    ])

    success_status = np.ones([slt_num]) * -1.
    success_status_fmodels = []
    fb_models = []
    checkexist = True

    if deblurred is not None:
        direct_eval = True
        file_name = file_name[:-1] + "_" + deblurred + "/"
        print(file_name)
    else:
        direct_eval = False

    if bbmodel_names is not None:
        for forward_model_name in bbmodel_names:
            success_status_fmodels.append(np.ones([slt_num]) * -1.)
            forward_model = create_fmodel("imagenet",
                                          model_name=forward_model_name,
                                          gpu=gpuid)
            fb_models.append(forward_model)

    vis = visdom.Visdom()

    for i, (images, true_labels, target_labels, index,
            sample_path) in enumerate(tqdm(val_loader)):

        if i < 3:
            continue

        file_path, file_full_name = os.path.split(sample_path[0])
        image_name, ext = os.path.splitext(file_full_name)

        file_name_ = file_name + image_name
        index = index.numpy()[0]

        print("Processing:" + file_name_)
        # try:
        images = images.numpy()

        # predict the original label
        predictions = model.forward_one(images.squeeze(0))
        label_or_target_class = np.array([np.argmax(predictions)])

        # apply the attack
        if torch.is_tensor(images):
            images = images.numpy()  # .squeeze(0).permute(1, 2, 0).numpy()
        vis.images(images, win='org')

        adversarial, success_status[index] = attack_func(
            model,
            images,
            label_or_target_class,
            pert_type,
            os.path.join(valdir + "_saliency", image_name + "_saliency.jpg"),
            eplison,
            blur_strategy,
            step_size,
            numSP=numSP,
            mask_att_l1=mask_att_l1,
            direction=direction)

        # generate the real-blur image by averaging neighboring video frames

        vid_name, frameid = image_name.split('_')
        frameid = int(frameid)
        video_path = file_path + '_video/' + vid_name + '/'
        imgs = []
        for imageid in range(frameid - 10, frameid + 10):
            framepath = video_path + '{0:04d}.jpg'.format(imageid)
            img = Image.open(framepath).convert('RGB')
            if piltransform is not None:
                img = piltransform(img)
            img = img.numpy()
            imgs.append(img)
        realblur = np.array(imgs).mean(axis=0)
        # realblur = realblur.astype(np.float32).transpose(2, 0, 1)
        vis.image(realblur, win='realblur')

        # do blackbox attack
        if success_status[index] == 1:
            vis.images(adversarial, win='advblur')
            # predict the label of the adversarial (blurred) image
            predictions = model.forward_one(adversarial.squeeze(0))
            advblur_class = np.array([np.argmax(predictions)])
            print("advblur_cls:{}".format(advblur_class))
            predictions = model.forward_one(realblur)
            realblur_class = np.array([np.argmax(predictions)])
            print("realblur_cls:{}".format(realblur_class))

            if adversarial.max() > 1:
                adversarial = adversarial / 255
            adversarial = adversarial.astype("float32")
            store_adversarial(file_name_, images, adversarial)

    np.save(
        result_root + "results/" + file_name + "/{}_{}_succ_rate{}.npy".format(
            white_model_name, white_model_name, slt_num), success_status)
    k = 0
    for forward_model_name in bbmodel_names:
        np.save(
            result_root + "results/" +
            file_name + "/{}_{}_succ_rate{}.npy".format(
                white_model_name, forward_model_name, slt_num),
            success_status_fmodels[k])
        k += 1

    print("\n", method_name, "\n")
Example #7
from fmodel import create_fmodel
from adversarial_vision_challenge import model_server

if __name__ == '__main__':
    fmodel = create_fmodel()
    model_server(fmodel)
Example #8
def test_dev(model,
             white_model_name,
             bbmodel_names,
             method,
             data_dir,
             eplison=np.array([0.5, 51]),
             blur_strategy=None,
             step_size=5,
             numSP=-1,
             mask_att_l1=2.0,
             gpuid=0):

    method_name = white_model_name + "_" + white_model_name + "_" + method
    result_root = "put project path here"

    if not os.path.exists(result_root + "results/dev/"):
        os.mkdir(result_root + "results/dev/")

    if not os.path.exists(result_root + "results/dev/" + method_name):
        os.mkdir(result_root + "results/dev/" + method_name)

    valdir = os.path.join(data_dir)

    batch_size = 1

    if len(eplison) == 2:
        if numSP == -1 or blur_strategy not in [
                "bg_obj_att", "obj_att", "att"
        ]:
            file_name = 'dev/' + method_name + '/eplison_{}_{}'.format(eplison[0],eplison[1])+'_stepsize_{}'.format(step_size)\
                        +'_blur_strategy_{}/'.format(blur_strategy)
        elif numSP == -3:
            file_name = 'dev/' + method_name + '/eplison_{}_{}'.format(eplison[0],eplison[1])+'_stepsize_{}'.format(step_size)\
                        +'_blur_strategy_{}'.format(blur_strategy)+'_mask_att_l1_{}/'.format(mask_att_l1)

        if not os.path.exists(result_root + "results/" + file_name):
            os.mkdir(result_root + "results/" + file_name)

    elif len(eplison) == 1:
        file_name = 'dev/' + method_name + '/eplison_{}'.format(
            eplison[0]) + '_stepsize_{}'.format(
                step_size) + '_blur_strategy_{}/'.format(blur_strategy)
        if not os.path.exists(result_root + "results/" + file_name):
            os.mkdir(result_root + "results/" + file_name)

    print("savename:{}".format(file_name))

    slt_num = 1000
    val_loader = torch.utils.data.DataLoader(datasets.Dev(
        valdir,
        target_file='dev_dataset.csv',
        transform=transforms.Compose([transforms.ToTensor()])),
                                             batch_size=batch_size,
                                             shuffle=False)

    success_status = np.ones([slt_num]) * -1.
    success_status_fmodels = []
    fb_models = []
    checkexist = True

    for forward_model_name in bbmodel_names:
        success_status_fmodels.append(np.ones([slt_num]) * -1.)
        forward_model = create_fmodel("imagenet",
                                      model_name=forward_model_name,
                                      gpu=gpuid)
        fb_models.append(forward_model)

    save_dir = result_root + "/experiments/interp_results/"
    if not os.path.exists(save_dir):
        os.mkdir(save_dir)

    save_dir = result_root + "/experiments/interp_results/" + method_name + "/"
    if not os.path.exists(save_dir):
        os.mkdir(save_dir)

    save_dir = save_dir + '/eplison_{}'.format(
        eplison[0]) + '_stepsize_{}'.format(
            step_size) + '_blur_strategy_{}/'.format(blur_strategy)
    if not os.path.exists(save_dir):
        os.mkdir(save_dir)

    # success rate
    succ_rate = np.ones([slt_num]) * -10.0
    consist_sum = np.ones([slt_num]) * -10.0
    consist_max = np.ones([slt_num]) * -10.0

    for i, (images, true_labels, target_labels, index,
            sample_path) in enumerate(tqdm(val_loader)):

        file_path, file_full_name = os.path.split(sample_path[0])
        image_name, ext = os.path.splitext(file_full_name)
        file_name_ = file_name + image_name
        index = index.numpy()[0]

        if os.path.exists(
                os.path.join(result_root + "results",
                             file_name_ + ".npy")) and checkexist:
            success_status[index], original, adversarial = load_adversarial(
                file_name_, images)
            print(file_name_ + " exists!")

            # do blackbox attack
            adv_masks = []
            diff_confs = []
            if success_status[index] == 1:
                # generate the interpretation map for the mbAdv_mifgsm attack
                save_path = save_dir + image_name + white_model_name
                adv_exp, adv_mask, diff_conf = interp_explan(
                    original, adversarial,
                    true_labels.numpy()[0], model, save_path)
                save(adv_mask, original, adversarial,
                     save_dir + image_name + white_model_name)
                adv_mask = adv_mask.cpu().detach().numpy()
                adv_mask = adv_mask[:, :, :, 0].transpose(1, 2, 0)
                adv_masks.append(adv_mask)
                diff_confs.append(diff_conf)
                if adversarial.max() > 1:
                    adversarial = adversarial / 255
                adversarial = adversarial.astype("float32")
                k = 0
                for forward_model in fb_models:
                    save_path = save_dir + image_name + bbmodel_names[k]
                    adv_exp, adv_mask, diff_conf = interp_explan(
                        original, adversarial,
                        true_labels.numpy()[0], forward_model, save_path)
                    save(adv_mask, original, adversarial,
                         save_dir + image_name + bbmodel_names[k])
                    k += 1
                    adv_mask = adv_mask.cpu().detach().numpy()
                    adv_mask = adv_mask[:, :, :, 0].transpose(1, 2, 0)
                    adv_masks.append(adv_mask)
                    diff_confs.append(diff_conf)
                # calculate the consistency
                adv_masks = np.concatenate(adv_masks, axis=2)
                consist_sum[i] = adv_masks.std(axis=2).sum()
                consist_max[i] = adv_masks.std(axis=2).max()
                # calculate the success rate
                succ_rate[i] = np.array(diff_confs).mean()
                #
                print("consist_sum:{}consist_max:{} succ_rate:{}".format(
                    consist_sum[i], consist_max[i], succ_rate[i]))
        else:
            print("continue!")
            continue
    np.savez(save_dir + "/consist_succ.npz", consist_sum, consist_max,
             succ_rate)
    print("Processing:" + file_name_)
Example #9
def create():
    for fname in glob.glob(os.path.join(here, "test_models/*.zip")):
        unzip_model(os.path.basename(fname).rsplit(".", 1)[0])
    fmodel = create_fmodel()
    return fmodel
Example #10
def check_show_imagenet_res(data_dir, dataset, gpuid=0, recheck=False):

    result_root = "/mnt/nvme/projects/BlurAttack/results/"
    check_file_path = result_root + dataset + "_valid_folds.npy"
    dataset = dataset + "/"
    bmodel_name = "resnet50"
    fmodel_names = ["resnet50", "densenet121", "pyramidnet101_a360"]
    slt_num = 1000

    if os.path.exists(check_file_path) and not recheck:
        valid_fold_list = np.load(check_file_path).tolist()
    else:
        valdir = os.path.join(data_dir, 'val')
        slt_name = result_root + "/imagenet_slt_" + str(slt_num) + ".npy"
        sltIdx = np.load(slt_name)
        sltIdx.sort(axis=0)

        valid_sampler = torch.utils.data.SubsetRandomSampler(sltIdx.tolist())

        val_loader = torch.utils.data.DataLoader(datasets.ImageFolder(
            valdir,
            transforms.Compose([
                transforms.Resize(256),
                transforms.CenterCrop(224),
                transforms.ToTensor(),
            ])),
                                                 batch_size=1,
                                                 shuffle=False,
                                                 num_workers=4,
                                                 pin_memory=True,
                                                 sampler=valid_sampler)

        files = os.listdir(result_root + dataset)
        res_folds = []
        for file in tqdm(files):
            if os.path.isdir(result_root + dataset + file):
                res_folds.append(file)
                subfiles = os.listdir(result_root + dataset + file)
                for subfile in subfiles:
                    if os.path.isdir(result_root + dataset + file + "/" +
                                     subfile):
                        res_folds.pop()
                        res_folds.append(file + "/" + subfile)

        if os.path.exists(check_file_path) and not recheck:
            valid_fold_list = np.load(check_file_path).tolist()
        else:
            valid_fold_list = []

        fmodels = []
        for fmodel_name in fmodel_names:
            fmodel = create_fmodel("imagenet",
                                   model_name=fmodel_name,
                                   gpu=gpuid)
            fmodels.append(fmodel)

        for res_fold in tqdm(res_folds):
            if res_fold in valid_fold_list:
                continue
            valid_fold_list.append(res_fold)

            # check whether the attack status files already exist:
            status_file_path = result_root + dataset + res_fold
            invalid_fold = True
            k = 0
            for fmodel in fmodels:
                if not os.path.exists(
                        status_file_path + "/{}_{}_succ_rate{}.npy".format(
                            bmodel_name, fmodel_names[k], slt_num)):
                    success_status = np.ones([slt_num]) * -1.
                    # if the succ_rate file does not exist, generate it
                    for i, (images, labels, index,
                            sample_path) in enumerate(tqdm(val_loader)):
                        file_path, file_full_name = os.path.split(
                            sample_path[0])
                        file_name_, ext = os.path.splitext(file_full_name)
                        file_name_ = dataset + res_fold + "/" + file_name_
                        index = index.numpy()[0]

                        if os.path.exists(
                                os.path.join(result_root,
                                             file_name_ + ".jpg")):
                            success_status[
                                sltIdx ==
                                index], _, adversarial = load_adversarial(
                                    file_name_)
                            print(file_name_ + " exists!")
                            # do blackbox attack
                            if success_status[sltIdx == index] == 1:
                                if adversarial.max() > 1:
                                    adversarial = adversarial.transpose(
                                        2, 0, 1) / 255
                                else:
                                    adversarial = adversarial.transpose(
                                        2, 0, 1)
                                adversarial = adversarial.astype("float32")
                                predictions = fmodel.forward_one(adversarial)
                                criterion1 = foolbox.criteria.Misclassification(
                                )
                                if criterion1.is_adversarial(
                                        predictions, labels):
                                    success_status[sltIdx == index] = 1
                                else:
                                    success_status[sltIdx == index] = 0
                            continue
                        else:
                            invalid_fold = False
                            break
                    np.save(
                        status_file_path + "/{}_{}_succ_rate{}.npy".format(
                            bmodel_name, fmodel_names[k], slt_num),
                        success_status)
                k += 1

                if not invalid_fold:
                    valid_fold_list.pop()
                    break

        np.save(check_file_path, valid_fold_list)

    for fold in valid_fold_list:
        status_file_path = result_root + dataset + fold
        for fmodel_name in fmodel_names:

            res_path = status_file_path + "/{}_{}_succ_rate{}.npy".format(
                bmodel_name, fmodel_name, slt_num)
            status = np.load(res_path)
            succ_ = np.zeros_like(status)
            fail_ = np.zeros_like(status)
            already_ = np.zeros_like(status)
            succ_[status == 1.] = 1.
            fail_[status == 0.] = 1.
            already_[status == -1.] = 1.

            num_succ = succ_.sum()
            num_fail = fail_.sum()
            num_already = already_.sum()

            succ_rate = num_succ / (num_fail + num_already + num_succ)
            print("{}_bmodel:{}_fmodel:{}:success rate:{}".format(
                fold, bmodel_name, fmodel_name, succ_rate))

            # print the image names

    return valid_fold_list
Example #11
0
def check_show_dev_res(data_dir,
                       prefix,
                       white_model_name="inceptionv3",
                       slt_fold=None,
                       dataset="dev",
                       gpuid=0,
                       recheck=False,
                       extra_file=None,
                       skip_eval=False):

    result_root = "./results/"
    check_file_path = result_root + dataset + "_valid_folds.npy"
    dataset = dataset + "/"
    prefix = prefix + "/"
    bbmodel_names = [
        "inceptionresnetv2", "inceptionv3", "inceptionv4", "xception"
    ]
    slt_num = 1000
    valid_fold_list = []
    generate_npy = False

    if slt_fold is not None:
        valid_fold_list.append(slt_fold)
    else:

        if os.path.exists(check_file_path) and not recheck:
            valid_fold_list = np.load(check_file_path).tolist()
        else:
            valdir = os.path.join(data_dir)
            batch_size = 1
            slt_num = 1000

            files = os.listdir(result_root + dataset + prefix)
            res_folds = []
            for file in tqdm(files):
                if os.path.isdir(result_root + dataset + prefix + file):
                    res_folds.append(file)
                    subfiles = os.listdir(result_root + dataset + prefix +
                                          file)
                    k = 0
                    for subfile in subfiles:
                        if os.path.isdir(result_root + dataset + prefix +
                                         file + "/" + subfile):
                            print(subfile)
                            if k == 0:
                                res_folds.pop()
                            res_folds.append(file + "/" + subfile)
                        k += 1

            if os.path.exists(check_file_path) and not recheck:
                valid_fold_list = np.load(check_file_path).tolist()
            else:
                valid_fold_list = []

            success_status = np.ones([slt_num]) * -1.
            success_status_fmodels = []
            fb_models = []

            for forward_model_name in bbmodel_names:
                success_status_fmodels.append(np.ones([slt_num]) * -1.)
                forward_model = create_fmodel("imagenet",
                                              model_name=forward_model_name,
                                              gpu=gpuid)
                fb_models.append(forward_model)

            for res_fold in tqdm(res_folds):

                if res_fold in valid_fold_list:
                    continue

                valid_fold_list.append(res_fold)

                # check whether the attack status files already exist:
                status_file_path = result_root + dataset + prefix + res_fold
                invalid_fold = True

                # if the succ_rate file does not exist, drop this fold from the valid list
                if not os.path.exists(status_file_path + \
                    "/{}_{}_succ_rate{}.npy".format(white_model_name, white_model_name, slt_num)):
                    valid_fold_list.pop()

            np.save(check_file_path, valid_fold_list)

    for fold in valid_fold_list:
        print(fold)
        status_file_path = result_root + dataset + prefix + fold
        for forward_model_name in bbmodel_names:
            res_path = status_file_path + "/{}_{}_succ_rate{}.npy".format(
                white_model_name, forward_model_name, slt_num)
            status = np.load(res_path)

            if extra_file is not None:
                print(extra_file)
                is_advs = np.zeros_like(status)
                preds = np.zeros_like(status)
                res_path = status_file_path + "/" + extra_file
                _, res_ext = os.path.splitext(extra_file)

                if res_ext == ".txt":
                    import re
                    f = open(res_path)
                    line = f.readline()
                    k = 0
                    imgnames = []
                    while line:
                        matchObj = re.match(r'(.*).png,(.*),[[](.*)[]]', line,
                                            re.M | re.I)
                        if matchObj:
                            imgnames.append(matchObj.group(1))
                            preds[k] = int(matchObj.group(2)) - 1
                            is_advs[k] = int(matchObj.group(3))
                        line = f.readline()
                        k += 1
                    f.close()
                    status_ = is_advs
                    status_[is_advs == 0.] = -1

                    import pandas as pd
                    target_df = pd.read_csv(os.path.join(
                        data_dir, 'dev_dataset.csv'),
                                            header=None)
                    f_to_true = dict(
                        zip(target_df[0][1:].tolist(),
                            [x - 1 for x in list(map(int, target_df[6][1:]))]))

                    for index_ in range(len(imgnames)):
                        imgname, _ = os.path.splitext(imgnames[index_])
                        true_label = f_to_true[imgname] if f_to_true[
                            imgname] else 0
                        if is_advs[index_] == 1.:
                            if true_label == preds[index_]:
                                status_[index_] = -1
                            else:
                                status_[index_] = 1
                        #print("pred_label:{} true_label:{}".format(preds[index_],true_label))
                    status = status_

                elif res_ext == ".npz":

                    data = np.load(res_path, allow_pickle=True)
                    pred_lbl = data["pred"]
                    true_lbl = data["truth"]
                    true_lbl2 = data["lbl_text"]
                    pred_logit = data["logit"]
                    _, uniq_idx = np.unique(true_lbl2, return_index=True)

                    pred_lbl = pred_lbl[uniq_idx]
                    true_lbl = true_lbl[uniq_idx]
                    #true_lbl2 = true_lbl2[uniq_idx]
                    #pred_logit = pred_logit[uniq_idx]
                    status = np.ones_like(pred_lbl)
                    status[pred_lbl == 0] = -1
                    status[pred_lbl == true_lbl] = -1

            #print(status)

            succ_ = np.zeros_like(status)
            fail_ = np.zeros_like(status)
            already_ = np.zeros_like(status)
            succ_[status == 1.] = 1.
            fail_[status == -1.] = 1.
            already_[status == -0.] = 1.

            num_succ = succ_.sum()
            num_fail = fail_.sum()
            num_already = already_.sum()

            succ_rate = num_succ / (num_fail + num_already + num_succ)
            print("{}_bmodel:{}_fmodel:{}:success rate:{}".format(
                fold, white_model_name, forward_model_name, succ_rate))

            # find adversarial examples and remove failed (all-zero) results
            image_files = os.listdir(status_file_path)
            for file in image_files:
                full_path = status_file_path + "/" + file
                full_path_withoutext, file_ext = os.path.splitext(full_path)
                if file_ext == ".jpg" and full_path_withoutext[-4:] != "_org":
                    img = imageio.imread(full_path)
                    if img.max() == 0:
                        #print("remove:{}".format(full_path))
                        os.remove(full_path)
                        os.remove(full_path_withoutext + "_org.jpg")
                    if os.path.exists(full_path_withoutext + "_org.npy"):
                        os.remove(full_path_withoutext + "_org.npy")

    return valid_fold_list
Example #12
def load_model():
    return create_fmodel()