Ejemplo n.º 1
0
    def craft(model, x, order, epsilon=1., max_iter=100, num_random_init=10):
        """Craft a Sorted PGD (SATA) attack forcing a class ranking per input.

        :param model: Keras model under attack.
        :param x: array of inputs; first axis is the batch dimension.
        :param order: desired class ranking; either a 1-D array applied to
            every input or a 2-D (nb_elements, nb_classes) array.
        :param epsilon: attack budget forwarded to the attack parameters.
        :param max_iter: maximum number of PGD iterations.
        :param num_random_init: number of random restarts. Previously this
            was a free (undefined) name at the call site below, raising a
            NameError; it is now an explicit parameter, matching the sibling
            ``craft`` of SpatialResilientPGD.
        :return: array of adversarial examples.
        """
        nb_elements = x.shape[0]
        # Broadcast a single ranking to every element of the batch.
        if len(order.shape) == 1:
            nb_classes = len(order)
            order = np.repeat(np.array([order]), nb_elements, axis=0)
        else:
            nb_classes = order.shape[1]

        # Probability-mass threshold separating consecutive ranked classes.
        threshold = 1 / (nb_classes + 1)
        norm = 2

        logger.info(
            'Crafting Sorted PGD attack; norm:{} threshold:{} epsilon:{}'.
            format(norm, (threshold, SATA.power), epsilon))

        attack_params, attack_name = get_attack_params("targetted_pgd", norm,
                                                       epsilon)
        classifier = KerasClassifier(model=model)
        crafter = SATA(classifier,
                       num_random_init=num_random_init,
                       max_iter=max_iter,
                       eps=1.)
        crafter.nb_classes = nb_classes
        crafter.set_params(**attack_params)

        adv_x, ref_x = crafter.generate(x, order, threshold)
        return adv_x
Ejemplo n.º 2
0
def main():
    """Load self-comparison data and emit one LaTeX table per timeout."""

    # Bail out early when the comparison CSV has not been generated yet.
    if not SELF_COMPARISON_DATA_PATH.exists():
        logger.error('Self comparison data not found at: {}'.format(
            SELF_COMPARISON_DATA_PATH))
        return
    frame = pandas.read_csv(str(SELF_COMPARISON_DATA_PATH))

    for timeout in PREPROCESSING_TIMEOUTS:

        # Restrict to rows for this timeout, keeping only the wanted columns.
        rows = frame[frame[headers.TIMEOUT] == timeout][HEADERS]

        # Grouped summary statistics, rounded for presentation.
        stats = rows.groupby(GROUPBY).describe().round(decimals=1)

        # Drop the redundant count column and force integer formatting on
        # the order statistics of the size column.
        stats = stats.drop(columns=[(headers.SIZE, 'count')])
        quantile_cols = [(headers.SIZE, name)
                         for name in ('min', '25%', '50%', '75%', 'max')]
        stats[quantile_cols] = stats[quantile_cols].astype('int')

        # Emit the LaTeX table next to the other IC tables.
        stats.to_latex(
            str(IC_TABLES_DIR / IC_TABLE_FORMAT_STR.format(timeout)))

        # Also print the table to the log.
        with PRINT_CONTEXT:
            logger.info('Timeout == {}'.format(timeout))
            logger.info(str(stats) + '\n')
Ejemplo n.º 3
0
def run(dataset="cifar10",
        model_type="basic",
        epochs=25,
        exp_id="_gen_dataset"):
    """Generate adversarial encodings of random messages for 2 and 3 classes.

    For each class count, encodes ``nb_messages`` random messages of length
    ``msg_len`` with the targeted-PGD attack via ``_encode``.
    """
    attack_name = "targeted_pgd"

    # Make runs reproducible when a seed is configured.
    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)
        np.random.seed(RANDOM_SEED)

    msg_len = 52
    nb_messages = 1000

    nb_classes = [2, 3]
    for nb_cls in nb_classes:
        experiment_id = "{}/{}".format(exp_id, nb_cls)
        for i in range(nb_messages):
            logger.info("## class {} iter {}".format(nb_cls, i))
            experiment_time = int(time.time())
            # Random message over the alphabet `strs`. The comprehension
            # variable previously shadowed the outer loop index `i`.
            msg = "".join(
                [strs[random.randint(0, len(strs) - 1)]
                 for _ in range(msg_len)])
            _encode(msg,
                    dataset,
                    model_type,
                    epochs,
                    experiment_id,
                    attack_name,
                    nb_classes=nb_cls,
                    experiment_time=experiment_time)
Ejemplo n.º 4
0
def run_tests(use_gpu=False):
    """Smoke-test the SATA attack on a few CIFAR-10 test images."""
    if not use_gpu:
        # Hide all GPUs from TensorFlow before it is imported.
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

    import tensorflow as tf
    from utils.adversarial_models import load_model

    model, x_train, x_test, y_train, y_test = load_model(dataset="cifar10",
                                                         model_type="basic",
                                                         epochs=25)

    np.random.seed(RANDOM_SEED)

    nb_elements = 10
    nb_classes = 3
    # Random target ranking per element. (A hard-coded ranking previously
    # assigned here was dead code: it was immediately overwritten.)
    order = np.random.randint(0, 10, (nb_elements, nb_classes))

    SATA.power = 1
    adv_x = SATA.craft(model,
                       x_test[:nb_elements],
                       order,
                       epsilon=3.,
                       max_iter=200)
    adv_y = model.predict(adv_x)
    logger.info("{}: {}".format(
        np.sort(-adv_y[0]).shape,
        list(zip(order, np.argsort(-1 * adv_y, axis=1)))))
Ejemplo n.º 5
0
    def craft(model,
              x,
              target,
              epsilon=3.,
              num_random_init=10,
              max_iter=100,
              quality=75,
              eps_step=0.1):
        """Craft spatially resilient targeted-PGD adversarial examples.

        Targets are given as class indices and one-hot encoded before the
        attack; returns the adversarial inputs.
        """
        one_hot_target = to_categorical(target)
        norm = 2

        logger.info(
            'Crafting spacially resilient PGD attack; norm:{} epsilon:{}'.
            format(norm, epsilon))

        # Base parameters for the targeted PGD attack, plus the step size.
        attack_params, attack_name = get_attack_params("targetted_pgd", norm,
                                                       epsilon)
        attack_params["eps_step"] = eps_step

        crafter = SpatialResilientPGD(KerasClassifier(model=model),
                                      num_random_init=num_random_init,
                                      max_iter=max_iter,
                                      eps=1.)
        crafter.set_params(**attack_params)

        return crafter.generate(x, one_hot_target, quality)
def main():
    """Validate."""

    # Two preprocessed-data directories are required on the command line.
    argv = sys.argv[1:]
    if len(argv) != 2:
        raise Exception(
            'Usage: python -m experiments.sanity_check_preprocessing '
            '<preprocessed data dir> <preprocessed data dir>')
    dir1 = Path(argv[0]) / 'edgelist'
    dir2 = Path(argv[1]) / 'edgelist'

    for directory in (dir1, dir2):
        logger.info('Loading edgelists from `{}`'.format(directory))

    # Only edgelist files present in both directories can be compared.
    shared = _glob_edgelists(dir1) & _glob_edgelists(dir2)
    logger.info('Found {} shared edgelist files'.format(len(shared)))

    for name in shared:
        logger.info('Validating `{}`'.format(name))

        # Build both graphs and run the (cheap, necessary-condition)
        # isomorphism check.
        first = graphs.read_edgelist(dir1, name)
        second = graphs.read_edgelist(dir2, name)
        if not graphs.could_be_isomorphic(first, second):
            logger.info('`{}` is not isomorphic'.format(name))
Ejemplo n.º 7
0
def main(current_server_id, total_servers, seed_filter):
    """Run experiment.

    Partitions the available edgelist datasets across ``total_servers``
    machines, runs each configured exact solver on this server's share,
    and writes results to the exact-results CSV.

    :param current_server_id: 1-based index of this server.
    :param total_servers: total number of servers sharing the work.
    :param seed_filter: predicate selecting which dataset names to keep.
    """

    # Find the datasets we want
    input_dir = Path('.') / 'data' / 'preprocessed' / 'edgelist'
    datasets = names_in_dir(input_dir, '.edgelist')
    datasets = [dataset.replace('.edgelist', '') for dataset in datasets]
    datasets = sorted(datasets)
    # Quantum data has no dashes, synthetics use dashes to separate parameters
    datasets = list(filter(seed_filter, datasets))
    # Split "-server 3 -of 3" across 3 servers
    # NOTE(review): the hard-coded `2::` offset, the second stride by
    # total_servers, and the `[69:]` skip look like leftovers from resuming
    # a partial run — confirm before reusing this script.
    dataset_partition = datasets[2::total_servers]
    dataset_partition = dataset_partition[current_server_id -
                                          1::total_servers][69:]

    print('Expected data:', dataset_partition)
    # Collect solvers (commented entries are disabled solver back-ends)
    solvers_dict = {
        # AI: _run_ai,
        ILP: _run_ilp,
        # ILP1T: partial(_run_ilp, threads=1),
        # IC: _run_ic
    }
    solvers = sorted(list(solvers_dict.items()))

    # Generate experiments: every (dataset, solver) pair
    experiments = product(dataset_partition, solvers)

    # Open output file for writing
    with open(str(EXACT_RESULTS_DATA_PATH), 'w') as output:

        # Get writer
        writer = csv.writer(output)

        # Write header
        writer.writerow([
            headers.DATASET, headers.SOLVER, headers.TIME, headers.SIZE,
            headers.CERTIFICATE
        ])

        # Run experiments
        for dataset, (name, solver) in experiments:

            # Log
            logger.info('Running {} on {}'.format(name, dataset))

            # Solve and write output. Skip runs that hit the time budget,
            # whether reported in solution[0] or raised as TimeoutExpired.
            try:
                solution = solver(dataset)
                if solution[0] >= EXACT_TIMEOUT:
                    continue
            except subprocess.TimeoutExpired:
                continue

            writer.writerow([dataset, name, *solution])
            output.flush()

    # Now generate the ground truth table if akiba_iwata was run
    # NOTE(review): `_run_ai` is commented out of `solvers_dict` above, so
    # this branch is currently unreachable.
    if (AI, _run_ai) in solvers:
        _generate_ground_truth()
Ejemplo n.º 8
0
def main():
    """Run experiment."""

    with open(BASELINE_FILE, 'w') as output:
        writer = csv.writer(output)

        # Header row for the baseline results CSV.
        writer.writerow([
            headers.DATASET, headers.SOLVER, headers.TIME, headers.SIZE,
            headers.CERTIFICATE
        ])

        for dataset in huffner_experiment_datasets:
            logger.info('Computing OCT for {}'.format(dataset))

            # Skip datasets that exhaust the exact-solver time budget,
            # whether reported in solution[0] or raised as an exception.
            try:
                solution = solve(str(HUFFNER_DATA_DIR /
                                     (dataset + HUFFNER_DATA_EXT)),
                                 timeout=EXACT_TIMEOUT)
            except subprocess.TimeoutExpired:
                continue
            if solution[0] >= EXACT_TIMEOUT:
                continue

            writer.writerow([dataset, HUFFNER_BASELINE_SOLVER, *solution])
            output.flush()
def run_tests(use_gpu=True):
    """Smoke-test SpatialResilientPGD and its resilience to JPEG saving."""
    if not use_gpu:
        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"

    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

    import tensorflow as tf
    from utils.adversarial_models import load_model

    model, x_train, x_test, y_train, y_test = load_model(
        dataset="cifar10", model_type="basic", epochs=25)

    quality = 100
    SpatialResilientPGD.test_quality = 75
    nb_elements = 100
    np.random.seed(RANDOM_SEED)

    # Random target class for each attacked element.
    y = np.random.randint(0, 10, nb_elements)

    adv_x = SpatialResilientPGD.craft(model,
                                      x_test[:nb_elements],
                                      y,
                                      epsilon=3,
                                      max_iter=500,
                                      quality=quality,
                                      eps_step=0.01,
                                      num_random_init=50)
    adv_y = model.predict(adv_x)

    # Persist one adversarial image, reload it, and compare predictions to
    # check the attack survives the save/load round trip.
    adv_path = "./utils/adv.jpg"
    save_img(adv_path, adv_x[0], quality=quality)

    adv_x_post = np.array([_load_image(adv_path)])
    adv_y_post = model.predict(adv_x_post)
    logger.info("{}-{}".format(np.argmax(adv_y_post, axis=1),
                               np.argmax(adv_y, axis=1)))
Ejemplo n.º 10
0
def _decode(dataset,
            model_type,
            epochs,
            experiment_id,
            attack_name,
            experiment_time,
            extension=None):
    """Decode saved images with the model and log the decoding accuracy.

    Reads every image with the given extension from the experiment's
    pictures directory, normalizes it to a 32x32 RGB array, predicts its
    class, and compares against the ground-truth class embedded in the
    filename.

    :param extension: image extension to scan for; falls back to the
        module-level ``default_extension`` when falsy.
    """
    if not extension:
        extension = default_extension
    pictures_path = default_path.format(experiment_id, attack_name,
                                        experiment_time)
    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset,
                                                         model_type=model_type,
                                                         epochs=epochs)
    score = []
    for file in os.listdir(pictures_path):
        if file.endswith(".{}".format(extension)):
            path = "{}/{}".format(pictures_path, file)
            image = load_img(path)
            # Normalize to 32x32 RGB before feeding the model.
            if len(image.size) < 3:
                image = image.convert("RGB")
            if image.width != 32:
                image = image.resize((32, 32), Image.BILINEAR)

            # `palette` is a module-level normalization constant
            # (presumably 255 — confirm at module scope).
            img = img_to_array(image) / palette
            img_class = np.argmax(model.predict(np.array([img]), verbose=0))
            # The single character right before "_truth" in the filename is
            # the ground-truth class digit (assumes classes 0-9).
            index = file.index("_truth") - 1
            real_class = int(file[index:index + 1])
            steg_msg = lsb.reveal(path)
            logger.info("img {} decoded as {} stegano {}".format(
                file, img_class, steg_msg))

            score.append(real_class == img_class)

    logger.info("decoding score {}".format(np.mean(np.array(score))))
Ejemplo n.º 11
0
def _decode(dataset, model_type, epochs, experiment_id, attack_name,
            experiment_time):
    """Score decoding accuracy and steganalysis detectors on saved images.

    Computes the classifier's decoding accuracy plus detection scores from
    an LSB reveal, per-channel sample-pair analysis (SPA), and an SRM
    ensemble model, then logs the aggregate means.
    """
    pictures_path = default_path.format(experiment_id, attack_name,
                                        experiment_time)
    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset,
                                                         model_type=model_type,
                                                         epochs=epochs)
    score = []
    lsb_score = []
    stat_score = []
    ensemble_score = []

    for file in os.listdir(pictures_path):
        if file.endswith(".{}".format(extension)):
            path = "{}/{}".format(pictures_path, file)
            img = img_to_array(load_img(path)) / palette
            img_class = np.argmax(model.predict(np.array([img]), verbose=0))
            # Single digit right before "_truth" is the ground-truth class.
            index = file.index("_truth") - 1
            real_class = int(file[index:index + 1])
            score.append(real_class == img_class)

            # LSB detector: a revealed message counts as a detection (1).
            # FIX: this previously compared the `lsb_score` list itself with
            # None, which is never true, so every image appended 1.
            steg_msg = lsb.reveal(path)
            lsb_score.append(0 if steg_msg is None else 1)

            # Sample-pair analysis per RGB channel.
            bitrate_R = attacks.spa(path, 0)
            bitrate_G = attacks.spa(path, 1)
            bitrate_B = attacks.spa(path, 2)

            threshold = 0.05

            # Flag as detected if any channel exceeds the bitrate threshold.
            if bitrate_R < threshold and bitrate_G < threshold and bitrate_B < threshold:
                stat_score.append(0)
            else:
                stat_score.append(1)

    logger.info("decoding score {}".format(np.mean(np.array(score))))

    # NOTE(review): `real_path` is built but the loop below still scans
    # `pictures_path`; it likely should scan the reference directory.
    real_path = "{}/ref".format(pictures_path)
    for file in os.listdir(pictures_path):
        if file.endswith(".{}".format(extension)):
            path = "{}/{}".format(pictures_path, file)

            e4s_score = _detect_e4s_srm(
                file_path=path,
                model_file="e4s_srm_bossbase_lsbm0.10_gs.model")
            ensemble_score.append(e4s_score)

            # FIX: same always-constant comparison as above; here an absent
            # message counts as a correct non-detection (1).
            steg_msg = lsb.reveal(path)
            lsb_score.append(1 if steg_msg is None else 0)

            spa_score = _detect_spa(path)
            stat_score.append(spa_score)

    lsb_score_mean = np.mean(np.array(lsb_score))
    stat_score_mean = np.mean(np.array(stat_score))

    logger.info("lsb detection score {}, stats detection score {}".format(
        lsb_score_mean, stat_score_mean))
Ejemplo n.º 12
0
def _encode(msg,dataset, model_type, epochs, experiment_id,attack_name, attack_strength=2.0, extension=None, transformation=None):
    """Encode *msg* into adversarial images, yielding each image to save.

    Generator: yields one dict per message digit with keys "time", "file"
    (destination path) and "img" (the possibly transformed PIL image).
    The trailing ``return experiment_time`` is delivered via StopIteration
    and only visible to callers driving the generator manually.
    NOTE(review): ``experiment_time`` is not defined in this function —
    presumably a module-level value; confirm at module scope.
    """
    if not extension:
        extension = default_extension
    encoded_msg = _encodeString(msg)
    logger.info("Encode message {}=>{}".format(msg,encoded_msg))
    test_size = len(encoded_msg)
    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset, model_type=model_type, epochs=epochs)
    num_classes= 10

    # Shuffle test inputs and labels together, in place.
    combined = list(zip(x_test, y_test))
    random.shuffle(combined)
    x_test[:], y_test[:] = zip(*combined)
    
    #keep only correctly predicted inputs
    batch_size = 64
    preds_test = np.argmax(model.predict(x_test,verbose=0), axis=1)
    inds_correct = np.where(preds_test == y_test.argmax(axis=1))[0]
    x, y = x_test[inds_correct], y_test[inds_correct]
    x, y = x[:test_size], y[:test_size]

    # One-hot targets: each digit of the encoded message becomes a class.
    targets = np.array(to_categorical([int(i) for i in encoded_msg], num_classes), "int32")    
    #print(targets)
    
    adv_x = craft_attack(model,x,attack_name,y=targets, epsilon=attack_strength)
    yadv = np.argmax(model.predict(adv_x), axis=1)
    
    pictures_path = default_path.format(experiment_id,attack_name, experiment_time)
    os.makedirs(pictures_path, exist_ok =True)
    os.makedirs("{}/ref".format(pictures_path), exist_ok =True)

    advs = []
    for i, _adv in enumerate(adv_x):
        predicted = yadv[i]
        encoded = np.argmax(targets[i])
        truth = np.argmax(y[i])
        # Filename records predicted, encoded and ground-truth classes.
        adv_path = "{}/{}_predicted{}_encoded{}_truth{}.{}".format(pictures_path,i,predicted,encoded,truth, extension)
        real_path = "{}/ref/{}.{}".format(pictures_path,i,extension)
           
        adv = array_to_img(_adv)

        # Optional spatial transformation applied before the image is saved.
        if transformation=="rotate":
            adv = adv.rotate(10)

        elif transformation=="crop":
            adv = adv.crop((2,2,30,30))

        elif transformation=="upscale":
            adv = adv.resize((64,64),Image.BILINEAR)

        elif transformation=="compress":
            adv = _compress_img(adv)    

        yield  {"time":experiment_time,"file":adv_path,"img":adv}

        # adv.save(adv_path)

    return experiment_time
Ejemplo n.º 13
0
    def generate(self, x, y, quality):
        """
        Generate adversarial samples and return them in an array.

        :param x: An array with the original inputs.
        :type x: `np.ndarray`
        :param y: The labels for the data `x`. Only provide this parameter if you'd like to use true
                  labels when crafting adversarial samples. Otherwise, model predictions are used as labels to avoid the
                  "label leaking" effect (explained in this paper: https://arxiv.org/abs/1611.01236). Default is `None`.
                  Labels should be one-hot-encoded.
        :type y: `np.ndarray`
        :param quality: JPEG quality forwarded to the perturbation step and
            the success computation.
        :return: An array holding the adversarial examples.
        :rtype: `np.ndarray`
        """
        # FIX: the docstring above previously appeared *after* the statement
        # below, making it a discarded string expression rather than the
        # function's docstring.
        self.targeted = True

        targets = y

        adv_x_best = None
        rate_best = 0.0

        # Random restarts: keep the perturbation with the best success rate.
        for i_random_init in range(max(1, self.num_random_init)):
            adv_x = x  #.astype(NUMPY_DTYPE)

            for i_max_iter in range(self.max_iter):

                adv_x = self._compute(adv_x,
                                      targets,
                                      self.eps,
                                      self.eps_step,
                                      self.num_random_init > 0
                                      and i_max_iter == 0,
                                      quality=quality)

                # Project the perturbation back onto the eps-ball around x.
                if self._project:
                    noise = projection(adv_x - x, self.eps, self.norm)
                    adv_x = x + noise

            rate = 100 * self.compute_success(self.classifier,
                                              x,
                                              targets,
                                              adv_x,
                                              self.targeted,
                                              quality=quality)
            logger.info('Success rate {}: {}'.format(i_random_init, rate))
            if rate > rate_best or adv_x_best is None:
                rate_best = rate
                adv_x_best = adv_x

        logger.info('Success rate of Spatially resilient PGD attack: %.2f%%',
                    rate_best)

        SpatialResilientPGD.rate_best = rate_best
        return adv_x_best
Ejemplo n.º 14
0
def main():
    """Run the heuristics experiment and record solver output as CSV."""

    logger.info('Starting Heuristics Experiment')

    # Every (timeout, dataset) combination is one experiment.
    experiments = product(PREPROCESSING_TIMEOUTS_MILLISECONDS, DATASETS)

    with open(str(HEURISTICS_RESULTS_DATA_FILE), 'w') as output:

        writer = csv.writer(output)
        writer.writerow(HEURISTICS_CSV_HEADERS)

        for experiment in experiments:

            logger.info(
                'Starting experiment timeout={} dataset={}'.format(*experiment)
            )

            # Run the external heuristic solver.
            cmd = [str(HEURISTIC_SOLVER), *experiment]
            proc = subprocess.run(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE
            )

            # Expected stdout shape: size,time,"[v1,v2,...]"
            output_string = bytes.decode(proc.stdout, 'utf-8').strip()
            match = re.match(
                r'(\d+),(\d+),"(\[((\d+)(,(\d+))*)?\])"',
                output_string
            )
            if match is None:
                # FIX: previously this crashed with AttributeError when the
                # solver produced unparsable output; log and skip instead.
                logger.error('Unparsable solver output for {}: {!r}'.format(
                    experiment, output_string))
                continue
            size = int(match.group(1))
            time = int(match.group(2))
            certificate = match.group(3)

            writer.writerow([
                os.path.basename(experiment[1]),
                experiment[0],
                size,
                time,
                certificate
            ])
            output.flush()
Ejemplo n.º 15
0
def run(dataset="cifar10", model_type="basic", epochs=25, experiment_id="SP3"):
    """Encode/decode a random message under crop and upscale transforms."""
    attack_name = "targeted_pgd"
    logger.info("running {} {} {}".format(dataset, model_type, attack_name))

    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)

    quality = 100
    extension = "png"
    l = 100

    # Random 100-character message over the alphabet `strs`.
    msg = "".join([strs[random.randint(0, len(strs) - 1)] for i in range(l)])

    # Each sub-experiment applies one spatial transformation during encoding
    # and then measures decoding accuracy. (The "rotate" variant, SP3/1, is
    # currently disabled.)
    for sub_id, transformation in (("SP3/2", "crop"), ("SP3/3", "upscale")):
        experiment_id = sub_id
        exp_time = _encode(msg,
                           dataset,
                           model_type,
                           epochs,
                           experiment_id,
                           attack_name,
                           attack_strength=5.,
                           extension=extension,
                           transformation=transformation)
        _decode(dataset,
                model_type,
                epochs,
                experiment_id,
                attack_name,
                exp_time,
                extension=extension)
Ejemplo n.º 16
0
def main():
    """Run experiments"""

    # Resolve dataset names to on-disk SNAP files.
    datasets = [str(SNAP_DATA_DIR / (name + SNAP_DATA_EXT))
                for name in preprocessed]

    logger.info('Starting CPLEX Experiment')

    with open(str(CPLEX_RESULTS_DATA_FILE), 'w') as output:

        writer = csv.writer(output)
        writer.writerow(HEURISTICS_CSV_HEADERS)

        # One run per (timeout, dataset) combination.
        for timeout, dataset in product(PREPROCESSING_TIMEOUTS, datasets):

            try:
                logger.info('Starting experiment timeout={} dataset={}'.format(
                    timeout, dataset))

                # Solve the vertex-cover formulation with CPLEX and convert
                # the result back to an odd cycle transversal.
                solution = solve(read_edgelist(dataset),
                                 formulation='VC',
                                 solver='CPLEX',
                                 threads=4,
                                 timelimit=timeout,
                                 convert_to_oct=True)

                writer.writerow([
                    os.path.basename(dataset), timeout,
                    solution.opt, solution.time, solution.certificate
                ])
                output.flush()

            except Exception as e:
                # A single failing dataset should not abort the whole run.
                logger.error(e)
Ejemplo n.º 17
0
def main():
    """Parse arguments and run experiment."""
    logger.info('Starting FCL experiment.')

    # Solvers to benchmark, as (label, callable) pairs.
    solvers = [(AI, _run_ai), (ILP, _run_ilp), (IC, _run_ic)]

    with open(RESULTS_FILE, 'w') as results_file_fd:

        csv_writer = csv.writer(results_file_fd)
        csv_writer.writerow(OUTPUT_HEADER)

        # dataset -> FCL instance -> solver, innermost loop does the work.
        for dataset in _datasets():
            logger.info('Processing dataset {}'.format(dataset))

            for name in _fcls_from(dataset):
                logger.info('Solving {}'.format(name))

                for sn, solver in solvers:
                    logger.info('Running solver {}'.format(sn))

                    try:
                        size, time, certificate = solver(dataset, name)
                        csv_writer.writerow([
                            '{}/{}'.format(dataset, name), sn, time, size,
                            certificate
                        ])
                    except Exception as e:
                        # Record a NaN row so failures stay visible without
                        # aborting the experiment.
                        logger.error(e)
                        csv_writer.writerow([
                            name, sn,
                            float('nan'),
                            float('nan'),
                            float('nan')
                        ])
                    finally:
                        # Flush so partial results survive a crash.
                        results_file_fd.flush()
Ejemplo n.º 18
0
def run(dataset="cifar10", model_type="basic", epochs=25, experiment_id="SP2"):
    """Encode test messages under the three SP2 experiment variants."""
    attack_name = "targeted_pgd"
    logger.info("running {} {} {}".format(dataset, model_type, attack_name))

    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)

    quality = 100
    extension = "png"

    # Variant 1: short fixed message.
    msg1 = "Hello"
    experiment_id = "SP2/1"
    exp_time = _encode(msg1, dataset, model_type, epochs, experiment_id,
                       attack_name, quality=quality, attack_strength=1.,
                       extension=extension)

    # Variant 2: same message, keeping a single class.
    experiment_id = "SP2/2"
    exp_time = _encode(msg1, dataset, model_type, epochs, experiment_id,
                       attack_name, quality=quality, attack_strength=1.,
                       extension=extension, keep_one=True)

    # Variant 3: long random message over the alphabet `strs`.
    experiment_id = "SP2/3"
    l = 100
    msg2 = "".join([strs[random.randint(0, len(strs) - 1)] for i in range(l)])
    exp_time = _encode(msg2, dataset, model_type, epochs, experiment_id,
                       attack_name, quality=quality, attack_strength=1.,
                       extension=extension)
Ejemplo n.º 19
0
def run(dataset="cifar10", model_type="basic", epochs=25, experiment_id=5):
    """Encode a random 100-character message, then decode and score it."""
    attack_name = "targeted_pgd"
    logger.info("running {} {} {}".format(dataset, model_type, attack_name))

    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)

    # Random message over the alphabet `strs`.
    l = 100
    msg = "".join([strs[random.randint(0, len(strs) - 1)] for i in range(l)])

    exp_time = _encode(msg, dataset, model_type, epochs, experiment_id,
                       attack_name, quality=100, attack_strength=1.)
    _decode(dataset, model_type, epochs, experiment_id, attack_name, exp_time)
Ejemplo n.º 20
0
def _encode_adv(msg,
                dataset,
                model_type,
                epochs,
                experiment_id,
                attack_name,
                attack_strength=2.0):
    """Turn *msg* into adversarial targets and craft the attacked inputs.

    Returns the clean inputs, their labels, the adversarial inputs, the
    model, and the one-hot target matrix.
    """
    encoded_msg = _encodeString(msg)
    logger.info("Encode message {}=>{}".format(msg, encoded_msg))
    test_size = len(encoded_msg)

    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset,
                                                         model_type=model_type,
                                                         epochs=epochs)
    num_classes = 10

    # Shuffle inputs and labels together, in place.
    combined = list(zip(x_test, y_test))
    random.shuffle(combined)
    x_test[:], y_test[:] = zip(*combined)

    # Keep only inputs the model already classifies correctly, then take as
    # many of them as there are digits to encode.
    batch_size = 64
    predictions = np.argmax(model.predict(x_test, verbose=0), axis=1)
    correct = np.where(predictions == y_test.argmax(axis=1))[0]
    x, y = x_test[correct][:test_size], y_test[correct][:test_size]

    # One-hot targets: each digit of the encoded message becomes a class.
    targets = np.array(
        to_categorical([int(i) for i in encoded_msg], num_classes), "int32")

    adv_x = craft_attack(model,
                         x,
                         attack_name,
                         y=targets,
                         epsilon=attack_strength)

    return x, y, adv_x, model, targets
Ejemplo n.º 21
0
def main():
    """Run experiment."""

    with open(str(HUFFNER_RESULTS_DATA_FILE), 'w') as output:

        writer = csv.writer(output)
        writer.writerow(HUFFNER_CSV_HEADERS)

        # Every (dataset, timeout) combination is one run.
        for dataset, timeout in product(preprocessed, PREPROCESSING_TIMEOUTS):

            logger.info('Computing OCT for {} with timeout={}'.format(
                dataset, timeout))

            # Short timeouts use ensemble preprocessing (P1 solver);
            # longer ones use density-based preprocessing (P2 solver).
            if timeout <= 0.1:
                preprocessing, solver = PREPROCESSING_ENSEMBLE, HUFFNER_P1
            else:
                preprocessing, solver = PREPROCESSING_DENSITY, HUFFNER_P2

            solution = solve(str(HUFFNER_DATA_DIR /
                                 (dataset + HUFFNER_DATA_EXT)),
                             timeout=timeout,
                             preprocessing=preprocessing,
                             htime=min(0.3 * timeout, 1))

            writer.writerow([dataset, solver, timeout, *solution])
            output.flush()
Ejemplo n.º 22
0
def run(dataset="cifar10", model_type="basic", epochs=25, experiment_id="SP8"):
    """Grid-search SATA hyper-parameters per class count and log best rates.

    Sweeps power, epsilon and max_iter for each class count, records every
    combination's success rate, and persists all results plus the per-class
    winners as JSON in the experiment folder.
    """
    folder = "./experiments/results/experiment{}".format(experiment_id)
    os.makedirs(folder, exist_ok=True)

    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)

    model, x_train, x_test, y_train, y_test = load_model(dataset="cifar10",
                                                         model_type="basic",
                                                         epochs=25)

    # Shuffle inputs and labels together, in place.
    combined = list(zip(x_test, y_test))
    random.shuffle(combined)
    x_test[:], y_test[:] = zip(*combined)

    best_rates = {}
    all_rates = {}
    # Hyper-parameter grid. (A previous `classes = (2, )` assignment here
    # was dead code, immediately overwritten, and has been removed.)
    powers = (1, 1.5, 2, 2.5)
    dataset_classes = 10
    classes = (3, 4, 5, 6, 7)
    epsilons = (0.5, 1., 2, 3)
    max_iters = (100, 200, 500)
    nb_elements = 1000

    for nb_classes in classes:
        best_rate = 0
        best_combination = {}
        all_combinations = []
        for power in powers:
            for epsilon in epsilons:
                for max_iter in max_iters:
                    # Re-seed so every combination attacks identical targets.
                    if RANDOM_SEED > 0:
                        np.random.seed(RANDOM_SEED)

                    order = np.random.randint(0, dataset_classes,
                                              (nb_elements, nb_classes))
                    SATA.power = power
                    adv_x = SATA.craft(model,
                                       x_test[:nb_elements],
                                       order,
                                       epsilon=epsilon,
                                       max_iter=max_iter)
                    combination = {
                        "nb_elements": nb_elements,
                        "max_iter": max_iter,
                        "epsilon": epsilon,
                        "power": power,
                        "rate": SATA.rate_best
                    }
                    all_combinations.append(combination)
                    if SATA.rate_best > best_rate:
                        best_rate = SATA.rate_best
                        best_combination = combination

                    logger.info("class {}, combination {}".format(
                        nb_classes, combination))

        all_rates[nb_classes] = all_combinations
        best_rates[nb_classes] = best_combination

    # Persist the full grid and the per-class winners.
    with open("{}/{}.json".format(folder, experiment_time), 'a') as f:
        f.write(json.dumps({"all": all_rates, "best": best_rates}))

    logger.info("{}".format(best_rates.items()))
Ejemplo n.º 23
0
def _encode(msg,
            dataset,
            model_type,
            epochs,
            experiment_id,
            attack_name,
            attack_strength=2.0):
    """Embed *msg* into SATA adversarial images and score image quality.

    The message is turned into a digit string, embedded with
    ``SATA.embed_message`` (one target class per digit), and every resulting
    adversarial image is saved to disk as a 16x16 picture; SSIM and PSNR are
    then computed against the (re-scaled) clean references.

    :param msg: plain-text message to embed.
    :param dataset: dataset key for ``load_model`` and ``DATASET_CLASSES``.
    :param model_type: architecture identifier passed to ``load_model``.
    :param epochs: training epochs requested from ``load_model``.
    :param experiment_id: first component of the output picture path.
    :param attack_name: second component of the output picture path.
    :param attack_strength: NOTE(review) unused here; the SATA ``epsilon``
        below is hard-coded instead — kept for signature compatibility with
        the other ``_encode`` variants.
    :return: tuple ``(mean SSIM, mean PSNR)`` over all adversarial images.
    """
    extension = default_extension
    encoded_msg = _encodeString(msg)
    logger.info("Encode message {}=>{}".format(msg, encoded_msg))
    test_size = len(encoded_msg)
    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset,
                                                         model_type=model_type,
                                                         epochs=epochs,
                                                         use_tensorboard=True)
    num_classes = DATASET_CLASSES[dataset]

    # Shuffle images and labels together so the pairs stay aligned.
    combined = list(zip(x_test, y_test))
    random.shuffle(combined)
    x_test[:], y_test[:] = zip(*combined)

    # Keep only correctly predicted inputs.
    batch_size = 64  # NOTE(review): unused in this variant
    preds_test = np.argmax(model.predict(x_test, verbose=0), axis=1)
    inds_correct = np.where(preds_test == y_test.argmax(axis=1))[0]
    x, y = x_test[inds_correct], y_test[inds_correct]
    x, y = x[:test_size], y[:test_size]

    # Digits representable per class index; the actual message split is
    # delegated to SATA.embed_message below.
    chunk_size = int(math.log(num_classes) / math.log(10))
    #groups = _split_msg(encoded_msg, chunk_size)

    # One-hot target class per message digit (used only for bookkeeping in
    # the save loop below).
    targets = np.array(
        to_categorical([int(i) for i in encoded_msg], num_classes), "int32")
    #print(targets)

    # SATA embedding hyper-parameters (fixed for this experiment).
    class_density = 0.03  # total class * density = total attacked classes
    epsilon = 5.0
    max_iter = 100  # NOTE(review): unused — not passed to embed_message
    SATA.power = 1.5
    nb_elements = 1000

    adv_x, rate_best = SATA.embed_message(model,
                                          x_test[:nb_elements],
                                          encoded_msg,
                                          epsilon=epsilon,
                                          class_density=class_density)

    #adv_x = craft_attack(model,x,attack_name,y=targets, epsilon=attack_strength)
    yadv = np.argmax(model.predict(adv_x), axis=1)

    pictures_path = default_path.format(experiment_id, attack_name,
                                        experiment_time)
    os.makedirs(pictures_path, exist_ok=True)
    os.makedirs("{}/ref".format(pictures_path), exist_ok=True)
    SSIM = []
    PSNR = []

    # Save each adversarial image (down-scaled to 16x16), re-load it, and
    # score it against the clean image re-scaled the same way.
    # NOTE(review): adv_x is crafted from x_test[:nb_elements] while the
    # references x[i]/targets[i] come from the correctly-predicted subset —
    # confirm the indices actually correspond.
    for i, _adv in enumerate(adv_x):
        predicted = yadv[i]
        encoded = np.argmax(targets[i])
        truth = np.argmax(y[i])
        adv_path = "{}/{}_predicted{}_encoded{}_truth{}.{}".format(
            pictures_path, i, predicted, encoded, truth, extension)
        # NOTE(review): real_path is computed but the reference image is
        # never actually written in this variant.
        real_path = "{}/ref/{}.{}".format(pictures_path, i, extension)

        adv = array_to_img(_adv)
        adv = adv.resize((16, 16), Image.BILINEAR)
        #adv = adv.convert("L")
        adv.save(adv_path)

        # Re-load from disk so the metric reflects the saved file.
        adv_loaded = _load_image(adv_path)

        real_img = array_to_img(x[i])
        real_img = real_img.resize((16, 16), Image.BILINEAR)
        #real_img = real_img.convert("L")
        #real = np.squeeze(img_to_array(real_img))
        real = img_to_array(real_img)

        # ssim_distance yields a distance; convert to similarity (1 - d).
        ssim = 1 - np.array(
            list(
                ssim_distance(None,
                              np.array([real]),
                              adv_x=np.array([adv_loaded]),
                              distance_only=True)))
        SSIM.append(ssim)

        psnr = _psnr_loss(adv_loaded, real)
        PSNR.append(psnr)

    return np.array(SSIM).mean(), np.array(PSNR).mean()
Ejemplo n.º 24
0
def run(dataset="cifar100", model_type="basic", epochs=50, exp_id="SP_sata"):
    """Estimate SATA embedding density as a function of classes per image.

    For each total class count in ``num_classes_tbl`` and each step of the
    classes-per-image sweep, ``nb_trials`` random messages of length ``l``
    are grouped with ``SATA.embed_message(..., groups_only=True)`` (no model
    or cover images needed) and the average number of cover images required
    is recorded, then plotted as embedding density (bits per pixel) against
    classes embedded per image.

    The ``dataset``/``model_type``/``epochs``/``exp_id`` parameters are
    unused here; they keep the signature uniform with the other experiment
    entry points.
    """

    if RANDOM_SEED > 0:
        # Seeding is deliberately disabled: each trial must draw a fresh
        # random message.
        # random.seed(RANDOM_SEED)
        # np.random.seed(RANDOM_SEED)
        pass

    l = 33000  # message length in characters (#6643 was an earlier value)

    num_classes_tbl = (10000, )
    nb_trials = 100  # random messages averaged per sweep step
    nb_steps = 1000  # resolution of the classes-per-image sweep

    nb_pictures = []
    nb_pixels = 32 * 32 * 3  # CIFAR image size, for bits-per-pixel density

    def plot(nb_pictures):
        """Plot density (left axis) and image count (right axis) per table."""
        for i, nb_picture in enumerate(nb_pictures):
            density = l / (nb_pixels * np.array(nb_picture))
            x = np.arange(len(nb_picture)) + 1
            x = x * (num_classes_tbl[i] / nb_steps)

            fig, ax1 = plt.subplots()

            color = 'tab:red'
            ax1.set_xlabel('Nb classes embedded per image (k)')
            ax1.set_ylabel('Embedding Density (Bits Per Pixel)', color=color)
            ax1.plot(x, density, color=color, linewidth=2)
            ax1.tick_params(axis='y', labelcolor=color)

            # Second y-axis sharing the same x-axis.
            ax2 = ax1.twinx()

            color = 'tab:blue'
            ax2.set_ylabel('Number of images to embed the message',
                           color=color)
            ax2.plot(x, nb_picture, color=color, linewidth=2)
            ax2.tick_params(axis='y', labelcolor=color)

            fig.tight_layout()  # avoid clipping the right y-label
            plt.title("Embedding density for N={}".format(num_classes_tbl[i]))

            print("density", density)
            print("nb_picture", nb_picture)

        plt.show()

    for num_classes in num_classes_tbl:
        nb_pictures.append([])
        logger.info("total classes {}".format(num_classes))
        for nb_embedded_classes in range(1, nb_steps):
            num_embedded_classes = int(nb_embedded_classes *
                                       (num_classes / nb_steps))
            nb_imgs = []
            for j in range(nb_trials):
                msg = "".join(
                    [strs[random.randint(0,
                                         len(strs) - 1)] for i in range(l)])
                encoded_msg = _encodeString(msg)
                logger.info("Encode message {}=>{}".format(msg, encoded_msg))

                # groups_only=True: only the digit->image grouping is
                # computed, so neither a model nor cover images are needed.
                groups = SATA.embed_message(
                    None,
                    None,
                    encoded_msg,
                    epsilon=None,
                    class_density=num_embedded_classes / num_classes,
                    num_classes=num_classes,
                    groups_only=True)
                nb_imgs.append(len(groups))
            nb_pictures[-1].append(np.array(nb_imgs).mean())

    plot(nb_pictures)

    return
    # Unreachable: result persistence disabled by the early return above,
    # kept for reference. Bug fix applied for when it is re-enabled:
    # ``num_classes_tbl`` holds ints, and ``"_".join`` on ints raises
    # TypeError, so convert each entry to str first.
    folder = "./experiments/results/experiment{}".format("sata")
    os.makedirs(folder, exist_ok=True)
    with open(
            "{}/{}.json".format(folder,
                                "_".join(str(n) for n in num_classes_tbl)),
            'a') as f:
        f.write("{}".format(json.dumps(nb_pictures)))
Ejemplo n.º 25
0
def run(dataset="cifar10",
        model_type="basic",
        epochs=25,
        experiment_id="SP9_6"):
    """Run the SPA steganalysis experiment over a directory of models.

    For every matching model file under ``models_path``, a random message is
    embedded with ``_encode_adv`` and the adversarial/clean image sets are
    scored with the SPA statistical detector. Each per-model record is
    appended incrementally to a JSON array on disk so partial results
    survive a crash.
    """
    attack_name = "targeted_pgd"
    logger.info("running {} {} {}".format(dataset, model_type, attack_name))
    folder = "./experiments/results/experiment{}".format(experiment_id)

    os.makedirs("{}".format(folder), exist_ok=True)

    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)

    l = 100  # message length in characters
    msg = "".join([strs[random.randint(0, len(strs) - 1)] for i in range(l)])

    recovery_rates = []
    extension = "h5"  # model-file extension to scan for
    models_path = "../products/run31c/cifar/ee50_te300_mr0.1_sr0.2_1565783786"
    max_models = 150
    skip = 0
    results_path = "{}/{}.json".format(folder, experiment_time)
    with open(results_path, 'a') as f:
        f.write("[")

    # Bug fix: the original wrote "{record}," for every record, leaving a
    # trailing comma before the closing "]" and producing invalid JSON.
    # Prefix a comma to every record after the first instead.
    first_record = True
    for file in os.listdir(models_path):
        if max_models == 0:
            break
        if (file.startswith("e4") or file.startswith("e5")
                or file.startswith("e6")) and file.endswith(
                    ".{}".format(extension)):

            if skip > 0:
                skip = skip - 1
                continue

            max_models = max_models - 1
            model_type = "{}/{}".format(models_path, file)
            atk_strength = 5.0

            x, y, adv_x, model, targets = _encode_adv(
                msg,
                dataset,
                "basic",
                epochs,
                experiment_id,
                attack_name,
                attack_strength=atk_strength)

            pic_folder = "{}/{}".format(folder, file)
            os.makedirs(pic_folder, exist_ok=True)
            # NOTE(review): the images are not actually written (save calls
            # were disabled); only the would-be paths are handed to the
            # detector, which works on the in-memory arrays via ``imgs=``.
            files = []
            for i, adv in enumerate(adv_x):
                files.append("{}/{}.{}".format(pic_folder, i, extension))

            # SPA detector score on adversarial vs. clean images.
            rate = {"model": file}
            rate["stat_score_adv"] = _detect_spa(files, imgs=adv_x)
            rate["stat_score_real"] = _detect_spa(files, imgs=x)

            recovery_rates.append(rate)

            with open(results_path, 'a') as f:
                if not first_record:
                    f.write(",")
                f.write(json.dumps(rate))
                first_record = False

    logger.info(recovery_rates)
    with open(results_path, 'a') as f:
        f.write("]")

    return
Ejemplo n.º 26
0
def run(dataset="cifar10",
        model_type="basic",
        epochs=25,
        experiment_id="SP9",
        start=0):
    """Measure message recovery under four image transformations.

    For each candidate model file, the same random message is embedded four
    times and decoded after a different transformation each time (rotate,
    crop, upscale, jpeg-compress). Per-model recovery rates are appended
    incrementally to a JSON array on disk.

    :param start: number of matching model files to skip before processing
        (resume support).
    """
    attack_name = "targeted_pgd"
    logger.info("running {} {} {}".format(dataset, model_type, attack_name))
    folder = "./experiments/results/experiment{}".format(experiment_id)
    os.makedirs("{}".format(folder), exist_ok=True)

    if RANDOM_SEED > 0:
        random.seed(RANDOM_SEED)

    l = 100  # message length in characters
    msg = "".join([strs[random.randint(0, len(strs) - 1)] for i in range(l)])

    recovery_rates = []
    extension = "h5"  # model-file extension to scan for
    models_path = "../products/run31c/cifar/ee50_te300_mr0.1_sr0.2_1565783786"
    max_models = 150
    skip = start
    results_path = "{}/{}.json".format(folder, experiment_time)
    with open(results_path, 'a') as f:
        f.write("[")

    # Bug fix: the original wrote "{record}," for every record, leaving a
    # trailing comma before the closing "]" and producing invalid JSON.
    # Prefix a comma to every record after the first instead.
    first_record = True
    for file in os.listdir(models_path):
        if max_models == 0:
            break
        if (file.startswith("e4") or file.startswith("e5")
                or file.startswith("e6")) and file.endswith(
                    ".{}".format(extension)):

            if skip > 0:
                skip = skip - 1
                continue

            max_models = max_models - 1
            model_type = "{}/{}".format(models_path, file)
            exp_time = 0

            # (transformation, experiment sub-id, image extension) — the
            # compress case writes jpg so real compression is applied.
            transformations = (("rotate", "SP9/1", extension),
                               ("crop", "SP9/2", extension),
                               ("upscale", "SP9/3", extension),
                               ("compress", "SP9/4", "jpg"))

            rate = {"model": file}
            for transformation, sub_id, ext in transformations:
                advs = _encode(msg,
                               dataset,
                               model_type,
                               epochs,
                               sub_id,
                               attack_name,
                               attack_strength=5.,
                               extension=ext,
                               transformation=transformation)
                rate["{}_recovery".format(transformation)] = _decode(
                    dataset,
                    model_type,
                    epochs,
                    sub_id,
                    attack_name,
                    exp_time,
                    extension=ext,
                    advs=advs)

            recovery_rates.append(rate)

            with open(results_path, 'a') as f:
                if not first_record:
                    f.write(",")
                f.write(json.dumps(rate))
                first_record = False

    logger.info(recovery_rates)
    with open(results_path, 'a') as f:
        f.write("]")

    return
Ejemplo n.º 27
0
def _encode(msg,
            dataset,
            model_type,
            epochs,
            experiment_id,
            attack_name,
            keep_one=False,
            quality=100,
            attack_strength=2.0,
            extension=None):
    """Craft targeted adversarial images for *msg* and log quality metrics.

    One adversarial image is crafted per message digit (the digit is the
    attack's target class). SSIM, LPIPS and PSNR are computed for the raw
    adversarials and again after compression, and the means/variances are
    logged. Nothing is written to disk in this variant.

    :param keep_one: if True, a single cover image is repeated for every
        digit and the model's own predictions replace the ground truth.
    :param quality: NOTE(review) unused in this variant.
    :param attack_strength: epsilon passed to ``craft_attack``.
    :param extension: output extension; defaults to ``default_extension``.
    :return: the module-level ``experiment_time`` stamp.
    """
    if not extension:
        extension = default_extension
    encoded_msg = _encodeString(msg)
    logger.info("Encode message {}=>{}".format(msg, encoded_msg))
    test_size = len(encoded_msg)
    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset,
                                                         model_type=model_type,
                                                         epochs=epochs)
    num_classes = 10

    # Shuffle images and labels together so the pairs stay aligned.
    combined = list(zip(x_test, y_test))
    random.shuffle(combined)
    x_test[:], y_test[:] = zip(*combined)

    # Keep only correctly predicted inputs.
    batch_size = 64  # NOTE(review): unused in this variant
    preds_test = np.argmax(model.predict(x_test, verbose=0), axis=1)
    inds_correct = np.where(preds_test == y_test.argmax(axis=1))[0]
    x, y = x_test[inds_correct], y_test[inds_correct]
    x, y = x[:test_size], y[:test_size]

    # One-hot target class per message digit.
    targets = np.array(
        to_categorical([int(i) for i in encoded_msg], num_classes), "int32")
    #print(targets)

    if keep_one:
        # Repeat a single cover for every digit; use the model's own
        # predictions as labels for the repeated image.
        x = np.repeat(np.array([x[0, :, :, :]]), y.shape[0], axis=0)
        y = model.predict(x)
    adv_x = craft_attack(model,
                         x,
                         attack_name,
                         y=targets,
                         epsilon=attack_strength)
    yadv = np.argmax(model.predict(adv_x), axis=1)  # NOTE(review): unused

    # Output directories are created but nothing is saved in this variant.
    pictures_path = default_path.format(experiment_id, attack_name,
                                        experiment_time)
    os.makedirs(pictures_path, exist_ok=True)
    os.makedirs("{}/ref".format(pictures_path), exist_ok=True)

    # Perceptual metrics on the raw adversarial images; ssim_distance yields
    # a distance, converted to similarity via 1 - d.
    SSIM = 1 - np.array(
        list(ssim_distance(model, x, adv_x=adv_x, distance_only=True)))
    LPIPS = np.array(
        list(lpips_distance(model, x, adv_x=adv_x, distance_only=True)))
    PSNR = np.array([_psnr_loss(x[i], adv_x[i]) for i in range(len(x))])

    # Same metrics after compressing the adversarial images.
    _compressed = _compress_batch(adv_x)
    SSIM1 = 1 - np.array(
        list(ssim_distance(model, x, adv_x=_compressed, distance_only=True)))
    LPIPS1 = np.array(
        list(lpips_distance(model, x, adv_x=_compressed, distance_only=True)))
    PSNR1 = np.array([_psnr_loss(x[i], _compressed[i]) for i in range(len(x))])

    mean_LPIPS, var_LPIPS = LPIPS.mean(axis=0), LPIPS.var(axis=0)
    mean_LPIPS1, var_LPIPS1 = LPIPS1.mean(axis=0), LPIPS1.var(axis=0)
    logger.info("LPIPS mean:{} var:{}".format(mean_LPIPS, var_LPIPS))
    logger.info("LPIPS1 mean:{} var:{}".format(mean_LPIPS1, var_LPIPS1))

    mean_PSNR, var_PSNR = PSNR.mean(axis=0), PSNR.var(axis=0)
    mean_PSNR1, var_PSNR1 = PSNR1.mean(axis=0), PSNR1.var(axis=0)
    logger.info("PSNR mean:{} var:{}".format(mean_PSNR, var_PSNR))
    logger.info("PSNR1 mean:{} var:{}".format(mean_PSNR1, var_PSNR1))

    mean_SSIM, var_SSIM = SSIM.mean(axis=0), SSIM.var(axis=0)
    mean_SSIM1, var_SSIM1 = SSIM1.mean(axis=0), SSIM1.var(axis=0)
    logger.info("SSIM mean:{} var:{}".format(mean_SSIM, var_SSIM))
    logger.info("SSIM1 mean:{} var:{}".format(mean_SSIM1, var_SSIM1))

    return experiment_time
Ejemplo n.º 28
0
def _encode(msg,
            dataset,
            model_type,
            epochs,
            experiment_id,
            attack_name,
            keep_one=False,
            quality=100,
            attack_strength=2.0,
            extension=None):
    """Embed *msg* into adversarial images and save them (plus clean refs).

    Each digit of the encoded message becomes the target class of one
    targeted attack; the resulting adversarial image and its clean original
    are written under the experiment picture path.

    :param keep_one: if True, a single cover image is repeated for every
        digit and the model's own predictions replace the ground truth.
    :param quality: png compression level / jpg quality for ``save_img``.
    :param attack_strength: epsilon passed to ``craft_attack``.
    :param extension: output extension; falls back to ``default_extension``.
    :return: the module-level ``experiment_time`` stamp of the output dir.
    """
    if not extension:
        extension = default_extension
    digits = _encodeString(msg)
    logger.info(f"Encode message {msg}=>{digits}")
    test_size = len(digits)
    model, x_train, x_test, y_train, y_test = load_model(
        dataset=dataset, model_type=model_type, epochs=epochs)
    num_classes = 10

    # Shuffle the test set in place, keeping images and labels aligned.
    paired = list(zip(x_test, y_test))
    random.shuffle(paired)
    x_test[:], y_test[:] = zip(*paired)

    # Restrict to inputs the model already classifies correctly.
    batch_size = 64
    predictions = np.argmax(model.predict(x_test, verbose=0), axis=1)
    correct_idx = np.where(predictions == y_test.argmax(axis=1))[0]
    x = x_test[correct_idx][:test_size]
    y = y_test[correct_idx][:test_size]

    # One-hot target class per message digit.
    targets = np.array(
        to_categorical([int(d) for d in digits], num_classes), "int32")

    if keep_one:
        # Reuse a single cover image for every digit; labels become the
        # model's own predictions on the repeated image.
        x = np.repeat(np.array([x[0, :, :, :]]), y.shape[0], axis=0)
        y = model.predict(x)
    adv_x = craft_attack(model, x, attack_name, y=targets,
                         epsilon=attack_strength)
    adv_preds = np.argmax(model.predict(adv_x), axis=1)

    pictures_path = default_path.format(experiment_id, attack_name,
                                        experiment_time)
    os.makedirs(pictures_path, exist_ok=True)
    os.makedirs(f"{pictures_path}/ref", exist_ok=True)

    # Write each adversarial image plus its clean reference.
    for i, adv in enumerate(adv_x):
        adv_path = (f"{pictures_path}/{i}_predicted{adv_preds[i]}"
                    f"_encoded{np.argmax(targets[i])}"
                    f"_truth{np.argmax(y[i])}.{extension}")
        real_path = f"{pictures_path}/ref/{i}.{extension}"

        if extension == "png":
            level = int(10 - quality / 100)
            save_img(adv_path, adv, compress_level=level)
            save_img(real_path, x[i], compress_level=level)
        elif extension == "jpg":
            save_img(adv_path, adv, quality=quality)
            save_img(real_path, x[i], quality=quality)

    return experiment_time
Ejemplo n.º 29
0
def _encode(msg,
            dataset,
            model_type,
            epochs,
            experiment_id,
            attack_name,
            attack_strength=5.0,
            nb_classes=2,
            experiment_time=""):
    """Embed *msg* with SATA (fixed classes per image) and dump arrays.

    Embeds the encoded message using ``nb_classes`` target classes per cover
    image, then persists the cover/adversarial images, their predicted
    labels and the run statistics as ``.npy`` files under the experiment
    picture path.

    :param attack_strength: NOTE(review) unused; SATA's ``epsilon`` below is
        hard-coded instead.
    :param nb_classes: number of message digits embedded per image.
    :param experiment_time: timestamp suffix for the output directory
        (shadows the module-level ``experiment_time`` used elsewhere).
    :return: ``None``; results are persisted to disk only.
    """
    print(dataset, model_type, epochs)
    extension = default_extension  # NOTE(review): unused in this variant
    encoded_msg = _encodeString(msg)
    logger.info("Encode message {}=>{}".format(msg, encoded_msg))
    test_size = len(encoded_msg)
    model, x_train, x_test, y_train, y_test = load_model(dataset=dataset,
                                                         model_type=model_type,
                                                         epochs=epochs)
    num_classes = DATASET_CLASSES[dataset]

    # Shuffle images and labels together so the pairs stay aligned.
    combined = list(zip(x_test, y_test))
    random.shuffle(combined)
    x_test[:], y_test[:] = zip(*combined)

    # Keep only correctly predicted inputs.
    batch_size = 64  # NOTE(review): unused
    preds_test = np.argmax(model.predict(x_test, verbose=0), axis=1)
    inds_correct = np.where(preds_test == y_test.argmax(axis=1))[0]
    x, y = x_test[inds_correct], y_test[inds_correct]
    x, y = x[:test_size], y[:test_size]

    # NOTE(review): chunk_size and targets are computed but unused — the
    # embedding below is driven directly by encoded_msg.
    chunk_size = int(math.log(num_classes) / math.log(10))
    #groups = _split_msg(encoded_msg, chunk_size)

    targets = np.array(
        to_categorical([int(i) for i in encoded_msg], num_classes), "int32")
    #print(targets)

    # SATA hyper-parameters for this experiment.
    epsilon = 5.0
    max_iter = 100
    SATA.power = 1.5
    nb_elements = 1000

    sub_x = x_test[:nb_elements]
    sub_y = y_test[:nb_elements]  # NOTE(review): unused

    # Time the embedding itself (begin/end land in the stats array below).
    begin = time.time()
    adv_x, ref_x, rate_best = SATA.embed_message(model,
                                                 sub_x,
                                                 encoded_msg,
                                                 epsilon=epsilon,
                                                 nb_classes_per_img=nb_classes)
    ref_y = np.argmax(model.predict(ref_x, verbose=0), axis=1)
    end = time.time()

    #adv_x = craft_attack(model,x,attack_name,y=targets, epsilon=attack_strength)
    adv_y = np.argmax(model.predict(adv_x), axis=1)
    nb_required_imgs = adv_y.shape[0]
    # NOTE(review): mixing strings and numbers makes np.array(stats) coerce
    # everything to a unicode array — numeric fields are saved as strings.
    stats = [
        end, begin, msg, nb_required_imgs, nb_classes, epsilon, max_iter,
        SATA.power, nb_elements
    ]
    print("nb images required: {}".format(nb_required_imgs))

    pictures_path = default_path.format(experiment_id, attack_name,
                                        experiment_time)
    os.makedirs(pictures_path, exist_ok=True)

    # Persist covers, adversarials, their predicted labels and statistics.
    np.save("{}/ref_x.npy".format(pictures_path), ref_x)
    np.save("{}/ref_y.npy".format(pictures_path), ref_y)
    np.save("{}/adv_x.npy".format(pictures_path), adv_x)
    np.save("{}/adv_y.npy".format(pictures_path), adv_y)
    np.save("{}/stats.npy".format(pictures_path), np.array(stats))
Ejemplo n.º 30
0
    def generate(self, x_all, order, threshold=0.1, nb_classes=10):
        """Generate SATA adversarial examples embedding the class *order*.

        Builds one soft target vector per image from *order* — the class at
        rank ``i`` gets score ``max(0, (1 - i * threshold) ** SATA.power)``
        — then runs the projected-gradient loop over several random cover
        picks, keeping the best-performing adversarials across restarts.

        NOTE(review): ``random.shuffle(x_all)`` mutates the caller's array
        in place on every cover-pick iteration — confirm this is intended.

        :param x_all: pool of candidate cover images; the first
            ``len(order)`` (after shuffling) are attacked each iteration.
        :param order: per-image ranked sequence of target class indices.
        :param threshold: per-rank decay applied to the target score.
        :param nb_classes: total number of classes in the target vectors.
        :return: ``(adv_x_best, x)`` — best adversarials found and the
            covers used in the last cover pick.
        """
        self.targeted = True

        y0 = []

        # Soft (non one-hot) target per image: higher-ranked classes get
        # larger target scores, decaying with rank.
        for o in order:
            y = [0] * nb_classes
            for i, j in enumerate(o):
                y[j] = max(0, math.pow(1 - i * threshold, SATA.power))
            y0.append(y)
            #print(y)

        y = np.array(y0)
        """
        Generate adversarial samples and return them in an array.

        :param x: An array with the original inputs.
        :type x: `np.ndarray`
        :param y: The labels for the data `x`. Only provide this parameter if you'd like to use true
                  labels when crafting adversarial samples. Otherwise, model predictions are used as labels to avoid the
                  "label leaking" effect (explained in this paper: https://arxiv.org/abs/1611.01236). Default is `None`.
                  Labels should be one-hot-encoded.
        :type y: `np.ndarray`
        :return: An array holding the adversarial examples.
        :rtype: `np.ndarray`
        """

        targets = y.copy()
        logger.info('Nb images to embed the message {}'.format(
            targets.shape[0]))

        # Per-image flags: True where adv_x_best already fools the model.
        adv_x_best_index = [False] * len(order)
        adv_x_best = None
        rate_best = 0.0

        for i in range(SATA.num_cover_init):
            logger.info('Random inputs pick iteration index {}'.format(i))
            # Pick a fresh random set of covers (mutates x_all in place).
            random.shuffle(x_all)
            x = x_all.copy()[0:len(order)]
            SATA.success_rates.append([])
            for i_random_init in range(max(1, self.num_random_init)):
                adv_x = x  #.astype(NUMPY_DTYPE)
                SATA.success_rates[-1].append([])

                # PGD inner loop: perturb toward the soft targets, then
                # project back into the epsilon-ball around the covers.
                for i_max_iter in range(self.max_iter):

                    adv_x = self._compute(
                        adv_x, targets, self.eps, self.eps_step,
                        self.num_random_init > 0 and i_max_iter == 0)

                    if self._project:
                        noise = projection(adv_x - x, self.eps, self.norm)
                        adv_x = x + noise

                # Keep previously-successful images from adv_x_best; take
                # the fresh attempt everywhere else.
                adv_x = np.array([
                    adv_x[i] if adv_x_best is None or not adv_x_best_index[i]
                    else adv_x_best[i] for i in range(len(adv_x))
                ])
                rate, adv_x_best_index = self.compute_success(
                    self.classifier, x, targets, adv_x, self.targeted)
                rate = 100 * rate
                SATA.success_rates[-1][-1].append(rate)

                if adv_x_best is None:
                    adv_x_best = adv_x
                    rate_best = rate

                elif rate > rate_best:
                    rate_best = rate
                    # NOTE(review): if adv_x_best_index is a plain list,
                    # ``adv_x_best_index == True`` evaluates to a scalar
                    # False and np.where keeps adv_x_best unchanged —
                    # confirm compute_success returns an ndarray mask.
                    adv_x_best = np.where(adv_x_best_index == True, adv_x,
                                          adv_x_best)

                logger.info('Success rate of SATA attack: %.2f%%', rate_best)
                SATA.rate_best = rate_best
                # Early exit once every image embeds its classes.
                if rate_best == 100:
                    return adv_x_best, x

        return adv_x_best, x