Code example #1
import numpy as np

# Assumed imports from the ares package (not shown in the original snippet).
from ares.dataset import cifar10, imagenet, dataset_to_iterator


def gen_starting_points(model,
                        ys,
                        ys_target,
                        goal,
                        dataset_name,
                        session,
                        pred_fn,
                        cache=None):
    ''' Generate starting points which are already adversarial according to the adversarial goal.

    :param model: The model.
    :param ys: True labels.
    :param ys_target: Target labels.
    :param goal: Adversarial goal.
    :param dataset_name: The dataset's name. Valid values are ``'cifar10'`` and ``'imagenet'``.
    :param session: ``tf.Session`` for loading dataset.
    :param pred_fn: A function which accepts a batch of model inputs as a numpy array and returns the model's
        predictions.
    :param cache: A dictionary used to cache generated starting points for reuse. The same cache must not be
        shared between different models or adversarial goals.
    :return: Starting points as a numpy array.
    '''
    if cache is None:
        cache = dict()

    starting_points = np.zeros((len(ys), *model.x_shape),
                               dtype=model.x_dtype.as_numpy_dtype)

    # For 'ut' and 'tm' goals, sample uniform random noise until the model's
    # prediction differs from the true label.
    if goal in ('ut', 'tm'):
        for index, y in enumerate(ys):
            y = int(y)
            if y not in cache:
                while True:
                    x = np.random.uniform(low=model.x_min,
                                          high=model.x_max,
                                          size=(1, *model.x_shape))
                    x = x.astype(model.x_dtype.as_numpy_dtype)
                    x_pred = pred_fn(x)[0]
                    if x_pred != y:
                        cache[y] = x[0]
                        break
            starting_points[index] = cache[y]
    else:
        # For targeted goals, pick an image from the dataset that the model
        # already classifies as the target label.
        for index, y in enumerate(ys_target):
            if y not in cache:
                if dataset_name == 'cifar10':
                    dataset = cifar10.load_dataset_for_classifier(
                        model, target_label=y).batch(1)
                else:
                    dataset = imagenet.load_dataset_for_classifier(
                        model, target_label=y).batch(1)
                for _, x, _ in dataset_to_iterator(dataset, session):
                    x_pred = pred_fn(x)[0]
                    if x_pred == y:
                        cache[y] = x[0]
                        break
            starting_points[index] = cache[y]

    return starting_points
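
A minimal usage sketch (not part of the original file) follows. It assumes a model and session loaded as in the other examples on this page, and wraps the model's label op in a small pred_fn helper, mirroring code example #2; the concrete goal and dataset name are placeholders.

# Usage sketch, assuming `model`, `session`, `ys` and `ys_target` are already
# loaded as in the other examples on this page.
xs_ph = tf.placeholder(model.x_dtype, shape=(None, *model.x_shape))
labels_op = model.labels(xs_ph)

def pred_fn(xs_batch):
    # Run the model's label op on a numpy batch of inputs.
    return session.run(labels_op, feed_dict={xs_ph: xs_batch})

cache = dict()  # reuse the cache only for this model and goal
starting_points = gen_starting_points(model, ys, ys_target, goal='ut',
                                      dataset_name='cifar10', session=session,
                                      pred_fn=pred_fn, cache=cache)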
Code example #2
import os

import numpy as np
import tensorflow as tf

# Assumed imports (not shown in the original snippet): cifar10 and
# dataset_to_iterator come from ares.dataset, as in the other examples.
from ares.dataset import cifar10, dataset_to_iterator
from ares.model.loader import load_model_from_path
from ares.model.ensemble import EnsembleModel, EnsembleRandomnessModel

batch_size = 100

config = tf.ConfigProto()
config.gpu_options.allow_growth = True
session = tf.Session(config=config)

model_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                          '../example/cifar10/resnet56.py')
model = load_model_from_path(model_path).load(session)
e_model = EnsembleModel([model, model], [0.5, 0.5])
er_model = EnsembleRandomnessModel(model, 10, session)

ds = cifar10.load_dataset_for_classifier(model).batch(batch_size).take(1)
_, xs, ys = next(dataset_to_iterator(ds, session))

xs_ph = tf.placeholder(model.x_dtype, shape=(batch_size, *model.x_shape))

labels = model.labels(xs_ph)
e_labels = e_model.labels(xs_ph)
er_labels = er_model.labels(xs_ph)

labels_np = session.run(labels, feed_dict={xs_ph: xs})
e_labels_np = session.run(e_labels, feed_dict={xs_ph: xs})
er_labels_np = session.run(er_labels, feed_dict={xs_ph: xs})

assert np.array_equal(labels_np, e_labels_np)
assert np.array_equal(labels_np, er_labels_np)
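
As a small follow-up (not in the original snippet), the predicted labels can also be compared against the ground-truth labels ys loaded above, confirming that both ensemble wrappers preserve the base model's accuracy on this batch.

# Follow-up sketch: batch accuracy of the base model and the two ensemble
# wrappers should match, since both reduce to the same deterministic model.
accuracy = np.mean(labels_np == ys)
e_accuracy = np.mean(e_labels_np == ys)
er_accuracy = np.mean(er_labels_np == ys)
print('accuracy: base={:.3f}, ensemble={:.3f}, randomized={:.3f}'.format(
    accuracy, e_accuracy, er_accuracy))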
Code example #3
File: attack_cli.py  Project: Fugoes/realsafe
    if args.logger:
        config_kwargs['logger'] = logger

    print('Loading tensorflow session...')
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    session = tf.Session(config=config)

    print('Loading model...')
    model = load_model_from_path(args.model).load(session)

    print('Loading dataset...')
    if args.dataset == 'cifar10':
        from ares.dataset import cifar10
        dataset = cifar10.load_dataset_for_classifier(model,
                                                      offset=args.offset,
                                                      load_target=True)
    else:
        from ares.dataset import imagenet
        dataset = imagenet.load_dataset_for_classifier(model,
                                                       offset=args.offset,
                                                       load_target=True)
    dataset = dataset.take(args.count)

    print('Loading attack...')
    attack_name, batch_size, dataset_name = args.method, args.batch_size, args.dataset
    goal, distance_metric = args.goal, args.distance_metric

    kwargs = dict()
    for kwarg in ('learning_rate', 'cw_loss_c', 'samples_per_draw',
                  'init_distortion'):