Example #1
        "TSNE of Sign of Adv Gradients, SNNLCrossEntropy Model, factor:" +
        str(FLAGS.SNNL_factor),
        fontsize=42)
    imscatter(X_embedded, x_test[:batch_size], zoom=2, cmap="Purples")
    plt.savefig(output_dir + 'adversarial_gradients_SNNL_factor_' +
                str(SNNL_factor) + '.png')


def main(argv=None):
    SNNL_example(nb_epochs=FLAGS.nb_epochs,
                 batch_size=FLAGS.batch_size,
                 learning_rate=FLAGS.learning_rate,
                 nb_filters=FLAGS.nb_filters,
                 SNNL_factor=FLAGS.SNNL_factor,
                 output_dir=FLAGS.output_dir)


if __name__ == '__main__':
    flags.DEFINE_integer('nb_filters', NB_FILTERS, 'Model size multiplier')
    flags.DEFINE_integer('nb_epochs', NB_EPOCHS,
                         'Number of epochs to train model')
    flags.DEFINE_integer('batch_size', BATCH_SIZE, 'Size of training batches')
    flags.DEFINE_float('SNNL_factor', SNNL_FACTOR,
                       'Multiplier for Soft Nearest Neighbor Loss')
    flags.DEFINE_float('learning_rate', LEARNING_RATE,
                       'Learning rate for training')
    flags.DEFINE_string('output_dir', OUTPUT_DIR,
                        'output directory for saving figures')

    tf.app.run()
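Each of these scripts follows the same pattern: every flags.DEFINE_* call registers a command-line option with a default value, tf.app.run() parses sys.argv, and only then is main() invoked, so FLAGS values should not be read at import time. A minimal, self-contained sketch of that pattern under TensorFlow 1.x (the script name, the default values, and the use of tf.app.flags instead of the project's own flags import are assumptions made for illustration):

# snnl_flags_demo.py -- illustrative only, not part of the tutorial
import tensorflow as tf

flags = tf.app.flags
FLAGS = flags.FLAGS


def main(argv=None):
    # Flag values are valid here because tf.app.run() has already parsed sys.argv.
    print('epochs=%d  SNNL_factor=%.1f  output_dir=%s'
          % (FLAGS.nb_epochs, FLAGS.SNNL_factor, FLAGS.output_dir))


if __name__ == '__main__':
    flags.DEFINE_integer('nb_epochs', 6, 'Number of epochs to train model')
    flags.DEFINE_float('SNNL_factor', -10.0,
                       'Multiplier for Soft Nearest Neighbor Loss')
    flags.DEFINE_string('output_dir', '/tmp/', 'output directory for saving figures')
    tf.app.run()  # parse flags, then call main()

Defaults are overridden on the command line, e.g. python snnl_flags_demo.py --nb_epochs=10 --SNNL_factor=-30.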
Example #2
    flags.DEFINE_boolean('save', True, 'Whether to save from a checkpoint.')
    flags.DEFINE_string('save_dir', 'runs/X', 'Location to store logs/model.')
    flags.DEFINE_string('model_type', 'madry',
                        'Model type: basic|madry|resnet_tf.')
    flags.DEFINE_string(
        'attack_type_train', 'MadryEtAl_y_multigpu',
        'Attack type for adversarial training: '
        'FGSM|MadryEtAl{,_y}{,_multigpu}.')
    flags.DEFINE_string('attack_type_test', 'FGSM',
                        'Attack type for test: FGSM|MadryEtAl{,_y}.')
    flags.DEFINE_string('dataset', 'mnist', 'Dataset mnist|cifar10.')
    flags.DEFINE_boolean(
        'only_adv_train', False,
        'Do not train with clean examples when adv training.')
    flags.DEFINE_integer('save_steps', 50, 'Save model per X steps.')
    flags.DEFINE_integer('attack_nb_iter_train', None,
                         'Number of iterations of training attack.')
    flags.DEFINE_integer('eval_iters', 1, 'Evaluate every X steps.')
    flags.DEFINE_integer('lrn_step', 30000, 'Step to decrease learning rate '
                         'for ResNet.')
    flags.DEFINE_float('adam_lrn', 0.001, 'Learning rate for Adam Optimizer.')
    flags.DEFINE_float('mom_lrn', 0.1, 'Learning rate for Momentum Optimizer.')
    flags.DEFINE_integer('ngpu', 1, 'Number of gpus.')
    flags.DEFINE_integer('sync_step', 1, 'Sync params frequency.')
    flags.DEFINE_boolean('fast_tests', False, 'Fast tests against attacks.')
    flags.DEFINE_string(
        'data_path', './datasets/', 'Path to datasets. '
        'Each dataset should be in a subdirectory.')

    app.run()
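The attack_type_train and attack_type_test strings select which cleverhans attack the script builds; the dispatch code itself is not shown in this snippet. A hedged sketch of how such a mapping could look (build_attack and the dictionary are hypothetical helpers; FastGradientMethod and MadryEtAl are the standard cleverhans attack classes, the '_y' suffix conventionally means the true labels are fed to the attack, and '_multigpu' selects the multi-GPU variant shipped with this example):

from cleverhans.attacks import FastGradientMethod, MadryEtAl

# Hypothetical dispatch table, not taken from the script above.
_ATTACKS = {
    'FGSM': FastGradientMethod,
    'MadryEtAl': MadryEtAl,
    'MadryEtAl_y': MadryEtAl,  # caller also passes y=labels to generate()
}


def build_attack(attack_type, model, sess=None):
    """Map an attack_type flag value onto a constructed cleverhans attack."""
    try:
        attack_cls = _ATTACKS[attack_type]
    except KeyError:
        raise ValueError('Unsupported attack type: %s' % attack_type)
    return attack_cls(model, sess=sess)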
Example #3
def main(argv=None):
  """
  Print accuracies
  """
  try:
    _name_of_script, filepath = argv
  except ValueError:
    raise ValueError(argv)
  print_accuracies(filepath=filepath, test_start=FLAGS.test_start,
                   test_end=FLAGS.test_end, which_set=FLAGS.which_set,
                   nb_iter=FLAGS.nb_iter, base_eps_iter=FLAGS.base_eps_iter,
                   batch_size=FLAGS.batch_size)


if __name__ == '__main__':
  flags.DEFINE_integer('train_start', TRAIN_START, 'Starting point (inclusive) '
                       'of range of train examples to use')
  flags.DEFINE_integer('train_end', TRAIN_END, 'Ending point (non-inclusive) '
                       'of range of train examples to use')
  flags.DEFINE_integer('test_start', TEST_START, 'Starting point (inclusive) '
                       'of range of test examples to use')
  flags.DEFINE_integer('test_end', TEST_END, 'End point (non-inclusive) of '
                       'range of test examples to use')
  flags.DEFINE_integer('nb_iter', NB_ITER, 'Number of iterations of PGD')
  flags.DEFINE_string('which_set', WHICH_SET, '"train" or "test"')
  flags.DEFINE_integer('batch_size', BATCH_SIZE,
                       'Batch size for most jobs')
  flags.DEFINE_float('base_eps_iter', BASE_EPS_ITER,
                     'epsilon per iteration, if data were in [0, 1]')
  tf.app.run()
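tf.app.run() strips the recognized --flags from sys.argv and forwards the remainder (the script name plus any positional arguments) to main(), which is why the tuple unpacking above expects exactly one positional argument: the path of the file to evaluate. An illustrative invocation (the script and file names below are placeholders, not from the source):

# $ python print_accuracies.py saved_report.joblib --which_set=test --nb_iter=40
# tf.app.run() then calls main() with
#     argv == ['print_accuracies.py', 'saved_report.joblib']
# so the unpacking binds filepath = 'saved_report.joblib'; zero or several
# positional arguments make the unpacking fail, and the except clause
# re-raises ValueError with the offending argv attached.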
Example #4
        do_eval(preds2_adv, x_train, y_train, 'train_adv_train_adv_eval')

    return report


def main(argv=None):
    from src.FGSM.cleverhans.cleverhans_tutorials import check_installation
    check_installation(__file__)

    cifar10_tutorial(nb_epochs=FLAGS.nb_epochs,
                     batch_size=FLAGS.batch_size,
                     learning_rate=FLAGS.learning_rate,
                     clean_train=FLAGS.clean_train,
                     backprop_through_attack=FLAGS.backprop_through_attack,
                     nb_filters=FLAGS.nb_filters)


if __name__ == '__main__':
    flags.DEFINE_integer('nb_filters', NB_FILTERS, 'Model size multiplier')
    flags.DEFINE_integer('nb_epochs', NB_EPOCHS,
                         'Number of epochs to train model')
    flags.DEFINE_integer('batch_size', BATCH_SIZE, 'Size of training batches')
    flags.DEFINE_float('learning_rate', LEARNING_RATE,
                       'Learning rate for training')
    flags.DEFINE_bool('clean_train', CLEAN_TRAIN, 'Train on clean examples')
    flags.DEFINE_bool('backprop_through_attack', BACKPROP_THROUGH_ATTACK,
                      ('If True, backprop through adversarial example '
                       'construction process during adversarial training'))

    tf.app.run()
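The backprop_through_attack flag controls whether gradients of the adversarial training loss are allowed to flow back through the graph that constructs the adversarial examples. A minimal sketch of the idea (assuming model is a cleverhans Model, x and y are the usual input/label placeholders, and sess is the active session; the eps value and the loss wiring are illustrative, not copied from the tutorial):

import tensorflow as tf
from cleverhans.attacks import FastGradientMethod

fgsm = FastGradientMethod(model, sess=sess)
adv_x = fgsm.generate(x, eps=0.3, clip_min=0., clip_max=1.)
if not FLAGS.backprop_through_attack:
    # Treat the crafted examples as constants: no second-order gradients
    # are computed through the attack during adversarial training.
    adv_x = tf.stop_gradient(adv_x)
adv_logits = model.get_logits(adv_x)
adv_loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(labels=y, logits=adv_logits))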
Example #5
def main(argv=None):
    mnist_blackbox(nb_classes=FLAGS.nb_classes,
                   batch_size=FLAGS.batch_size,
                   learning_rate=FLAGS.learning_rate,
                   nb_epochs=FLAGS.nb_epochs,
                   holdout=FLAGS.holdout,
                   data_aug=FLAGS.data_aug,
                   nb_epochs_s=FLAGS.nb_epochs_s,
                   lmbda=FLAGS.lmbda,
                   aug_batch_size=FLAGS.data_aug_batch_size)


if __name__ == '__main__':

    # General flags
    flags.DEFINE_integer('nb_classes', NB_CLASSES,
                         'Number of classes in problem')
    flags.DEFINE_integer('batch_size', BATCH_SIZE, 'Size of training batches')
    flags.DEFINE_float('learning_rate', LEARNING_RATE,
                       'Learning rate for training')

    # Flags related to oracle
    flags.DEFINE_integer('nb_epochs', NB_EPOCHS,
                         'Number of epochs to train model')

    # Flags related to substitute
    flags.DEFINE_integer('holdout', HOLDOUT, 'Test set holdout for adversary')
    flags.DEFINE_integer('data_aug', DATA_AUG,
                         'Number of substitute data augmentations')
    flags.DEFINE_integer('nb_epochs_s', NB_EPOCHS_S,
                         'Training epochs for substitute')
    flags.DEFINE_float('lmbda', LMBDA, 'Lambda from arxiv.org/abs/1602.02697')
    flags.DEFINE_integer('data_aug_batch_size', AUG_BATCH_SIZE,
                         'Batch size for augmentation')