def test_fprop(self):
    # Define empty model
    model = Model('model', 10, {})
    x = []

    # Exception is thrown when `fprop` not implemented
    with self.assertRaises(Exception) as context:
        model.fprop(x)
    self.assertTrue(context.exception)
def __init__(self, scope, nb_classes, nb_filters, **kwargs):
    del kwargs
    Model.__init__(self, scope, nb_classes, locals())
    self.nb_filters = nb_filters

    # Do a dummy run of fprop to make sure the variables are created from
    # the start
    self.fprop(tf.placeholder(tf.float32, [128, 28, 28, 1]))
    # Put a reference to the params in self so that the params get pickled
    self.params = self.get_params()
def __init__(self, nb_classes=10):
    # NOTE: for compatibility with Madry Lab downloadable checkpoints,
    # we cannot use scopes, give these variables names, etc.
    self.W_conv1 = self._weight_variable([5, 5, 1, 32])
    self.b_conv1 = self._bias_variable([32])
    self.W_conv2 = self._weight_variable([5, 5, 32, 64])
    self.b_conv2 = self._bias_variable([64])
    self.W_fc1 = self._weight_variable([7 * 7 * 64, 1024])
    self.b_fc1 = self._bias_variable([1024])
    self.W_fc2 = self._weight_variable([1024, nb_classes])
    self.b_fc2 = self._bias_variable([nb_classes])
    Model.__init__(self, '', nb_classes, {})
def test_cache():
    # Test that _CorrectFactory can be cached
    model = Model()
    factory_1 = _CorrectFactory(model)
    factory_2 = _CorrectFactory(model)
    cache = {}
    cache[factory_1] = True
    assert factory_2 in cache
def test_sess_generate_np(self):
    model = Model('model', 10, {})

    class DummyAttack(Attack):
        def generate(self, x, **kwargs):
            return x

    attack = DummyAttack(model, back='tf', sess=None)
    with self.assertRaises(Exception) as context:
        attack.generate_np(0.)
    self.assertTrue(context.exception)
def __init__(self, nb_classes=10, nb_filters=64,
             dummy_input=tf.zeros((32, 28, 28, 1))):
    Model.__init__(self, nb_classes=nb_classes)

    # Parameters: number of filters, number of classes.
    self.nb_filters = nb_filters
    self.nb_classes = nb_classes

    # Lists for layer attributes:
    # layer names, layers, layer activations.
    self.layer_names = [
        'input', 'conv_1', 'conv_2', 'conv_3', 'flatten', 'logits'
    ]
    self.layers = {}
    self.layer_acts = {}

    # Layer definitions
    self.layers['conv_1'] = tf.layers.Conv2D(filters=self.nb_filters,
                                             kernel_size=8,
                                             strides=2,
                                             padding='same',
                                             activation=tf.nn.relu)
    self.layers['conv_2'] = tf.layers.Conv2D(filters=self.nb_filters * 2,
                                             kernel_size=6,
                                             strides=2,
                                             padding='valid',
                                             activation=tf.nn.relu)
    self.layers['conv_3'] = tf.layers.Conv2D(filters=self.nb_filters * 2,
                                             kernel_size=5,
                                             strides=1,
                                             padding='valid',
                                             activation=tf.nn.relu)
    self.layers['flatten'] = tf.layers.Flatten()
    self.layers['logits'] = tf.layers.Dense(self.nb_classes,
                                            activation=None)

    # Dummy fprop to activate the network.
    output = self.fprop(dummy_input)
def __init__(self, scope, nb_classes, nb_filters=200, **kwargs):
    del kwargs
    Model.__init__(self, scope, nb_classes, locals())
    self.nb_filters = nb_filters
def test_sess(self):
    # Test that it is permitted to provide no session
    Attack(Model('model', 10, {}), back='tf', sess=None)
def __init__(self, scope='dummy_model', nb_classes=10, **kwargs):
    del kwargs
    Model.__init__(self, scope, nb_classes, locals())
def __init__(self, scope='trivial', nb_classes=2, **kwargs):
    del kwargs
    Model.__init__(self, scope, nb_classes, locals())
def test_parse(self):
    sess = tf.Session()
    test_attack = Attack(Model('model', 10, {}), back='tf', sess=sess)
    self.assertTrue(test_attack.parse_params({}))
def __init__(self, scope, nb_classes=1000, **kwargs):
    del kwargs
    Model.__init__(self, scope, nb_classes, locals())