# NOTE: the imports below reflect the MindArmour unit-test layout these tests
# were written against (logger utility, defense/attack modules and a mock
# `Net` model assumed to live in a local mock_net helper); adjust the paths
# to the installed package version if they differ.
import logging

import numpy as np

from mindspore import context
from mindspore import nn
from mindspore import Tensor
from mindspore.nn.optim.momentum import Momentum

from mindarmour.attacks.gradient_method import FastGradientSignMethod
from mindarmour.attacks.iterative_gradient_method import ProjectedGradientDescent
from mindarmour.defenses.adversarial_defense import AdversarialDefense
from mindarmour.defenses.adversarial_defense import EnsembleAdversarialDefense
from mindarmour.defenses.projected_adversarial_defense import ProjectedAdversarialDefense
from mindarmour.utils.logger import LogUtil

from mock_net import Net

LOGGER = LogUtil.get_instance()
TAG = 'Ad_Test'


def test_pad():
    """UT for projected adversarial defense."""
    num_classes = 10
    batch_size = 32
    sparse = False
    context.set_context(mode=context.GRAPH_MODE)
    context.set_context(device_target='Ascend')

    # create test data
    inputs = np.random.rand(batch_size, 1, 32, 32).astype(np.float32)
    labels = np.random.randint(num_classes, size=batch_size).astype(np.int32)
    if not sparse:
        labels = np.eye(num_classes)[labels].astype(np.float32)

    # construct network
    net = Net()
    loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=sparse)
    optimizer = Momentum(net.trainable_params(), 0.001, 0.9)

    # defense
    pad = ProjectedAdversarialDefense(net, loss_fn=loss_fn, optimizer=optimizer)
    LOGGER.set_level(logging.DEBUG)
    LOGGER.debug(TAG, '---start projected adversarial defense---')
    loss = pad.defense(inputs, labels)
    LOGGER.debug(TAG, '---end projected adversarial defense---')
    assert np.any(loss >= 0.0)
def test_ad():
    """UT for adversarial defense."""
    num_classes = 10
    batch_size = 32
    sparse = False
    context.set_context(mode=context.GRAPH_MODE)
    context.set_context(device_target='Ascend')

    # create test data
    inputs = np.random.rand(batch_size, 1, 32, 32).astype(np.float32)
    labels = np.random.randint(num_classes, size=batch_size).astype(np.int32)
    if not sparse:
        labels = np.eye(num_classes)[labels].astype(np.float32)

    net = Net()
    loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=sparse)
    optimizer = Momentum(learning_rate=Tensor(np.array([0.001], np.float32)),
                         momentum=0.9,
                         params=net.trainable_params())

    ad_defense = AdversarialDefense(net, loss_fn=loss_fn, optimizer=optimizer)
    LOGGER.set_level(logging.DEBUG)
    LOGGER.debug(TAG, '--start adversarial defense--')
    loss = ad_defense.defense(inputs, labels)
    LOGGER.debug(TAG, '--end adversarial defense--')
    assert np.any(loss >= 0.0)
def test_ead():
    """UT for ensemble adversarial defense."""
    num_classes = 10
    batch_size = 64
    sparse = False
    context.set_context(mode=context.GRAPH_MODE)
    context.set_context(device_target='Ascend')

    # create test data
    inputs = np.random.rand(batch_size, 1, 32, 32).astype(np.float32)
    labels = np.random.randint(num_classes, size=batch_size).astype(np.int32)
    if not sparse:
        labels = np.eye(num_classes)[labels].astype(np.float32)

    # construct network; the same instance is shared by the optimizer, the
    # attacks and the defense so the parameters being trained are the ones
    # being attacked
    net = Net()
    loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=sparse)
    optimizer = Momentum(net.trainable_params(), 0.001, 0.9)

    # defense with an ensemble of attacks
    fgsm = FastGradientSignMethod(net)
    pgd = ProjectedGradientDescent(net)
    ead = EnsembleAdversarialDefense(net, [fgsm, pgd],
                                     loss_fn=loss_fn,
                                     optimizer=optimizer)
    LOGGER.set_level(logging.DEBUG)
    LOGGER.debug(TAG, '---start ensemble adversarial defense---')
    loss = ead.defense(inputs, labels)
    LOGGER.debug(TAG, '---end ensemble adversarial defense---')
    assert np.any(loss >= 0.0)
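

# Optional convenience entry point (not part of the original tests): a minimal
# sketch that runs the three defense checks directly when pytest is not used,
# e.g. `python <this_test_file>.py` on an Ascend-enabled environment.
if __name__ == '__main__':
    test_ad()
    test_pad()
    test_ead()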