def test_batch_generate_attack():
    """
    Attack with batch-generate.
    """
    input_np = np.random.random((128, 10)).astype(np.float32)
    label = np.random.randint(0, 10, 128).astype(np.int32)
    label = np.eye(10)[label].astype(np.float32)

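    # One-hot labels pair with a non-sparse cross-entropy loss; batch_generate processes the 128 samples in batches of 32.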
    attack = FastGradientMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
    ms_adv_x = attack.batch_generate(input_np, label, batch_size=32)

    assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
                                         ' must not be equal to original value.'


def test_fast_gradient_method_cpu():
    """
    Fast gradient method unit test on CPU with sparse labels.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
    input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
    label = np.asarray([2], np.int32)

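    # Labels are class indices here, so the loss is built with sparse=True.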
    loss = SoftmaxCrossEntropyWithLogits(sparse=True)
    attack = FastGradientMethod(Net(), loss_fn=loss)
    ms_adv_x = attack.generate(input_np, label)

    assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
                                         ' must not be equal to original value.'


def test_fast_gradient_method_gpu():
    """
    Fast gradient method unit test on GPU with one-hot labels.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
    label = np.asarray([2], np.int32)
    label = np.eye(3)[label].astype(np.float32)

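    # Labels were converted to one-hot above, so a non-sparse cross-entropy loss is used.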
    attack = FastGradientMethod(Net(), loss_fn=SoftmaxCrossEntropyWithLogits(sparse=False))
    ms_adv_x = attack.generate(input_np, label)

    assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
                                         ' must not be equal to original value.'


def test_batch_generate():
    """
    Fast gradient method batch-generate unit test on Ascend.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    input_np = np.random.random([10, 3]).astype(np.float32)
    label = np.random.randint(0, 3, [10])
    label = np.eye(3)[label].astype(np.float32)

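    # One-hot labels with a non-sparse loss; adversarial examples are crafted in batches of 4.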
    loss_fn = SoftmaxCrossEntropyWithLogits(sparse=False)
    attack = FastGradientMethod(Net(), loss_fn=loss_fn)
    ms_adv_x = attack.batch_generate(input_np, label, batch_size=4)

    assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
                                         ' must not be equal to original value.'


def test_fast_gradient_method_multi_inputs():
    """
    Fast gradient method unit test with multiple network inputs.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    inputs1 = np.asarray([[0.1, 0.2, 0.7]]).astype(np.float32)
    inputs2 = np.asarray([[0.4, 0.8, 0.5]]).astype(np.float32)
    labels1 = np.expand_dims(np.eye(3)[1].astype(np.float32), axis=0)
    labels2 = np.expand_dims(np.eye(3)[2].astype(np.float32), axis=0)

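    # Wrap the two-input network and its loss into a single cell so the attack can take gradients through the loss.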
    with_loss_cell = WithLossCell(Net2(), LossNet())
    grad_with_loss_net = GradWrapWithLoss(with_loss_cell)
    attack = FastGradientMethod(grad_with_loss_net)
    ms_adv_x = attack.generate((inputs1, inputs2), (labels1, labels2))

    assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
                                        ' must not be equal to original value.'


def test_batch_generate_multi_inputs():
    """
    Fast gradient method batch-generate unit test with multiple network inputs.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    input_np = np.random.random([10, 3]).astype(np.float32)
    anno_np = np.random.random([10, 3]).astype(np.float32)
    label = np.random.randint(0, 3, [10])
    label = np.eye(3)[label].astype(np.float32)

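    # Only input_np is the attacked input; anno_np and the one-hot label are grouped into the labels tuple consumed by the loss cell.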
    loss_fn = SoftmaxCrossEntropyWithLogits(sparse=False)
    with_loss_cell = WithLossCell(Net2(), loss_fn)
    grad_with_loss_net = GradWrapWithLoss(with_loss_cell)
    attack = FastGradientMethod(grad_with_loss_net)
    ms_adv_x = attack.batch_generate(input_np, (anno_np, label), batch_size=4)

    assert np.any(ms_adv_x != input_np), 'Fast gradient method: generate value' \
                                         ' must not be equal to original value.'


def test_batch_generate_attack_multi_inputs():
    """
    Attack with batch-generate on multiple inputs.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    inputs1 = np.random.random((128, 10)).astype(np.float32)
    inputs2 = np.random.random((128, 10)).astype(np.float32)
    labels1 = np.random.randint(0, 10, 128).astype(np.int32)
    labels2 = np.random.randint(0, 10, 128).astype(np.int32)
    labels1 = np.eye(10)[labels1].astype(np.float32)
    labels2 = np.eye(10)[labels2].astype(np.float32)

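    # Same multi-input wrapping as above, exercised through batch_generate with a batch size of 32.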
    with_loss_cell = WithLossCell(Net2(), LossNet())
    grad_with_loss_net = GradWrapWithLoss(with_loss_cell)
    attack = FastGradientMethod(grad_with_loss_net)
    ms_adv_x = attack.batch_generate((inputs1, inputs2), (labels1, labels2), batch_size=32)

    assert np.any(ms_adv_x != inputs1), 'Fast gradient method: generate value' \
                                        ' must not be equal to original value.'