Exemplo n.º 1
0
def test_pynative_exponential():
    """Smoke-test AdaGaussian noise with exponential decay in PyNative mode.

    Builds an AdaGaussian mechanism via the factory and applies it once to a
    small gradient tensor, printing the resulting noise.
    """
    context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
    gradients = Tensor([0.3, 0.2, 0.4], mstype.float32)
    mech_factory = NoiseMechanismsFactory()
    # Noise multiplier starts at 0.1 and decays exponentially at rate 0.5;
    # norm bound for clipping is 1.0.
    mechanism = mech_factory.create('AdaGaussian',
                                    1.0,
                                    0.1,
                                    noise_decay_rate=0.5,
                                    decay_policy='Exp')
    noise = mechanism(gradients)
    print('ada noise: ', noise)
Exemplo n.º 2
0
def test_dp_model_with_graph_mode():
    """Train a DPModel for one epoch in graph mode.

    Uses constant Gaussian gradient noise together with an adaptive
    (linearly-decayed) clipping mechanism.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    clip_norm = 1.0
    net = Net()
    criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
    # Constant Gaussian noise added to gradients during training.
    noise_mechanism = NoiseMechanismsFactory().create(
        'Gaussian',
        norm_bound=clip_norm,
        initial_noise_multiplier=0.01)
    # Adaptive clipping: tunes the norm bound toward the target
    # unclipped quantile with a linear decay policy.
    clip_mechanism = ClipMechanismsFactory().create('Gaussian',
                                                    decay_policy='Linear',
                                                    learning_rate=0.01,
                                                    target_unclipped_quantile=0.9,
                                                    fraction_stddev=0.01)
    optimizer = nn.Momentum(net.trainable_params(),
                            learning_rate=0.1,
                            momentum=0.9)
    dp_model = DPModel(micro_batches=2,
                       clip_mech=clip_mechanism,
                       norm_bound=clip_norm,
                       noise_mech=noise_mechanism,
                       network=net,
                       loss_fn=criterion,
                       optimizer=optimizer,
                       metrics=None)
    train_ds = ds.GeneratorDataset(dataset_generator, ['data', 'label'])
    dp_model.train(1, train_ds, dataset_sink_mode=False)
Exemplo n.º 3
0
def test_dp_model_with_graph_mode_ada_gaussian():
    """Train a DPModel for one epoch in graph mode with AdaGaussian noise.

    The noise multiplier decays exponentially over training; no gradient
    clipping mechanism is used in this case.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    clip_norm = 1.0
    net = Net()
    criterion = nn.SoftmaxCrossEntropyWithLogits(sparse=True)
    # Adaptive Gaussian noise: multiplier starts at 0.01 and decays
    # exponentially at rate 0.8.
    noise_mechanism = NoiseMechanismsFactory().create(
        'AdaGaussian',
        norm_bound=clip_norm,
        initial_noise_multiplier=0.01,
        noise_decay_rate=0.8,
        decay_policy='Exp')
    optimizer = nn.Momentum(net.trainable_params(),
                            learning_rate=0.1,
                            momentum=0.9)
    dp_model = DPModel(micro_batches=2,
                       clip_mech=None,  # no clipping mechanism for this case
                       norm_bound=clip_norm,
                       noise_mech=noise_mechanism,
                       network=net,
                       loss_fn=criterion,
                       optimizer=optimizer,
                       metrics=None)
    # Dataset: 128 batches of 32 samples each.
    train_ds = ds.GeneratorDataset(dataset_generator(32, 128),
                                   ['data', 'label'])
    dp_model.train(1, train_ds, dataset_sink_mode=False)
Exemplo n.º 4
0
def test_graph_factory():
    """Exercise the noise factory in graph mode.

    Creates both a constant Gaussian mechanism and a step-decayed
    AdaGaussian mechanism, applying each to the same gradient tensor
    and printing the resulting noise.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    gradients = Tensor([0.3, 0.2, 0.4], mstype.float32)
    mech_factory = NoiseMechanismsFactory()
    # Plain Gaussian: noise multiplier stays constant at 0.1.
    gaussian_mech = mech_factory.create('Gaussian', 1.0, 0.1)
    gaussian_noise = gaussian_mech(gradients)
    print('Gaussian noise: ', gaussian_noise)
    # AdaGaussian: multiplier decays with a 'Step' policy at rate 0.5.
    ada_mech = mech_factory.create('AdaGaussian',
                                   1.0,
                                   0.1,
                                   noise_decay_rate=0.5,
                                   decay_policy='Step')
    adaptive_noise = ada_mech(gradients)
    print('ada noise: ', adaptive_noise)
Exemplo n.º 5
0
    # get training dataset
    ds_train = generate_mnist_dataset(os.path.join(cfg.data_path, "train"),
                                      cfg.batch_size)

    if cfg.micro_batches and cfg.batch_size % cfg.micro_batches != 0:
        raise ValueError(
            "Number of micro_batches should divide evenly batch_size")
    # Create a factory class of DP noise mechanisms, this method is adding noise
    # in gradients while training. Initial_noise_multiplier is suggested to be
    # greater than 1.0, otherwise the privacy budget would be huge, which means
    # that the privacy protection effect is weak. Mechanisms can be 'Gaussian'
    # or 'AdaGaussian', in which noise would be decayed with 'AdaGaussian'
    # mechanism while be constant with 'Gaussian' mechanism.
    noise_mech = NoiseMechanismsFactory().create(
        cfg.noise_mechanisms,
        norm_bound=cfg.norm_bound,
        initial_noise_multiplier=cfg.initial_noise_multiplier,
        decay_policy='Exp')

    net_opt = nn.Momentum(params=network.trainable_params(),
                          learning_rate=cfg.lr,
                          momentum=cfg.momentum)
    # Create a monitor for DP training. The function of the monitor is to
    # compute and print the privacy budget(eps and delta) while training.
    rdp_monitor = PrivacyMonitorFactory.create(
        'rdp',
        num_samples=60000,
        batch_size=cfg.batch_size,
        initial_noise_multiplier=cfg.initial_noise_multiplier,
        per_print_times=234)
    # Create the DP model for training.