Example #1
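# Assumed imports for this example (module paths can differ between MindSpore/MindArmour releases):
# from mindspore import context
# import mindspore.nn as nn
# import mindspore.dataset as ds
# from mindspore.nn import SGD
# from mindarmour.privacy.diff_privacy import DPModel, DPOptimizerClassFactory
# LeNet5 and dataset_generator are helper definitions from the surrounding test module.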
def test_dp_model():
    context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
    l2_norm_bound = 1.0
    initial_noise_multiplier = 0.01
    net = LeNet5()
    batch_size = 32
    batches = 128
    epochs = 1
    loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
    optim = SGD(params=net.trainable_params(), learning_rate=0.1, momentum=0.9)
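    # Configure a Gaussian noise mechanism through the DP optimizer factory.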
    gaussian_mech = DPOptimizerClassFactory()
    gaussian_mech.set_mechanisms(
        'Gaussian',
        norm_bound=l2_norm_bound,
        initial_noise_multiplier=initial_noise_multiplier)
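    # DPModel splits each batch into micro-batches, clips their gradients to
    # l2_norm_bound and perturbs them with the Gaussian mechanism.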
    model = DPModel(micro_batches=2,
                    norm_clip=l2_norm_bound,
                    dp_mech=gaussian_mech.mech,
                    network=net,
                    loss_fn=loss,
                    optimizer=optim,
                    metrics=None)
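    # Train for one epoch on a synthetic dataset produced by dataset_generator.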
    ms_ds = ds.GeneratorDataset(dataset_generator(batch_size, batches),
                                ['data', 'label'])
    ms_ds.set_dataset_size(batch_size * batches)
    model.train(epochs, ms_ds)
Example #2
def test_dp_model_pynative_mode():
    context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
    norm_clip = 1.0
    initial_noise_multiplier = 0.01
    network = LeNet5()
    batch_size = 32
    batches = 128
    epochs = 1
    micro_batches = 2
    loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
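    # Build a DP-aware Momentum optimizer: the factory wraps the optimizer so that
    # Gaussian noise is added to the gradients inside the optimizer itself.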
    factory_opt = DPOptimizerClassFactory(micro_batches=micro_batches)
    factory_opt.set_mechanisms('Gaussian',
                               norm_bound=norm_clip,
                               initial_noise_multiplier=initial_noise_multiplier)
    net_opt = factory_opt.create('Momentum')(network.trainable_params(), learning_rate=0.1, momentum=0.9)
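    # Noise is already injected by the optimizer, so DPModel is created with mech=None.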
    model = DPModel(micro_batches=micro_batches,
                    norm_clip=norm_clip,
                    mech=None,
                    network=network,
                    loss_fn=loss,
                    optimizer=net_opt,
                    metrics=None)
    ms_ds = ds.GeneratorDataset(dataset_generator(batch_size, batches), ['data', 'label'])
    ms_ds.set_dataset_size(batch_size * batches)
    model.train(epochs, ms_ds, dataset_sink_mode=False)
Example #3
def test_dp_model_with_graph_mode():
    context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
    norm_bound = 1.0
    initial_noise_multiplier = 0.01
    network = LeNet5()
    batch_size = 32
    batches = 128
    epochs = 1
    loss = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=True)
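    # Create a Gaussian noise mechanism for the gradients and an adaptive clipping
    # mechanism that tunes the clipping norm toward the target unclipped quantile.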
    noise_mech = NoiseMechanismsFactory().create(
        'Gaussian',
        norm_bound=norm_bound,
        initial_noise_multiplier=initial_noise_multiplier)
    clip_mech = ClipMechanismsFactory().create('Gaussian',
                                               decay_policy='Linear',
                                               learning_rate=0.01,
                                               target_unclipped_quantile=0.9,
                                               fraction_stddev=0.01)
    net_opt = nn.Momentum(network.trainable_params(),
                          learning_rate=0.1,
                          momentum=0.9)
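    # A standard Momentum optimizer is used here; gradient clipping and noise
    # addition are handled by DPModel through noise_mech and clip_mech.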
    model = DPModel(micro_batches=2,
                    clip_mech=clip_mech,
                    norm_bound=norm_bound,
                    noise_mech=noise_mech,
                    network=network,
                    loss_fn=loss,
                    optimizer=net_opt,
                    metrics=None)
    ms_ds = ds.GeneratorDataset(dataset_generator(batch_size, batches),
                                ['data', 'label'])
    ms_ds.set_dataset_size(batch_size * batches)
    model.train(epochs, ms_ds, dataset_sink_mode=False)
Example #4
    net_opt = nn.Momentum(params=network.trainable_params(),
                          learning_rate=cfg.lr,
                          momentum=cfg.momentum)
    # Create a monitor for DP training. The monitor computes and prints the
    # privacy budget (eps and delta) during training.
    rdp_monitor = PrivacyMonitorFactory.create(
        'rdp',
        num_samples=60000,
        batch_size=cfg.batch_size,
        initial_noise_multiplier=cfg.initial_noise_multiplier,
        per_print_times=234)
    # Create the DP model for training.
    model = DPModel(micro_batches=cfg.micro_batches,
                    norm_bound=cfg.norm_bound,
                    noise_mech=noise_mech,
                    network=network,
                    loss_fn=net_loss,
                    optimizer=net_opt,
                    metrics={"Accuracy": Accuracy()})

    LOGGER.info(TAG, "============== Starting Training ==============")
    model.train(cfg['epoch_size'],
                ds_train,
                callbacks=[ckpoint_cb, LossMonitor(), rdp_monitor],
                dataset_sink_mode=cfg.dataset_sink_mode)

    LOGGER.info(TAG, "============== Starting Testing ==============")
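    # Load the checkpoint saved during training and evaluate on the MNIST test set.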
    ckpt_file_name = 'trained_ckpt_file/checkpoint_lenet-5_234.ckpt'
    param_dict = load_checkpoint(ckpt_file_name)
    load_param_into_net(network, param_dict)
    ds_eval = generate_mnist_dataset(os.path.join(cfg.data_path, 'test'),