Example #1
    def elementwise_sub(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[5], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[5], dtype='int64')
        op_sub = pfl_mpc.layers.elementwise_sub(x=x, y=y)
        math_sub = x - y
        exe = fluid.Executor(place=fluid.CPUPlace())
        sub_results = exe.run(feed={
            'x': d_1,
            'y': d_2
        },
                              fetch_list=[op_sub, math_sub])

        self.assertTrue(np.allclose(sub_results[0], sub_results[1]))
        self.assertEqual(sub_results[0].shape, (2, 5))
        self.assertTrue(np.allclose(sub_results[0], expected_out))
Example #2
def load_mpc_model_and_predict(role, ip, server, port, mpc_model_dir,
                               mpc_model_filename):
    """
    Predict based on MPC inference model, save prediction results into files.

    """
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)

    # Step 1. initialize MPC environment and load MPC model to predict
    pfl_mpc.init(mpc_protocol_name, role, ip, server, port)
    infer_prog, feed_names, fetch_targets = mpc_du.load_mpc_model(
        exe=exe,
        mpc_model_dir=mpc_model_dir,
        mpc_model_filename=mpc_model_filename,
        inference=True)
    # Step 2. MPC predict
    batch_size = network.BATCH_SIZE
    feature_file = "/tmp/house_feature"
    feature_shape = (13, )
    pred_file = "./tmp/uci_prediction.part{}".format(role)
    loader = process_data.get_mpc_test_dataloader(feature_file, feature_shape,
                                                  role, batch_size)
    start_time = time.time()
    for sample in loader():
        prediction = exe.run(program=infer_prog,
                             feed={feed_names[0]: np.array(sample)},
                             fetch_list=fetch_targets)
        # Step 3. save prediction results
        with open(pred_file, 'ab') as f:
            f.write(np.array(prediction).tostring())
        break
    end_time = time.time()
    print('Mpc Predict with samples of {}, cost time in seconds:{}'.format(
        batch_size, (end_time - start_time)))
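The function above defines a single party's prediction job; in the full demo all three ABY3 parties run it concurrently. A minimal launcher sketch, assuming the module-level globals used above (mpc_protocol_name, network, process_data, mpc_du) are in place; the Redis address, port and model layout below are placeholders, not values from the original demo:

from multiprocessing import Process

if __name__ == '__main__':
    # Placeholder Redis address/port and a per-party model layout; both are
    # illustrative and must match how the MPC model was actually saved.
    server, port = "127.0.0.1", 6379
    workers = [
        Process(target=load_mpc_model_and_predict,
                args=(role, "localhost", server, port,
                      "./mpc_model/party_{}".format(role), "__model__"))
        for role in range(3)
    ]
    for w in workers:
        w.start()
    for w in workers:
        w.join()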
Example #3
        def closure(**kwargs):
            role = kwargs['role']

            pfl_mpc.init("aby3", role, "localhost", self.server,
                         int(self.port))

            #init_op = fluid.default_main_program().global_block().ops[0]

            #_insert_init_op(program, init_op)

            executor = Executor(place)

            executor.run()
            outs = executor.run(prog,
                                feed=feed_dict,
                                fetch_list=fetch_list,
                                return_numpy=False)
            # append lod information in last position
            lod = []
            for idx in range(len(fetch_list)):
                return_results[idx].append(np.array(outs[idx]))
                lod_i = outs[idx].lod()
                lod_concat = []
                for i in lod_i:
                    lod_concat.append(i)
                lod.append(lod_concat)
            return_results[len(fetch_list)].append(lod)
Example #4
    def lt(self, **kwargs):
        """
        Less than.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3], dtype='int64')
        y = fluid.data(name='y', shape=[3], dtype='float32')
        op_lt = pfl_mpc.layers.less_than(x=x, y=y)
        math_lt = x < y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={
            'x': d_1,
            'y': d_2
        },
                          fetch_list=[op_lt, math_lt])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (3, ))
        self.assertTrue(np.allclose(results[0], expected_out))
Example #5
    def precision_recall(self, **kwargs):
        """
        Unit test for the precision_recall op.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        preds = kwargs['preds']
        labels = kwargs['labels']
        loop = kwargs['loop']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=self.input_size, dtype='int64')
        y = pfl_mpc.data(name='y', shape=self.input_size, dtype='int64')
        out0, out1 = pfl_mpc.layers.precision_recall(input=x,
                                                     label=y,
                                                     threshold=self.threshold)
        exe = fluid.Executor(place=fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        for i in range(loop):
            batch_res, acc_res = exe.run(feed={
                'x': preds[i],
                'y': labels[i]
            },
                                         fetch_list=[out0, out1])

        self.assertTrue(
            np.allclose(batch_res * (2**-16), self.exp_res[0][:3], atol=1e-4))
        self.assertTrue(
            np.allclose(acc_res * (2**-16), self.exp_res[0][3:], atol=1e-4))
Example #6
    def sum(self, **kwargs):
        """
        Test normal case.
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        d_3 = kwargs['data_3'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[4], dtype='int64')
        data_2 = pfl_mpc.data(name='data_2', shape=[4], dtype='int64')
        data_3 = pfl_mpc.data(name='data_3', shape=[4], dtype='int64')
        op_sum = pfl_mpc.layers.sum([data_1, data_2, data_3])
        math_sum = data_1 + data_2 + data_3
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={
            'data_1': d_1,
            'data_2': d_2,
            'data_3': d_3
        },
                          fetch_list=[op_sum, math_sum])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (2, 4))
        self.assertTrue(np.allclose(results[0], expected_out))
Example #7
    def encrypted_data_generator(data_location_party, sample_reader, index1,
                                 index2_begin, index2_end):
        feature_num = index2_end - index2_begin
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            pfl_mpc.init("aby3", int(role), "localhost", server, int(port))
            input = fluid.data(name='input',
                               shape=[feature_num],
                               dtype='float32')
            out = pfl_mpc.layers.share(input, party_id=data_location_party)

            place = fluid.CPUPlace()
            exe = fluid.Executor(place)
            exe.run(fluid.default_startup_program())
            for instance in sample_reader():
                if role == data_location_party:
                    feed_data = np.array(
                        instance[index1][index2_begin:index2_end],
                        dtype='float32')
                else:
                    feed_data = np.zeros(shape=(feature_num, ),
                                         dtype='float32')  #dummy_data
                out_share = exe.run(feed={'input': feed_data},
                                    fetch_list=[out])
                yield np.array(out_share)
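A usage sketch for the generator above, with a purely hypothetical plaintext reader standing in for sample_reader; role, server and port are assumed to be defined in the enclosing scope, exactly as the generator itself assumes:

import numpy as np

def dummy_sample_reader():
    # Hypothetical reader: each instance is a list whose element 0 is a
    # 13-dimensional float feature vector (only columns 0..13 are shared).
    for _ in range(4):
        yield [np.random.random(13).astype('float32')]

share_gen = encrypted_data_generator(data_location_party=0,
                                     sample_reader=dummy_sample_reader,
                                     index1=0,
                                     index2_begin=0,
                                     index2_end=13)
with open('./feature_share.part{}'.format(role), 'wb') as f:
    for share in share_gen:
        f.write(share.tostring())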
Example #8
    def elementwise_add(self, **kwargs):
        """
        Add two variables with one dimension.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[4], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[4], dtype='int64')
        op_add = pfl_mpc.layers.elementwise_add(x=x, y=y)
        math_add = x + y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={
            'x': d_1,
            'y': d_2
        },
                          fetch_list=[op_add, math_add])

        self.assertTrue(np.allclose(results[0], results[1]))
        self.assertEqual(results[0].shape, (2, 4))
        self.assertTrue(np.allclose(results[0], expected_out))
Example #9
    def fc(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("privc", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[3, 2], dtype='int64')
        out = pfl_mpc.layers.fc(
            input=data_1,
            size=1,
            num_flatten_dims=-1,
            param_attr=fluid.ParamAttr(
                initializer=fluid.initializer.ConstantInitializer(
                    1 * 2**scaling_factor / 2)))  # init 1
        exe = fluid.Executor(place=fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        results = exe.run(feed={'data_1': d_1}, fetch_list=[out])

        self.assertEqual(results[0].shape, (3, 1))
        return_results.append(results[0])
Example #10
    def batch_norm(self, **kwargs):
        """
        Batch norm, normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 3], dtype='int64')

        param_attr = fluid.ParamAttr(
            name='batch_norm_w',
            initializer=fluid.initializer.ConstantInitializer(value=21845))
        bias_attr = fluid.ParamAttr(
            name='batch_norm_b',
            initializer=fluid.initializer.ConstantInitializer(value=0))
        bn_out = pfl_mpc.layers.batch_norm(input=x,
                                           param_attr=param_attr,
                                           bias_attr=bias_attr)

        exe = fluid.Executor(place=fluid.CPUPlace())
        exe.run(fluid.default_startup_program())
        results = exe.run(feed={'x': d_1}, fetch_list=[bn_out])

        self.assertEqual(results[0].shape, (2, 2, 3))
        return_results.append(results[0])
Example #11
    def mean_normalize(self, **kwargs):
        """
        Unit test for the mean_normalize op.
        :param kwargs:
        :return:
        """
        role = kwargs['role']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))

        mi = pfl_mpc.data(name='mi', shape=self.input_size, dtype='int64')
        ma = pfl_mpc.data(name='ma', shape=self.input_size, dtype='int64')
        me = pfl_mpc.data(name='me', shape=self.input_size, dtype='int64')
        sn = pfl_mpc.data(name='sn', shape=self.input_size[:-1], dtype='int64')

        out0, out1 = pfl_mpc.layers.mean_normalize(f_min=mi,
                                                   f_max=ma,
                                                   f_mean=me,
                                                   sample_num=sn)

        exe = fluid.Executor(place=fluid.CPUPlace())

        f_range, f_mean = exe.run(feed={
            'mi': kwargs['min'],
            'ma': kwargs['max'],
            'me': kwargs['mean'],
            'sn': kwargs['sample_num']
        },
                                  fetch_list=[out0, out1])

        self.f_range_list.append(f_range)
        self.f_mean_list.append(f_mean)
Example #12
        def closure(**kwargs):
            role = kwargs['role']

            pfl_mpc.init("aby3", role, "localhost", self.server,
                         int(self.port))
            loss = append_loss_ops(block, output_names)
            param_grad_list = append_backward(loss=loss,
                                              parameter_list=input_to_check,
                                              no_grad_set=no_grad_set)

            inputs = self._get_inputs(block)
            feed_dict = self.feed_var(inputs, place)

            fetch_list = [g for p, g in param_grad_list]

            executor = Executor(place)

            executor.run()
            outs = executor.run(prog,
                                feed=feed_dict,
                                fetch_list=fetch_list,
                                return_numpy=False)
            # append lod information in last position
            lod = []
            for idx in range(fetch_list_len):
                return_results[idx].append(np.array(outs[idx]))
                lod_i = outs[idx].lod()
                lod_concat = []
                for i in lod_i:
                    lod_concat.append(i)
                lod.append(lod_concat)
            return_results[fetch_list_len].append(lod)
Example #13
    def mat_mul2(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("privc", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[2, 3, 4, 5], dtype='int64')
        data_2 = pfl_mpc.data(name='data_2', shape=[5, 4, 3, 2], dtype='int64')
        out = pfl_mpc.layers.mul(data_1,
                                 data_2,
                                 x_num_col_dims=2,
                                 y_num_col_dims=2)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={
            'data_1': d_1,
            'data_2': d_2
        },
                          fetch_list=[out])

        self.assertEqual(results[0].shape, (2, 3, 3, 2))
        return_results.append(results[0])
Example #14
def encrypt_model_and_train(role, ip, server, port, model_save_dir,
                            model_filename):
    """
    Load uci network and train MPC model.

    """
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)

    # Step 1. Initialize MPC environment and load paddle model network and initialize parameter.
    pfl_mpc.init("aby3", role, ip, server, port)
    [_, _, _, loss] = network.uci_network()
    exe.run(fluid.default_startup_program())

    # Step 2. TRANSPILE: encrypt default_main_program into MPC program
    aby3.transpile()

    # Step 3. MPC-TRAINING: model training based on MPC program.
    mpc_data_dir = "../mpc_data/"
    feature_file = mpc_data_dir + "house_feature"
    feature_shape = (13, )
    label_file = mpc_data_dir + "house_label"
    label_shape = (1, )
    if not os.path.exists('./tmp'):
        os.makedirs('./tmp')
    loss_file = "./tmp/uci_mpc_loss.part{}".format(role)
    if os.path.exists(loss_file):
        os.remove(loss_file)
    batch_size = network.UCI_BATCH_SIZE
    epoch_num = network.TRAIN_EPOCH
    feature_name = 'x'
    label_name = 'y'
    loader = process_data.get_mpc_dataloader(feature_file, label_file,
                                             feature_shape, label_shape,
                                             feature_name, label_name, role,
                                             batch_size)
    start_time = time.time()
    for epoch_id in range(epoch_num):
        step = 0
        for sample in loader():
            mpc_loss = exe.run(feed=sample, fetch_list=[loss.name])
            if step % 50 == 0:
                print('Epoch={}, Step={}, Loss={}'.format(
                    epoch_id, step, mpc_loss))
                with open(loss_file, 'ab') as f:
                    f.write(np.array(mpc_loss).tostring())
            step += 1
    end_time = time.time()
    print('Mpc Training of Epoch={} Batch_size={}, cost time in seconds:{}'.
          format(epoch_num, batch_size, (end_time - start_time)))

    # Step 4. SAVE trained MPC model as a trainable model.
    aby3.save_trainable_model(exe=exe,
                              model_dir=model_save_dir,
                              model_filename=model_filename)
    print('Successfully save mpc trained model into:{}'.format(model_save_dir))
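Each party's loss file holds only that party's shares, so the plaintext loss has to be reconstructed offline from all three .part files. A sketch of that step, assuming the same aby3 data utilities module used above also provides load_aby3_shares and reconstruct (as in the PaddleFL UCI demo's process_data helpers) and that the loss_file naming above is kept:

import numpy as np
import paddle

def decrypt_loss(loss_path='./tmp/uci_mpc_loss', shape=(1, )):
    # Assumption: load_aby3_shares reads '<loss_path>.part{id}' for each of
    # the three parties, and reconstruct turns the combined shares back
    # into plaintext values.
    part_readers = [
        aby3.load_aby3_shares(loss_path, id=party, shape=shape)
        for party in range(3)
    ]
    share_reader = paddle.reader.compose(*part_readers)
    return [aby3.reconstruct(np.array(instance)) for instance in share_reader()]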
Example #15
def infer(args):
    """
    infer
    """
    logger.info('Start inferring...')
    begin = time.time()
    place = fluid.CUDAPlace(0) if args.use_gpu else fluid.CPUPlace()
    exe = fluid.Executor(place)
    cur_model_path = os.path.join(args.model_dir, 'mpc_model', 'epoch_' + str(args.test_epoch),
                                  'checkpoint', 'party_{}'.format(args.role))

    with fluid.scope_guard(fluid.Scope()):
        pfl_mpc.init('aby3', args.role, 'localhost', args.server, args.port)
        infer_program, feed_target_names, fetch_vars = aby3.load_mpc_model(exe=exe,
                                                                    mpc_model_dir=cur_model_path,
                                                                    mpc_model_filename='__model__',
                                                                    inference=True)
        mpc_data_dir = args.mpc_data_dir
        user_vec_filepath = mpc_data_dir + 'user_vec'
        user_vec_part_filepath = user_vec_filepath + '.part{}'.format(args.role)

        sample_batch = args.batch_size
        watch_vecs = []
        search_vecs = []
        other_feats = []

        watch_vec_reader = read_share(file=mpc_data_dir + 'watch_vec', shape=(sample_batch, args.watch_vec_size))
        for vec in watch_vec_reader():
            watch_vecs.append(vec)
        search_vec_reader = read_share(file=mpc_data_dir + 'search_vec', shape=(sample_batch, args.search_vec_size))
        for vec in search_vec_reader():
            search_vecs.append(vec)
        other_feat_reader = read_share(file=mpc_data_dir + 'other_feat', shape=(sample_batch, args.other_feat_size))
        for vec in other_feat_reader():
            other_feats.append(vec)

        if os.path.exists(user_vec_part_filepath):
            os.system('rm -rf ' + user_vec_part_filepath)

        for i in range(args.batch_num):
            l3 = exe.run(infer_program,
                         feed={
                               'watch_vec': watch_vecs[i],
                               'search_vec': search_vecs[i],
                               'other_feat': other_feats[i],
                         },
                         return_numpy=True,
                         fetch_list=fetch_vars)

            with open(user_vec_part_filepath, 'ab+') as f:
                f.write(np.array(l3[0]).tostring())


    end = time.time()
    logger.info('MPC inferring, cost_time: {:.5f}s'.format(end - begin))
    logger.info('End inferring.')
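infer() only reads attributes off an args namespace, so it can be driven without a CLI. A sketch with illustrative values (every value below is a placeholder; the vector sizes and directories must match how the shares were produced):

import argparse

args = argparse.Namespace(
    use_gpu=False,            # use CPUPlace
    role=0,                   # party id: 0, 1 or 2
    server='127.0.0.1',       # placeholder Redis host
    port=6379,                # placeholder Redis port
    model_dir='./model',
    test_epoch=10,
    mpc_data_dir='./mpc_data/',
    batch_size=32,
    batch_num=1,
    watch_vec_size=64,
    search_vec_size=64,
    other_feat_size=32,
)
infer(args)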
Example #16
def load_uci_update(role, ip, server, port, mpc_model_dir, mpc_model_filename, updated_model_dir):
    """
    Load, update and save uci MPC model.

    """
    place = fluid.CPUPlace()
    exe = fluid.Executor(place)

    # Step 1. initialize MPC environment and load MPC model into default_main_program to update.
    pfl_mpc.init("aby3", role, ip, server, port)
    aby3.load_mpc_model(exe=exe,
                        mpc_model_dir=mpc_model_dir,
                        mpc_model_filename=mpc_model_filename)

    # Step 2. MPC update
    epoch_num = network.MPC_UPDATE_EPOCH
    batch_size = network.BATCH_SIZE
    mpc_data_dir = "../mpc_data/"
    feature_file = mpc_data_dir + "house_feature"
    feature_shape = (13,)
    label_file = mpc_data_dir + "house_label"
    label_shape = (1,)
    loss_file = "./tmp/uci_mpc_loss.part{}".format(role)
    if os.path.exists(loss_file):
        os.remove(loss_file)
    updated_model_name = 'mpc_updated_model'
    feature_name = 'x'
    label_name = 'y'
    # fetch loss if needed
    loss = fluid.default_main_program().global_block().var('mean_0.tmp_0')
    loader = process_data.get_mpc_dataloader(feature_file, label_file, feature_shape, label_shape,
                                         feature_name, label_name, role, batch_size)
    start_time = time.time()
    for epoch_id in range(epoch_num):
        step = 0
        for sample in loader():
            mpc_loss = exe.run(feed=sample, fetch_list=[loss.name])
            if step % 50 == 0:
                print('Epoch={}, Step={}, Loss={}'.format(epoch_id, step, mpc_loss))
                with open(loss_file, 'ab') as f:
                    f.write(np.array(mpc_loss).tostring())
            step += 1
    end_time = time.time()
    print('Mpc Updating of Epoch={} Batch_size={}, cost time in seconds:{}'
          .format(epoch_num, batch_size, (end_time - start_time)))

    # Step 3. save updated MPC model as a trainable model.
    aby3.save_trainable_model(exe=exe,
                              model_dir=updated_model_dir,
                              model_filename=updated_model_name)
    print('Successfully save mpc updated model into:{}'.format(updated_model_dir))
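The update job expects the trainable model previously saved by encrypt_model_and_train (Example #14). A per-party invocation sketch; run it once per role against the same Redis service (the address, port and directories below are placeholders):

import sys

role = int(sys.argv[1])  # 0, 1 or 2; one process per party
load_uci_update(role=role,
                ip='localhost',
                server='127.0.0.1',
                port=6379,
                mpc_model_dir='./mpc_model/party_{}'.format(role),
                mpc_model_filename='__model__',
                updated_model_dir='./mpc_updated_model/party_{}'.format(role))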
Example #17
            def closure(**kwargs):
                role = kwargs['role']

                pfl_mpc.init("privc", role, "localhost", self.server,
                             int(self.port))

                executor = Executor(place)

                executor.run()
                op.run(scope, place)

                for name in output_names:
                    out = np.array(scope.find_var(name).get_tensor())
                    return_results[name].append(out)
Example #18
        def closure(**kwargs):
            role = kwargs['role']

            pfl_mpc.init("privc", role, "localhost", self.server,
                         int(self.port))

            #init_op = fluid.default_main_program().global_block().ops[0]

            #_insert_init_op(program, init_op)

            executor = Executor(place)

            executor.run()
            outs = executor.run(prog, feed=feed_dict, fetch_list=fetch_list)

            for idx in range(len(fetch_list)):
                return_results[idx].append(outs[idx])
Example #19
    def square(self, **kwargs):
        """
        Square.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        op_square = pfl_mpc.layers.square(data_1)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1}, fetch_list=[op_square])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])
Example #20
    def multi_dim_add(self, **kwargs):
        """
        Add two variables with multiple dimensions.
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        expected_out = kwargs['expect_results'][role]

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2, 2], dtype='int64')
        add = x + y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[add])

        self.assertTrue(np.allclose(results[0], expected_out))
Example #21
    def mean(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("privc", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[3, 2], dtype='int64')
        out = pfl_mpc.layers.mean(data_1)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'data_1': d_1}, fetch_list=[out])

        self.assertEqual(results[0].shape, (1, ))
        return_results.append(results[0])
Example #22
    def reduce_sum(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """

        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='x', shape=[3, 4], dtype='int64')
        op_reduce_sum = pfl_mpc.layers.reduce_sum(data_1, [1, 2], keep_dim=True)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1}, fetch_list=[op_reduce_sum])

        self.assertEqual(results[0].shape, (2, 1, 1))
        return_results.append(results[0])
Example #23
    def multi_dim_mul(self, **kwargs):
        """
        Multiply two variables with multiple dimensions.
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2, 2], dtype='int64')
        math_mul = x * y
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[math_mul])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])
Example #24
    def square_error_cost(self, **kwargs):
        """
        Normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        data_1 = pfl_mpc.data(name='data_1', shape=[2, 2], dtype='int64')
        data_2 = pfl_mpc.data(name='data_2', shape=[2, 2], dtype='int64')
        cost = pfl_mpc.layers.square_error_cost(input=data_1, label=data_2)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'data_1': d_1, 'data_2': d_2}, fetch_list=[cost])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])
Example #25
    def diff_dim_mul_mid(self, **kwargs):
        """
        Multiply with different dimensions.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[3, 4, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[3, 4], dtype='int64')
        math_mul = pfl_mpc.layers.elementwise_mul(x, y, axis=0)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[math_mul])

        self.assertEqual(results[0].shape, (2, 3, 4, 2))
        return_results.append(results[0])
Example #26
    def elementwise_mul(self, **kwargs):
        """
        Multiply two variables with one dimension.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[4], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[4], dtype='int64')
        op_mul = pfl_mpc.layers.elementwise_mul(x=x, y=y)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_mul])

        self.assertEqual(results[0].shape, (2, 4))
        return_results.append(results[0])
Example #27
    def pool2d(self, **kwargs):
        """
        Pool2d, normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[1, 1, 4, 6], dtype='int64')

        pool_out = pfl_mpc.layers.pool2d(input=x, pool_size=2, pool_stride=2)

        exe = fluid.Executor(place=fluid.CPUPlace())
        #exe.run(fluid.default_startup_program())
        results = exe.run(feed={'x': d_1}, fetch_list=[pool_out])

        self.assertEqual(results[0].shape, (2, 1, 1, 2, 3))
        return_results.append(results[0])
Example #28
    def softmax_with_cross_entropy(self, **kwargs):
        """
        Softmax with cross entropy, normal case.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2], dtype='int64')
        cost, softmax = pfl_mpc.layers.softmax_with_cross_entropy(
            x, y, soft_label=True, return_softmax=True)
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[softmax])

        self.assertEqual(results[0].shape, (2, 2))
        return_results.append(results[0])
Example #29
    def mul(self, **kwargs):
        """
        Mul.
        :param kwargs:
        :return:
        """
        role = kwargs['role']
        d_1 = kwargs['data_1'][role]
        d_2 = kwargs['data_2'][role]
        return_results = kwargs['return_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))
        x = pfl_mpc.data(name='x', shape=[2, 2], dtype='int64')
        y = pfl_mpc.data(name='y', shape=[2, 2], dtype='int64')
        op_mul = pfl_mpc.layers.mul(x=x, y=y)
        # math_mul = data_1 * data_2
        exe = fluid.Executor(place=fluid.CPUPlace())
        results = exe.run(feed={'x': d_1, 'y': d_2}, fetch_list=[op_mul])

        self.assertEqual(results[0].shape, (2, 2, 2))
        return_results.append(results[0])
Example #30
    def embedding_op(self, **kwargs):
        role = kwargs['role']
        #data = kwargs['data']
        data_normal = kwargs['data_normal']
        data_share = kwargs['data_share'][role]

        w_data = kwargs['w_data']
        w_data_share = kwargs['w_data_share'][role]
        return_results = kwargs['return_results']
        expected_result = kwargs['expect_results']

        pfl_mpc.init("aby3", role, "localhost", self.server, int(self.port))

        w_param_attrs = fluid.ParamAttr(name='emb_weight',
                                        learning_rate=0.5,
                                        initializer=pfl_mpc.initializer.NumpyArrayInitializer(w_data_share),
                                        trainable=True)
        w_param_attrs1 = fluid.ParamAttr(name='emb_weight1',
                                        learning_rate=0.5,
                                        initializer=fluid.initializer.NumpyArrayInitializer(w_data),
                                        trainable=True)
        input_shape = np.delete(data_share.shape, 0, 0)
        data1 = pfl_mpc.data(name='input', shape=input_shape, dtype='int64')
        data2 = fluid.data(name='input1', shape=data_normal.shape, dtype='int64')

        math_embedding = fluid.input.embedding(input=data2, size=w_data.shape, param_attr=w_param_attrs1, dtype='float32')

        op_embedding = pfl_mpc.input.embedding(input=data1, size=(input_shape[1],input_shape[0]), param_attr=w_param_attrs, dtype='int64')

        exe = fluid.Executor(place=fluid.CPUPlace())
        exe.run(fluid.default_startup_program())

        results = exe.run(feed={'input': data_share, 'input1': data_normal}, fetch_list=[op_embedding, math_embedding])

        return_results.append(results[0])
        expected_result.append(results[1])