Example 1
    def send_components(self):
        if self.is_min_gen_enc:
            self.logger.debug("using min_gen_enc")

            self._compute_components()

            # phi has shape (1, feature_dim)
            # phi_2 has shape (feature_dim, feature_dim)
            enc_phi = encryption.encrypt_matrix(self.public_key, self.phi)
            enc_phi_2 = distribute_encrypt_matmul_2_ob(self.phi.transpose(),
                                                       enc_phi)

            # enc_y_overlap_2_phi_2 = 0.25 * np.expand_dims(self.y_overlap_2, axis=2) * enc_phi_2
            # enc_y_overlap_phi = -0.5 * self.y_overlap * enc_phi
            enc_y_overlap_2_phi_2 = distribute_compute_XY(
                0.25 * np.expand_dims(self.y_overlap_2, axis=2),
                np.tile(enc_phi_2, (self.y_overlap_2.shape[0], 1, 1)))
            enc_y_overlap_phi = distribute_compute_XY(
                -0.5 * self.y_overlap,
                np.tile(enc_phi, (self.y_overlap.shape[0], 1)))
            enc_mapping_comp_A = distribute_encrypt_matrix(
                self.public_key, self.mapping_comp_A)

            return [
                enc_y_overlap_2_phi_2, enc_y_overlap_phi, enc_mapping_comp_A
            ]
        else:
            components = super(EncryptedFTLGuestModel, self).send_components()
            return self.__encrypt_components(components)
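For orientation, a minimal plaintext sketch of the two overlap components built above, following the formulas in the commented-out lines (illustrative shapes and values, no encryption involved):

import numpy as np

# Illustrative shapes: phi is (1, d); y_overlap and y_overlap_2 are (n_overlap, 1).
d, n_overlap = 4, 3
phi = np.random.rand(1, d)
phi_2 = phi.T @ phi                 # (d, d), plaintext analogue of enc_phi_2
y_overlap = np.random.choice([-1.0, 1.0], size=(n_overlap, 1))
y_overlap_2 = y_overlap ** 2

# Plaintext analogues of the two encrypted components returned above.
y_overlap_2_phi_2 = 0.25 * np.expand_dims(y_overlap_2, axis=2) * phi_2  # (n_overlap, d, d)
y_overlap_phi = -0.5 * y_overlap * phi                                   # (n_overlap, d)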
Example 2
    def send_components(self):
        if self.is_min_gen_enc:
            self.logger.debug("using min_gen_enc")

            self._compute_components()

            # enc_uB_overlap has shape (len(overlap_indexes), feature_dim)
            # enc_uB_overlap_2 has shape (len(overlap_indexes), feature_dim, feature_dim)
            enc_uB_overlap = distribute_encrypt_matrix(self.public_key,
                                                       self.uB_overlap)
            enc_uB_overlap_2 = distribute_encrypt_matmul_3(
                np.expand_dims(self.uB_overlap, axis=2),
                np.expand_dims(enc_uB_overlap, axis=1))

            # enc_mapping_comp_B has shape (len(overlap_indexes), feature_dim)
            scale_factor = np.tile(
                (-1 / self.feature_dim),
                (enc_uB_overlap.shape[0], enc_uB_overlap.shape[1]))
            enc_mapping_comp_B = distribute_compute_XY(enc_uB_overlap,
                                                       scale_factor)
            # enc_mapping_comp_B = enc_uB_overlap * (-1 / self.feature_dim)
            # enc_mapping_comp_B = encrypt_matrix(self.public_key, self.mapping_comp_B)

            # NOTE: the plaintext self.uB_overlap is returned only for testing,
            # to compare the encrypted loss against its plaintext counterpart.
            return [
                enc_uB_overlap, enc_uB_overlap_2, enc_mapping_comp_B,
                self.uB_overlap
            ]
        else:
            components = super(EncryptedFTLHostModel, self).send_components()
            return self.__encrypt_components(components)
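A plaintext sketch of the host-side components, under the shapes given in the comments (enc_uB_overlap_2 stacks per-sample outer products; the real code operates on encrypted matrices):

import numpy as np

n_overlap, feature_dim = 3, 4
uB_overlap = np.random.rand(n_overlap, feature_dim)

# Per-sample outer products, shape (n_overlap, feature_dim, feature_dim).
uB_overlap_2 = np.expand_dims(uB_overlap, axis=2) @ np.expand_dims(uB_overlap, axis=1)

# Plaintext analogue of enc_mapping_comp_B (see the commented-out line above).
mapping_comp_B = uB_overlap * (-1 / feature_dim)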
Example 3
    def test_distributed_calculate_XY_2(self):
        print("--- test_distributed_calculate_XY_2 ---")
        # X has shape (4, 3, 3)
        X = np.random.rand(4, 3, 3)

        # Y has shape (4, 1, 1)
        Y = np.random.rand(4, 1, 1)

        actual_XY = X * Y
        print(actual_XY, actual_XY.shape)
        XY = distribute_compute_XY(X, Y)
        assert_matrix(actual_XY, XY)
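assert_matrix is a helper from the surrounding test suite; a hypothetical stand-in, assuming it only checks element-wise closeness, could be:

import numpy as np

def assert_matrix(expected, actual, atol=1e-8):
    # Hypothetical stand-in for the project's assert_matrix test helper:
    # fail if shapes differ or any element differs beyond the tolerance.
    expected = np.asarray(expected, dtype=float)
    actual = np.asarray(actual, dtype=float)
    assert expected.shape == actual.shape, (expected.shape, actual.shape)
    assert np.allclose(expected, actual, atol=atol)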
Example 4
    def test_distributed_calculate_XY_1(self):
        print("--- test_distributed_calculate_XY_1 ---")
        # X has shape (4, 3)
        X = np.array([[1., 2., 3.],
                      [4., 5., 6.],
                      [7., 8., 9.],
                      [10, 11, 12]])

        # Y has shape (4, 1)
        Y = np.array([[2], [1], [-1], [1]])

        actual_XY = X * Y
        print(actual_XY, actual_XY.shape)
        XY = distribute_compute_XY(X, Y)
        assert_matrix(actual_XY, XY)
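For these concrete inputs the (4, 1) column Y broadcasts across the rows of X, so the expected product can be checked directly with plain NumPy:

import numpy as np

X = np.array([[1., 2., 3.],
              [4., 5., 6.],
              [7., 8., 9.],
              [10., 11., 12.]])
Y = np.array([[2], [1], [-1], [1]])

# Each row of X is scaled by the matching scalar in Y.
expected = np.array([[2., 4., 6.],
                     [4., 5., 6.],
                     [-7., -8., -9.],
                     [10., 11., 12.]])
assert np.array_equal(X * Y, expected)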
Example 5
    def _update_gradients(self):
        # enc_y_overlap_2_phi_uB_overlap_2 was calculated by host
        if self.is_trace:
            self.logger.debug(
                "enc_y_overlap_2_phi_uB_overlap_2 shape" + str(self.enc_y_overlap_2_phi_uB_overlap_2.shape))

        if self.host_public_key is not None and self.public_key != self.host_public_key:
            # decrypt enc_y_overlap_2_phi_uB_overlap_2 received from the host
            self.enc_y_overlap_2_phi_uB_overlap_2 = distribute_decrypt_matrix(self.private_key,
                                                                              self.enc_y_overlap_2_phi_uB_overlap_2)

        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_y_overlap_uB_overlap = distribute_compute_sum_XY(y_overlap * 0.5, self.enc_uB_overlap)

        enc_const = np.sum(self.enc_y_overlap_2_phi_uB_overlap_2, axis=0) - enc_y_overlap_uB_overlap
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const, (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes], (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape:" + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape" + str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape" + str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape" + str(y_non_overlap.shape))

        enc_grad_A_nonoverlap = distribute_compute_XY(self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = distribute_compute_XY_plus_Z(self.alpha * y_overlap / len(self.y), enc_const_overlap,
                                                          self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape" + str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape" + str(enc_grad_A_overlap.shape))

        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])] for _ in range(len(self.y))]
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape" + str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A" + str(enc_loss_grad_A))

        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)
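A plaintext sketch of the gradient computation above, assuming distribute_compute_sum_XY sums X * Y over the sample axis (names, shapes, and values are illustrative):

import numpy as np

# Illustrative shapes and values.
n_overlap, n_non_overlap, d = 3, 2, 4
alpha = 0.1
y = np.random.choice([-1.0, 1.0], size=(n_overlap + n_non_overlap, 1))
y_overlap = np.tile(y[:n_overlap], (1, d))
y_non_overlap = np.tile(y[n_overlap:], (1, d))
uB_overlap = np.random.rand(n_overlap, d)
y_overlap_2_phi_uB_overlap_2 = np.random.rand(n_overlap, d)   # stands in for the host's component
mapping_comp_B = uB_overlap * (-1 / d)

# Collapse the overlap samples into a single (d,) constant
# (assuming distribute_compute_sum_XY sums X * Y over the sample axis).
const = (np.sum(y_overlap_2_phi_uB_overlap_2, axis=0)
         - np.sum(0.5 * y_overlap * uB_overlap, axis=0))

# Non-overlap rows get only the constant term; overlap rows also add mapping_comp_B.
grad_A_nonoverlap = alpha * y_non_overlap / len(y) * np.tile(const, (n_non_overlap, 1))
grad_A_overlap = alpha * y_overlap / len(y) * np.tile(const, (n_overlap, 1)) + mapping_comp_B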
Example 6
    def _update_gradients(self):

        # y_overlap_2 has shape (len(overlap_indexes), 1),
        # phi has shape (1, feature_dim),
        # y_overlap_2_phi has shape (len(overlap_indexes), 1, feature_dim)
        y_overlap_2_phi = np.expand_dims(self.y_overlap_2 * self.phi, axis=1)

        # enc_uB_overlap_2 has shape (len(overlap_indexes), feature_dim, feature_dim)
        enc_y_overlap_2_phi_uB_overlap_2 = distribute_encrypt_matmul_3(
            y_overlap_2_phi, self.enc_uB_overlap_2)
        enc_loss_grads_const_part1 = np.sum(
            0.25 * np.squeeze(enc_y_overlap_2_phi_uB_overlap_2, axis=1),
            axis=0)

        if self.is_trace:
            self.logger.debug("enc_y_overlap_2_phi_uB_overlap_2 shape" +
                              str(enc_y_overlap_2_phi_uB_overlap_2.shape))
            self.logger.debug("enc_loss_grads_const_part1 shape" +
                              str(enc_loss_grads_const_part1.shape))

        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_loss_grads_const_part2 = distribute_compute_sum_XY(
            y_overlap * 0.5, self.enc_uB_overlap)

        enc_const = enc_loss_grads_const_part1 - enc_loss_grads_const_part2
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const,
                                       (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes],
                                (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape:" + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape" +
                              str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape" +
                              str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape" + str(y_non_overlap.shape))

        enc_grad_A_nonoverlap = distribute_compute_XY(
            self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = distribute_compute_XY_plus_Z(
            self.alpha * y_overlap / len(self.y), enc_const_overlap,
            self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape" +
                              str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape" +
                              str(enc_grad_A_overlap.shape))

        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])]
                           for _ in range(len(self.y))]
        # TODO: need more efficient way to do following task
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape" +
                              str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A" + str(enc_loss_grad_A))

        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)
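On the TODO above: the two scatter loops can usually be replaced by fancy indexing; a sketch, assuming the gradient blocks are NumPy arrays (object dtype keeps ciphertext entries intact):

import numpy as np

def scatter_gradients(grad_nonoverlap, grad_overlap,
                      non_overlap_indexes, overlap_indexes, n_samples):
    # Place each gradient block at its sample indexes with one assignment
    # per block instead of a Python loop per row.
    d = grad_overlap.shape[1]
    out = np.empty((n_samples, d), dtype=object)
    out[np.asarray(non_overlap_indexes, dtype=int)] = grad_nonoverlap
    out[np.asarray(overlap_indexes, dtype=int)] = grad_overlap
    return out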