Example #1
    def send_components(self):
        if self.is_min_gen_enc:
            self.logger.debug("using min_gen_enc")

            self._compute_components()

            # enc_uB_overlap has shape (len(overlap_indexes), feature_dim)
            # enc_uB_overlap_2 has shape (len(overlap_indexes), feature_dim, feature_dim)
            enc_uB_overlap = distribute_encrypt_matrix(self.public_key,
                                                       self.uB_overlap)
            enc_uB_overlap_2 = distribute_encrypt_matmul_3(
                np.expand_dims(self.uB_overlap, axis=2),
                np.expand_dims(enc_uB_overlap, axis=1))

            # enc_mapping_comp_B has shape (len(overlap_indexes), feature_dim)
            scale_factor = np.tile(
                (-1 / self.feature_dim),
                (enc_uB_overlap.shape[0], enc_uB_overlap.shape[1]))
            enc_mapping_comp_B = distribute_compute_XY(enc_uB_overlap,
                                                       scale_factor)
            # enc_mapping_comp_B = enc_uB_overlap * (-1 / self.feature_dim)
            # enc_mapping_comp_B = encrypt_matrix(self.public_key, self.mapping_comp_B)

            # NOTE: the plaintext self.uB_overlap is returned only for testing
            # the loss difference against the plaintext computation.
            return [
                enc_uB_overlap, enc_uB_overlap_2, enc_mapping_comp_B,
                self.uB_overlap
            ]
        else:
            components = super(EncryptedFTLHostModel, self).send_components()
            return self.__encrypt_components(components)
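
The encrypted calls in this example follow the same shape algebra as their plaintext counterparts. Below is a minimal plaintext NumPy sketch of that algebra only (no encryption); n and d are illustrative stand-ins for len(overlap_indexes) and feature_dim, not values from the source.

    import numpy as np

    n, d = 4, 3
    uB_overlap = np.random.rand(n, d)

    # Per-row outer products: (n, d, 1) @ (n, 1, d) -> (n, d, d),
    # which is what enc_uB_overlap_2 computes in the encrypted domain.
    uB_overlap_2 = np.matmul(np.expand_dims(uB_overlap, axis=2),
                             np.expand_dims(uB_overlap, axis=1))

    # Element-wise scaling by -1/feature_dim mirrors enc_mapping_comp_B.
    mapping_comp_B = uB_overlap * (-1.0 / d)

    assert uB_overlap_2.shape == (n, d, d)
    assert mapping_comp_B.shape == (n, d)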
Example #2
    def _update_gradients(self):
        uB_overlap_ex = np.expand_dims(self.uB_overlap, axis=1)
        enc_uB_overlap_y_overlap_2_phi_2 = distribute_encrypt_matmul_3(
            uB_overlap_ex, self.enc_y_overlap_2_phi_2)
        enc_l1_grad_B = distribute_compute_X_plus_Y(
            np.squeeze(enc_uB_overlap_y_overlap_2_phi_2, axis=1),
            self.enc_y_overlap_phi)
        enc_loss_grad_B = distribute_compute_X_plus_Y(
            self.alpha * enc_l1_grad_B, self.enc_mapping_comp_A)

        self.loss_grads = enc_loss_grad_B
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X[self.overlap_indexes], enc_loss_grad_B)
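
A plaintext sketch of the gradient shapes used above (encryption omitted); n, d, and alpha are illustrative values, and the *_stub arrays merely stand in for the encrypted inputs of the same shapes.

    import numpy as np

    n, d, alpha = 4, 3, 0.1
    uB_overlap = np.random.rand(n, d)
    y_overlap_2_phi_2_stub = np.random.rand(n, d, d)   # stands in for enc_y_overlap_2_phi_2
    y_overlap_phi_stub = np.random.rand(n, d)          # stands in for enc_y_overlap_phi
    mapping_comp_A_stub = np.random.rand(n, d)         # stands in for enc_mapping_comp_A

    # (n, 1, d) @ (n, d, d) -> (n, 1, d), squeezed back to (n, d)
    part = np.squeeze(np.matmul(np.expand_dims(uB_overlap, axis=1),
                                y_overlap_2_phi_2_stub), axis=1)
    l1_grad_B = part + y_overlap_phi_stub
    loss_grad_B = alpha * l1_grad_B + mapping_comp_A_stub
    assert loss_grad_B.shape == (n, d)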
Example #3
    def test_encrypt_matmul_3_dim_3(self):

        X = np.array([[[1, 2, 3]], [[10, 11, 12]]], dtype=np.float64)
        Y = np.array([[[10, 11, 12], [13, 14, 15], [16, 17, 18]],
                      [[19, 20, 21], [22, 23, 24], [25, 26, 27]]],
                     dtype=np.float64)

        Z = np.matmul(X, Y)

        encrypt_Y = self.encrypt_3d_matrix(Y)
        res = distribute_encrypt_matmul_3(X, encrypt_Y)

        decrypt_res = decrypt_matrix(self.privatekey, res)
        assert_matrix(Z, decrypt_res)
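
The assertion above relies on NumPy's batched matmul over the leading axis: (2, 1, 3) @ (2, 3, 3) -> (2, 1, 3). A plaintext-only check of that shape behaviour, using the same values as the test:

    import numpy as np

    X = np.array([[[1, 2, 3]], [[10, 11, 12]]], dtype=np.float64)   # (2, 1, 3)
    Y = np.arange(10, 28, dtype=np.float64).reshape(2, 3, 3)        # same Y as the test
    Z = np.matmul(X, Y)
    assert Z.shape == (2, 1, 3)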
Example #4
    def __precompute(self):

        # compute a component of guest's loss gradient
        # enc_y_overlap_2_phi has shape (len(overlap_indexes), 1, feature_dim)
        # uB_overlap_2 has shape (len(overlap_indexes), feature_dim, feature_dim)
        enc_y_overlap_2_phi = np.expand_dims(np.tile(self.enc_phi, (len(self.overlap_indexes), 1)), axis=1)
        enc_y_overlap_2_phi_uB_overlap_2 = distribute_encrypt_matmul_3(enc_y_overlap_2_phi, self.uB_overlap_2)
        self.precomputed_grad_component = 0.25 * np.squeeze(enc_y_overlap_2_phi_uB_overlap_2, axis=1)

        # compute a component of guest's loss
        enc_phi_uB_overlap_2_phi = 0
        for uB_row in self.uB_overlap:
            uB_row = uB_row.reshape(1, -1)
            enc_phi_uB_overlap_2_phi += distribute_encrypt_matmul_2_ob(distribute_encrypt_matmul_2_ob(uB_row, self.enc_phi_2),
                                                                       uB_row.transpose())
        self.precomputed_loss_component = enc_phi_uB_overlap_2_phi
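
The loss component accumulated in the loop above is a sum of quadratic forms uB_row @ Phi2 @ uB_row.T over the overlap rows. A plaintext sketch (n and d are illustrative; phi_2_stub stands in for enc_phi_2):

    import numpy as np

    n, d = 4, 3
    uB_overlap = np.random.rand(n, d)
    phi_2_stub = np.random.rand(d, d)   # stands in for enc_phi_2

    acc = 0.0
    for uB_row in uB_overlap:
        uB_row = uB_row.reshape(1, -1)          # (1, d)
        acc += uB_row @ phi_2_stub @ uB_row.T   # (1, 1) quadratic form
    # Equivalent vectorised form of the same sum:
    vec = np.einsum('ni,ij,nj->', uB_overlap, phi_2_stub, uB_overlap)
    assert np.allclose(acc, vec)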
Example #5
    def _update_gradients(self):

        # y_overlap_2 has shape (len(overlap_indexes), 1),
        # phi has shape (1, feature_dim),
        # y_overlap_2_phi has shape (len(overlap_indexes), 1, feature_dim)
        y_overlap_2_phi = np.expand_dims(self.y_overlap_2 * self.phi, axis=1)

        # enc_uB_overlap_2 has shape (len(overlap_indexes), feature_dim, feature_dim)
        enc_y_overlap_2_phi_uB_overlap_2 = distribute_encrypt_matmul_3(
            y_overlap_2_phi, self.enc_uB_overlap_2)
        enc_loss_grads_const_part1 = np.sum(
            0.25 * np.squeeze(enc_y_overlap_2_phi_uB_overlap_2, axis=1),
            axis=0)

        if self.is_trace:
            self.logger.debug("enc_y_overlap_2_phi_uB_overlap_2 shape: " +
                              str(enc_y_overlap_2_phi_uB_overlap_2.shape))
            self.logger.debug("enc_loss_grads_const_part1 shape: " +
                              str(enc_loss_grads_const_part1.shape))

        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_loss_grads_const_part2 = distribute_compute_sum_XY(
            y_overlap * 0.5, self.enc_uB_overlap)

        enc_const = enc_loss_grads_const_part1 - enc_loss_grads_const_part2
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const,
                                       (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes],
                                (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape: " + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape: " +
                              str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape: " +
                              str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape: " + str(y_non_overlap.shape))

        enc_grad_A_nonoverlap = distribute_compute_XY(
            self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = distribute_compute_XY_plus_Z(
            self.alpha * y_overlap / len(self.y), enc_const_overlap,
            self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape: " +
                              str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape: " +
                              str(enc_grad_A_overlap.shape))

        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])]
                           for _ in range(len(self.y))]
        # TODO: need a more efficient way to do the following assignments
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape: " +
                              str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A: " + str(enc_loss_grad_A))

        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)
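
The TODO above concerns scattering the overlap and non-overlap gradient rows back into per-sample order. In plaintext NumPy the two loops can be replaced by fancy indexing; a minimal sketch with illustrative sizes (an encrypted result would need the same index assignment on a dtype=object array):

    import numpy as np

    num_samples, d = 6, 3
    overlap_indexes = [1, 3]
    non_overlap_indexes = [0, 2, 4, 5]
    grad_overlap = np.random.rand(len(overlap_indexes), d)
    grad_nonoverlap = np.random.rand(len(non_overlap_indexes), d)

    loss_grad_A = np.zeros((num_samples, d))
    loss_grad_A[non_overlap_indexes] = grad_nonoverlap   # scatter non-overlap rows
    loss_grad_A[overlap_indexes] = grad_overlap          # scatter overlap rows
    assert loss_grad_A.shape == (num_samples, d)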
Example #6
    def __precompute(self):
        # compute a component of host's loss gradient
        enc_uB_overlap_ex = np.expand_dims(self.enc_uB_overlap, axis=1)
        enc_uB_overlap_y_overlap_2_phi_2 = distribute_encrypt_matmul_3(enc_uB_overlap_ex, self.y_overlap_2_phi_2)
        self.precomputed_component = np.squeeze(enc_uB_overlap_y_overlap_2_phi_2, axis=1)