Ejemplo n.º 1
0
 def compute_encrypted_params_grads(self, X, encrypt_grads):
     """Combine encrypted upstream gradients with the local parameter gradients.

     Computes the plain gradients of the local model for ``X``, then pairs
     them with the (encrypted) upstream gradients via the distributed
     sum-of-products helper.

     :param X: input batch handed to ``compute_gradients``.
     :param encrypt_grads: encrypted gradient signal coming from the peer.
     :return: tuple ``(encrypted W gradients, encrypted b gradients)``.
     """
     local_grads = self.compute_gradients(X)
     weight_grads, bias_grads = local_grads[0], local_grads[1]
     # Add a singleton axis so the encrypted vector lines up with the
     # per-weight gradient matrix.
     encrypt_grads_col = np.expand_dims(encrypt_grads, axis=1)
     enc_grads_W = distribute_compute_sum_XY(encrypt_grads_col, weight_grads)
     enc_grads_b = distribute_compute_sum_XY(encrypt_grads, bias_grads)
     return enc_grads_W, enc_grads_b
Ejemplo n.º 2
0
 def _update_loss(self):
     """Recompute the encrypted training loss and store it on ``self.loss``."""
     # Guest-side overlap features scaled by -1/feature_dim, paired with the
     # host's encrypted overlap features for the cross term.
     scaled_uA = -self.uA_overlap / self.feature_dim
     overlap_term = np.sum(
         distribute_compute_sum_XY(scaled_uA, self.enc_uB_overlap))
     label_term = self.__compute_encrypt_loss_y(self.enc_uB_overlap,
                                                self.enc_uB_overlap_2,
                                                self.y_overlap, self.phi)
     self.loss = self.alpha * label_term + overlap_term
Ejemplo n.º 3
0
    def __compute_encrypt_loss_y(self, enc_uB_overlap, y_overlap, phi):
        """Return the label-dependent part of the encrypted loss.

        Uses ``self.enc_phi_uB_overlap_2_phi`` (precomputed elsewhere); when
        the host holds a different key pair, that term is first decrypted
        with our private key.
        """
        if self.host_public_key is not None and self.public_key != self.host_public_key:
            self.enc_phi_uB_overlap_2_phi = distribute_decrypt_matrix(self.private_key, self.enc_phi_uB_overlap_2_phi)

        uB_phi = distribute_encrypt_matmul_2_ob(enc_uB_overlap, phi.transpose())
        linear_part = -0.5 * distribute_compute_sum_XY(y_overlap, uB_phi)[0]
        quad_part = 1.0 / 8 * np.sum(self.enc_phi_uB_overlap_2_phi)
        return (linear_part + quad_part) + len(y_overlap) * np.log(2)
Ejemplo n.º 4
0
 def __compute_encrypt_loss_y(self, enc_uB_overlap, enc_uB_overlap_2,
                              y_overlap, phi):
     """Return the label-dependent part of the encrypted loss.

     :param enc_uB_overlap: encrypted host features on the overlap samples.
     :param enc_uB_overlap_2: encrypted squared host features (per sample).
     :param y_overlap: labels on the overlap samples.
     :param phi: row vector paired with the host features.
     """
     uB_phi = distribute_encrypt_matmul_2_ob(enc_uB_overlap,
                                             phi.transpose())
     # Aggregate encrypted squared features over the overlap samples, then
     # sandwich between phi and phi^T.
     uB_2_total = np.sum(enc_uB_overlap_2, axis=0)
     phi_uB2_phi = distribute_encrypt_matmul_2_ob(
         distribute_encrypt_matmul_2_ob(phi, uB_2_total), phi.transpose())
     linear_part = -0.5 * distribute_compute_sum_XY(y_overlap, uB_phi)[0]
     quad_part = 1.0 / 8 * np.sum(phi_uB2_phi)
     return (linear_part + quad_part) + len(y_overlap) * np.log(2)
Ejemplo n.º 5
0
    def test_distributed_calculate_sum_XY(self):
        """distribute_compute_sum_XY must match numpy's column sum of X * Y."""
        print("--- test_distributed_calculate_sum_XY ---")

        X = np.array([[1., 2., 3.],
                      [4., 5., 6.],
                      [7., 8., 9.]])
        Y = np.array([[1], [-1], [1]])

        expected = np.sum(X * Y, axis=0)
        computed = distribute_compute_sum_XY(X, Y)
        assert_array(expected, computed)
Ejemplo n.º 6
0
    def _update_gradients(self):
        """Build the encrypted loss gradient w.r.t. this party's samples and
        feed it to the local model.

        Reads: ``self.enc_y_overlap_2_phi_uB_overlap_2`` (received from the
        host), ``self.y``, ``self.y_overlap``, ``self.enc_uB_overlap``,
        ``self.enc_mapping_comp_B``, the overlap / non-overlap index lists
        and key material.
        Writes: ``self.loss_grads``, ``self.enc_grads_W``,
        ``self.enc_grads_b``.
        """
        # enc_y_overlap_2_phi_uB_overlap_2 was calculated by host
        if self.is_trace:
            self.logger.debug(
                "enc_y_overlap_2_phi_uB_overlap_2 shape" + str(self.enc_y_overlap_2_phi_uB_overlap_2.shape))

        # If the host used a different key pair, the host-computed term is
        # under our key and must be decrypted before use.
        if self.host_public_key is not None and self.public_key != self.host_public_key:
            # TODO: decrypt enc_y_overlap_2_phi_uB_overlap_2
            self.enc_y_overlap_2_phi_uB_overlap_2 = distribute_decrypt_matrix(self.private_key,
                                                                              self.enc_y_overlap_2_phi_uB_overlap_2)

        # Broadcast labels across the feature dimension of the encrypted
        # host features, then form the 0.5 * y * uB sum.
        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_y_overlap_uB_overlap = distribute_compute_sum_XY(y_overlap * 0.5, self.enc_uB_overlap)

        # Constant part of the gradient, replicated once per overlap and
        # once per non-overlap sample.
        enc_const = np.sum(self.enc_y_overlap_2_phi_uB_overlap_2, axis=0) - enc_y_overlap_uB_overlap
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const, (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes], (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape:" + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape" + str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape" + str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape" + str(y_non_overlap.shape))

        # Per-sample encrypted gradients; overlap samples additionally get
        # the host's mapping-completion term added in.
        enc_grad_A_nonoverlap = distribute_compute_XY(self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = distribute_compute_XY_plus_Z(self.alpha * y_overlap / len(self.y), enc_const_overlap,
                                                          self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape" + str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape" + str(enc_grad_A_overlap.shape))

        # Scatter the per-group gradients back into full-sample order.
        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])] for _ in range(len(self.y))]
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape" + str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A" + str(enc_loss_grad_A))

        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)
Ejemplo n.º 7
0
    def _update_loss(self):
        """Update ``self.loss`` with the encrypted loss and print a plaintext
        recomputation for comparison."""
        scaled_uA = -self.uA_overlap / self.feature_dim
        overlap_term = np.sum(
            distribute_compute_sum_XY(scaled_uA, self.enc_uB_overlap))
        label_term = self.__compute_encrypt_loss_y(self.enc_uB_overlap,
                                                   self.enc_uB_overlap_2,
                                                   self.y_overlap, self.phi)
        self.loss = self.alpha * label_term + overlap_term
        print("encloss:===")
        print(self.loss)

        # Debug-only cross-check: recompute the same loss in plaintext so the
        # encrypted result above can be compared against it.
        plain_overlap_term = np.sum(scaled_uA * self.uB_overlap)
        plain_label_term = self.__compute_loss_y(self.uB_overlap,
                                                 self.y_overlap, self.phi)
        real_loss = self.alpha * plain_label_term + plain_overlap_term
        print("realloss:===")
        print(real_loss)
Ejemplo n.º 8
0
    def _update_gradients(self):
        """Build the encrypted loss gradient w.r.t. this party's samples and
        feed it to the local model.

        Reads: ``self.y_overlap_2``, ``self.phi``, ``self.enc_uB_overlap``,
        ``self.enc_uB_overlap_2``, ``self.y``, ``self.y_overlap``,
        ``self.enc_mapping_comp_B`` and the overlap / non-overlap index
        lists.
        Writes: ``self.loss_grads``, ``self.enc_grads_W``,
        ``self.enc_grads_b``.
        """

        # y_overlap_2 have shape (len(overlap_indexes), 1),
        # phi has shape (1, feature_dim),
        # y_overlap_2_phi has shape (len(overlap_indexes), 1, feature_dim)
        y_overlap_2_phi = np.expand_dims(self.y_overlap_2 * self.phi, axis=1)

        # uB_2_overlap has shape (len(overlap_indexes), feature_dim, feature_dim)
        enc_y_overlap_2_phi_uB_overlap_2 = distribute_encrypt_matmul_3(
            y_overlap_2_phi, self.enc_uB_overlap_2)
        # First constant part of the gradient: 0.25 * sum over overlap
        # samples of y^2 * phi * uB^2.
        enc_loss_grads_const_part1 = np.sum(
            0.25 * np.squeeze(enc_y_overlap_2_phi_uB_overlap_2, axis=1),
            axis=0)

        if self.is_trace:
            self.logger.debug("enc_y_overlap_2_phi_uB_overlap_2 shape" +
                              str(enc_y_overlap_2_phi_uB_overlap_2.shape))
            self.logger.debug("enc_loss_grads_const_part1 shape" +
                              str(enc_loss_grads_const_part1.shape))

        # Broadcast labels across the feature dimension, then form the
        # second constant part: 0.5 * sum of y * uB.
        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_loss_grads_const_part2 = distribute_compute_sum_XY(
            y_overlap * 0.5, self.enc_uB_overlap)

        # Constant part of the gradient, replicated once per overlap and
        # once per non-overlap sample.
        enc_const = enc_loss_grads_const_part1 - enc_loss_grads_const_part2
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const,
                                       (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes],
                                (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape:" + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape" +
                              str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape" +
                              str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape" + str(y_non_overlap.shape))

        # Per-sample encrypted gradients; overlap samples additionally get
        # the host's mapping-completion term added in.
        enc_grad_A_nonoverlap = distribute_compute_XY(
            self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = distribute_compute_XY_plus_Z(
            self.alpha * y_overlap / len(self.y), enc_const_overlap,
            self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape" +
                              str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape" +
                              str(enc_grad_A_overlap.shape))

        # Scatter the per-group gradients back into full-sample order.
        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])]
                           for _ in range(len(self.y))]
        # TODO: need more efficient way to do following task
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape" +
                              str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A" + str(enc_loss_grad_A))

        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)