Example #1
0
 def compute_encrypted_params_grads(self, X, encrypt_grads):
     """Turn encrypted per-sample loss gradients into encrypted parameter grads.

     :param X: local input batch fed to ``compute_gradients``.
     :param encrypt_grads: encrypted gradient signal, one row per sample.
     :return: tuple ``(encrypted W grads, encrypted b grads)``.
     """
     local_grads = self.compute_gradients(X)
     plain_grads_w = local_grads[0]
     plain_grads_b = local_grads[1]
     # Insert a singleton axis so the encrypted signal lines up with the
     # weight-gradient layout (presumably for broadcasting -- confirm
     # against compute_sum_XY).
     expanded_signal = np.expand_dims(encrypt_grads, axis=1)
     enc_w = compute_sum_XY(expanded_signal, plain_grads_w)
     enc_b = compute_sum_XY(encrypt_grads, plain_grads_b)
     return enc_w, enc_b
Example #2
0
    def _update_gradients(self):
        """Assemble encrypted per-sample loss gradients and push them through
        the local model.

        Combines the host-supplied quadratic term
        (``enc_y_overlap_2_phi_uB_overlap_2``) with locally held labels and
        the host's encrypted embeddings, builds one encrypted gradient row
        per sample (overlap and non-overlap handled separately), then stores
        ``self.loss_grads`` and the encrypted parameter gradients
        ``self.enc_grads_W`` / ``self.enc_grads_b``.
        """
        # enc_y_overlap_2_phi_uB_overlap_2 was calculated by host
        if self.is_trace:
            self.logger.debug("enc_y_overlap_2_phi_uB_overlap_2 shape" +
                              str(self.enc_y_overlap_2_phi_uB_overlap_2.shape))

        # NOTE(review): when the host's public key differs from ours, the
        # host term is decrypted locally with our private key -- presumably
        # it arrived encrypted under this party's key; verify with the host
        # side of the protocol.
        if self.host_public_key is not None and self.public_key != self.host_public_key:
            # TODO: decrypt enc_y_overlap_2_phi_uB_overlap_2
            self.enc_y_overlap_2_phi_uB_overlap_2 = decrypt_matrix(
                self.private_key, self.enc_y_overlap_2_phi_uB_overlap_2)

        # Replicate labels across the embedding width so they can be paired
        # elementwise with enc_uB_overlap (assumes y_overlap is a column
        # vector of shape (n_overlap, 1) -- TODO confirm).
        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_y_overlap_uB_overlap = compute_sum_XY(y_overlap * 0.5,
                                                  self.enc_uB_overlap)

        # Constant part of the gradient shared by every sample, tiled to one
        # row per overlap / non-overlap sample.
        enc_const = np.sum(self.enc_y_overlap_2_phi_uB_overlap_2,
                           axis=0) - enc_y_overlap_uB_overlap
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const,
                                       (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes],
                                (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape:" + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape" +
                              str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape" +
                              str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape" + str(y_non_overlap.shape))

        # Per-sample encrypted gradients; overlap rows additionally add the
        # host mapping component (the Z argument of compute_XY_plus_Z).
        enc_grad_A_nonoverlap = compute_XY(
            self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = compute_XY_plus_Z(
            self.alpha * y_overlap / len(self.y), enc_const_overlap,
            self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape" +
                              str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape" +
                              str(enc_grad_A_overlap.shape))

        # Scatter both gradient sets back into full-dataset row order using
        # the stored sample indexes.
        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])]
                           for _ in range(len(self.y))]
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape" +
                              str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A" + str(enc_loss_grad_A))

        # Cache per-sample gradients and derive encrypted parameter grads.
        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)
Example #3
0
 def _update_loss(self):
     """Refresh ``self.loss`` from the overlap term plus the weighted
     encrypted label loss."""
     # Scale the local embedding by -1/feature_dim before pairing it with
     # the other party's encrypted embedding over the overlap rows.
     scaled_uA = -self.uA_overlap / self.feature_dim
     overlap_term = np.sum(compute_sum_XY(scaled_uA, self.enc_uB_overlap))
     label_term = self.__compute_encrypt_loss_y(
         self.enc_uB_overlap, self.y_overlap, self.phi)
     self.loss = self.alpha * label_term + overlap_term
Example #4
0
 def __compute_encrypt_loss_y(self, enc_uB_overlap, enc_uB_overlap_2, y_overlap, phi):
     """Encrypted label-dependent loss term over the overlap samples.

     Combines a linear term in ``enc_uB_overlap @ phi.T``, a quadratic term
     built from ``enc_uB_overlap_2`` (presumably a Taylor-style
     approximation of the logistic loss -- confirm against the paper/spec),
     and the constant ``len(y_overlap) * log(2)``.
     """
     phi_T = phi.transpose()
     uB_phi = encrypt_matmul_2_ob(enc_uB_overlap, phi_T)
     summed_uB_2 = np.sum(enc_uB_overlap_2, axis=0)
     quad = encrypt_matmul_2_ob(encrypt_matmul_2_ob(phi, summed_uB_2), phi_T)
     linear_part = -0.5 * compute_sum_XY(y_overlap, uB_phi)[0]
     quad_part = 1.0 / 8 * np.sum(quad)
     return (linear_part + quad_part) + len(y_overlap) * np.log(2)
Example #5
0
    def test_distributed_calculate_sum_XY(self):
        """compute_sum_XY must agree with np.sum(X * Y, axis=0) on a small fixture."""
        print("--- test_distributed_calculate_sum_XY ---")

        features = np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.]])
        signs = np.array([[1], [-1], [1]])

        # Reference value computed with plain numpy broadcasting.
        expected = np.sum(features * signs, axis=0)
        produced = compute_sum_XY(features, signs)
        assert_array(expected, produced)
Example #6
0
    def __compute_encrypt_grads(self, grads, encrypt_grads):
        """Weight local plaintext W/b gradients by the encrypted gradient signal.

        :param grads: pair-like of plaintext gradients, ``grads[0]`` for W
            and ``grads[1]`` for b.
        :param encrypt_grads: encrypted per-sample gradient signal.
        :return: ``(encrypted W grads, encrypted b grads)``.
        """
        weight_grads = grads[0]
        bias_grads = grads[1]
        # Singleton axis lets the encrypted signal pair with the W layout
        # (presumably via broadcasting inside compute_sum_XY -- confirm).
        enc_signal_ex = np.expand_dims(encrypt_grads, axis=1)

        if self.is_trace:
            self.logger.info("grads_W shape" + str(weight_grads.shape))
            self.logger.info("grads_b shape" + str(bias_grads.shape))
            self.logger.info("encrypt_grads_ex shape" + str(enc_signal_ex.shape))

        enc_w = compute_sum_XY(enc_signal_ex, weight_grads)
        enc_b = compute_sum_XY(encrypt_grads, bias_grads)

        if self.is_trace:
            self.logger.info("encrypt_grads_W shape" + str(enc_w.shape))
            self.logger.info("encrypt_grads_b shape" + str(enc_b.shape))

        return enc_w, enc_b
Example #7
0
    def __compute_encrypt_loss_y(self, enc_uB_overlap, y_overlap, phi):
        """Encrypted label-dependent loss using the cached quadratic term
        ``self.enc_phi_uB_overlap_2_phi``.

        Side effect: when key ownership differs, the cached quadratic term is
        decrypted in place with our private key before use.
        """
        # NOTE(review): presumably the cached term arrived encrypted under
        # this party's key when the host uses a different key -- verify.
        keys_differ = (self.host_public_key is not None
                       and self.public_key != self.host_public_key)
        if keys_differ:
            self.enc_phi_uB_overlap_2_phi = decrypt_matrix(
                self.private_key, self.enc_phi_uB_overlap_2_phi)

        uB_phi = encrypt_matmul_2_ob(enc_uB_overlap, phi.transpose())
        linear_term = -0.5 * compute_sum_XY(y_overlap, uB_phi)[0]
        quad_term = 1.0 / 8 * np.sum(self.enc_phi_uB_overlap_2_phi)
        return (linear_term + quad_term) + len(y_overlap) * np.log(2)
Example #8
0
    def _update_gradients(self):
        """Compute encrypted per-sample loss gradients and the resulting
        encrypted parameter gradients.

        Builds the quadratic term locally via ``encrypt_matmul_3``, combines
        it with a label-weighted linear term, scatters the per-sample
        gradients back into full-dataset order, then stores
        ``self.loss_grads`` and ``self.encrypt_grads_W`` /
        ``self.encrypt_grads_b``.
        """

        # y_overlap2 have shape (len(overlap_indexes), 1),
        # y_A_u_A has shape (1, feature_dim),
        # y_overlap2_y_A_u_A has shape (len(overlap_indexes), 1, feature_dim)
        y_overlap2_y_A_u_A = np.expand_dims(self.y_overlap2 * self.y_A_u_A, axis=1)

        # U_B_2_overlap has shape (len(overlap_indexes), feature_dim, feature_dim)
        # tmp has shape (len(overlap_indexes), feature_dim)
        tmp1 = encrypt_matmul_3(y_overlap2_y_A_u_A, self.U_B_2_overlap)
        tmp2 = 0.25 * np.squeeze(tmp1, axis=1)

        if self.is_trace:
            self.logger.info("tmp1 shape" + str(tmp1.shape))
            self.logger.info("tmp2 shape" + str(tmp2.shape))

        # Replicate labels across the embedding width so they pair
        # elementwise with U_B_overlap (assumes y_overlap has shape
        # (n_overlap, 1) -- TODO confirm).
        y_overlap = np.tile(self.y_overlap, (1, self.U_B_overlap.shape[-1]))
        tmp3 = compute_sum_XY(y_overlap * 0.5, self.U_B_overlap)

        # Constant gradient part shared by all samples, tiled per sample set.
        # TODO: do sum effectively
        encrypt_const = np.sum(tmp2, axis=0) - tmp3
        encrypt_const_overlap = np.tile(encrypt_const, (len(self.overlap_indexes), 1))
        encrypt_const_nonoverlap = np.tile(encrypt_const, (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes], (1, self.U_B_overlap.shape[-1]))

        if self.is_trace:
            self.logger.info("encrypt_const shape:" + str(encrypt_const.shape))
            self.logger.info("encrypt_const_overlap shape" + str(encrypt_const_overlap.shape))
            self.logger.info("encrypt_const_nonoverlap shape" + str(encrypt_const_nonoverlap.shape))
            self.logger.info("y_non_overlap shape" + str(y_non_overlap.shape))

        # Per-sample gradients; overlap rows also add the mapping component
        # (the Z argument of compute_XY_plus_Z).
        encrypt_grad_A_nonoverlap = compute_XY(self.alpha * y_non_overlap / len(self.y), encrypt_const_nonoverlap)
        encrypt_grad_A_overlap = compute_XY_plus_Z(self.alpha * y_overlap / len(self.y), encrypt_const_overlap, self.mapping_comp_B)

        if self.is_trace:
            self.logger.info("encrypt_grad_A_nonoverlap shape" + str(encrypt_grad_A_nonoverlap.shape))
            self.logger.info("encrypt_grad_A_overlap shape" + str(encrypt_grad_A_overlap.shape))

        # Scatter both gradient sets back into full-dataset row order.
        encrypt_grad_loss_A = [[0 for _ in range(self.U_B_overlap.shape[1])] for _ in range(len(self.y))]
        # TODO: need more efficient way to do following task
        for i, j in enumerate(self.non_overlap_indexes):
            encrypt_grad_loss_A[j] = encrypt_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            encrypt_grad_loss_A[j] = encrypt_grad_A_overlap[i]

        encrypt_grad_loss_A = np.array(encrypt_grad_loss_A)

        if self.is_trace:
            self.logger.info("encrypt_grad_loss_A shape" + str(encrypt_grad_loss_A.shape))
            self.logger.info("encrypt_grad_loss_A" + str(encrypt_grad_loss_A))

        # Cache the per-sample gradients, then derive encrypted parameter
        # gradients from the local model's plaintext gradients.
        self.loss_grads = encrypt_grad_loss_A
        grads = self.localModel.compute_gradients(self.X)
        self.encrypt_grads_W, self.encrypt_grads_b = self.__compute_encrypt_grads(grads, encrypt_grad_loss_A)
Example #9
0
    def _update_gradients(self):
        """Compute encrypted per-sample loss gradients and push them through
        the local model.

        Builds the quadratic constant part with ``encrypt_matmul_3`` over the
        host's encrypted squared embeddings, subtracts a label-weighted
        linear part, scatters per-sample gradients into full-dataset order,
        then stores ``self.loss_grads`` and ``self.enc_grads_W`` /
        ``self.enc_grads_b``.
        """

        # y_overlap_2 have shape (len(overlap_indexes), 1),
        # phi has shape (1, feature_dim),
        # y_overlap_2_phi has shape (len(overlap_indexes), 1, feature_dim)
        y_overlap_2_phi = np.expand_dims(self.y_overlap_2 * self.phi, axis=1)

        # uB_2_overlap has shape (len(overlap_indexes), feature_dim, feature_dim)
        enc_y_overlap_2_phi_uB_overlap_2 = encrypt_matmul_3(y_overlap_2_phi, self.enc_uB_overlap_2)
        enc_loss_grads_const_part1 = np.sum(0.25 * np.squeeze(enc_y_overlap_2_phi_uB_overlap_2, axis=1), axis=0)

        if self.is_trace:
            self.logger.debug("enc_y_overlap_2_phi_uB_overlap_2 shape" + str(enc_y_overlap_2_phi_uB_overlap_2.shape))
            self.logger.debug("enc_loss_grads_const_part1 shape" + str(enc_loss_grads_const_part1.shape))

        # Replicate labels across the embedding width so they pair
        # elementwise with enc_uB_overlap (assumes y_overlap has shape
        # (n_overlap, 1) -- TODO confirm).
        y_overlap = np.tile(self.y_overlap, (1, self.enc_uB_overlap.shape[-1]))
        enc_loss_grads_const_part2 = compute_sum_XY(y_overlap * 0.5, self.enc_uB_overlap)

        # Constant gradient part shared by all samples, tiled per sample set.
        enc_const = enc_loss_grads_const_part1 - enc_loss_grads_const_part2
        enc_const_overlap = np.tile(enc_const, (len(self.overlap_indexes), 1))
        enc_const_nonoverlap = np.tile(enc_const, (len(self.non_overlap_indexes), 1))
        y_non_overlap = np.tile(self.y[self.non_overlap_indexes], (1, self.enc_uB_overlap.shape[-1]))

        if self.is_trace:
            self.logger.debug("enc_const shape:" + str(enc_const.shape))
            self.logger.debug("enc_const_overlap shape" + str(enc_const_overlap.shape))
            self.logger.debug("enc_const_nonoverlap shape" + str(enc_const_nonoverlap.shape))
            self.logger.debug("y_non_overlap shape" + str(y_non_overlap.shape))

        # Per-sample gradients; overlap rows also add the host mapping
        # component (the Z argument of compute_XY_plus_Z).
        enc_grad_A_nonoverlap = compute_XY(self.alpha * y_non_overlap / len(self.y), enc_const_nonoverlap)
        enc_grad_A_overlap = compute_XY_plus_Z(self.alpha * y_overlap / len(self.y), enc_const_overlap,
                                               self.enc_mapping_comp_B)

        if self.is_trace:
            self.logger.debug("enc_grad_A_nonoverlap shape" + str(enc_grad_A_nonoverlap.shape))
            self.logger.debug("enc_grad_A_overlap shape" + str(enc_grad_A_overlap.shape))

        # Scatter both gradient sets back into full-dataset row order.
        enc_loss_grad_A = [[0 for _ in range(self.enc_uB_overlap.shape[1])] for _ in range(len(self.y))]
        # TODO: need more efficient way to do following task
        for i, j in enumerate(self.non_overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_nonoverlap[i]
        for i, j in enumerate(self.overlap_indexes):
            enc_loss_grad_A[j] = enc_grad_A_overlap[i]

        enc_loss_grad_A = np.array(enc_loss_grad_A)

        if self.is_trace:
            self.logger.debug("enc_loss_grad_A shape" + str(enc_loss_grad_A.shape))
            self.logger.debug("enc_loss_grad_A" + str(enc_loss_grad_A))

        # Cache the per-sample gradients and derive encrypted parameter
        # gradients through the local model.
        self.loss_grads = enc_loss_grad_A
        self.enc_grads_W, self.enc_grads_b = self.localModel.compute_encrypted_params_grads(
            self.X, enc_loss_grad_A)
Example #10
0
 def __compute_encrypt_grads(self, grads, encrypt_grads):
     """Weight local plaintext W/b gradients by the encrypted gradient signal.

     :param grads: pair-like of plaintext gradients (``grads[0]`` = W,
         ``grads[1]`` = b).
     :param encrypt_grads: encrypted per-sample gradient signal.
     :return: ``(encrypted W grads, encrypted b grads)``.
     """
     weight_grads = grads[0]
     bias_grads = grads[1]
     # Singleton axis so the encrypted signal lines up with W's layout.
     enc_signal = np.expand_dims(encrypt_grads, axis=1)
     enc_w = compute_sum_XY(enc_signal, weight_grads)
     enc_b = compute_sum_XY(encrypt_grads, bias_grads)
     return enc_w, enc_b
Example #11
0
 def __compute_encrypt_loss_y(self, encrypt_U_B_overlap, encrypt_U_B_2_overlap, y_overlap, y_A_u_A):
     """Encrypted label-dependent loss over the overlap samples.

     Linear term in ``encrypt_U_B_overlap @ y_A_u_A.T``, quadratic term from
     the summed ``encrypt_U_B_2_overlap`` (presumably a Taylor-style
     logistic-loss approximation -- confirm), plus ``len(y_overlap) * log(2)``.
     """
     w_T = y_A_u_A.transpose()
     uB_w = encrypt_matmul_2_ob(encrypt_U_B_overlap, w_T)
     summed_uB_2 = np.sum(encrypt_U_B_2_overlap, axis=0)
     quad = encrypt_matmul_2_ob(encrypt_matmul_2_ob(y_A_u_A, summed_uB_2), w_T)
     first_order = -0.5 * compute_sum_XY(y_overlap, uB_w)[0]
     second_order = 1.0 / 8 * np.sum(quad)
     return (first_order + second_order) + len(y_overlap) * np.log(2)
Example #12
0
 def _update_loss(self):
     """Refresh ``self.loss`` from the overlap term plus the weighted
     encrypted label loss."""
     # Scale the local embedding by -1/feature_dim before pairing it with
     # the other party's embedding over the overlap rows.
     scaled_uA = -self.U_A_overlap / self.feature_dim
     overlap_term = np.sum(compute_sum_XY(scaled_uA, self.U_B_overlap))
     label_term = self.__compute_encrypt_loss_y(
         self.U_B_overlap, self.U_B_2_overlap, self.y_overlap, self.y_A_u_A)
     self.loss = self.alpha * label_term + overlap_term