# Example 1
    def forward_interactive(self,
                            encrypted_host_input,
                            epoch,
                            batch,
                            train=True):
        """Run one interactive forward exchange with the host.

        The host's encrypted input is pushed through the host-side dense
        model, blinded with random noise (so the host cannot see the true
        activations when it decrypts), sent to the host for decryption,
        and the noise is then stripped from the returned plaintext.

        :param encrypted_host_input: encrypted tensor received from the host
        :param epoch: current epoch index (used to tag transfer messages)
        :param batch: current batch index (used to tag transfer messages)
        :param train: when True, (re)create drop-out and apply its mask
        :return: PaillierTensor holding the de-noised decrypted dense output
        """
        LOGGER.info(
            "get encrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        mask_table = None

        # Dense forward pass on the still-encrypted host input.
        encrypted_dense_output = self.host_model.forward_dense(
            encrypted_host_input, self.fixed_point_encoder)
        if train:
            # Drop-out is only active in training; the mask table records
            # which columns survive this batch.
            self._create_drop_out(encrypted_dense_output.shape)
            if self.drop_out:
                mask_table = self.drop_out.generate_mask_table()

        # Cache the full (pre-mask, pre-noise) encrypted output —
        # presumably consumed later, e.g. by the backward pass; TODO confirm.
        self.encrypted_host_dense_output = encrypted_dense_output

        if mask_table:
            # Keep only the columns the drop-out mask retains.
            encrypted_dense_output = encrypted_dense_output.select_columns(
                mask_table)

        # Blinding noise matching the (possibly masked) output's shape and
        # partitioning; keep_table aligns the noise with the kept columns.
        guest_forward_noise = self.rng_generator.fast_generate_random_number(
            encrypted_dense_output.shape,
            encrypted_dense_output.partitions,
            keep_table=mask_table)

        if self.fixed_point_encoder:
            # Encode the noise into the same fixed-point domain before adding.
            encrypted_dense_output += guest_forward_noise.encode(
                self.fixed_point_encoder)
        else:
            encrypted_dense_output += guest_forward_noise

        # Send the blinded ciphertext (and, when drop-out is active, the
        # mask table) to the host for decryption.
        self.send_guest_encrypted_forward_output_with_noise_to_host(
            encrypted_dense_output.get_obj(), epoch, batch)
        if mask_table:
            self.send_interactive_layer_drop_out_table(mask_table, epoch,
                                                       batch)

        LOGGER.info(
            "get decrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        decrypted_dense_output = self.get_guest_decrypted_forward_from_host(
            epoch, batch)

        if mask_table:
            # Strip the blinding noise, then re-expand the dropped columns
            # via self.expand_columns — NOTE(review): exact expansion
            # semantics depend on that helper; verify against its definition.
            out = PaillierTensor(
                tb_obj=decrypted_dense_output) - guest_forward_noise
            out = out.get_obj().join(mask_table, self.expand_columns)
            return PaillierTensor(tb_obj=out)
        else:
            # No drop-out: just strip the noise from the decrypted output.
            return PaillierTensor(
                tb_obj=decrypted_dense_output) - guest_forward_noise
# Example 2
class HostDenseModel(DenseModel):
    """Host-side dense (fully connected) layer of the interactive layer.

    Operates on (Paillier-)encrypted tensors: the forward pass multiplies
    the encrypted input by plaintext weights, and gradients are computed
    against the cached encrypted input.
    """

    def __init__(self):
        super(HostDenseModel, self).__init__()
        self.role = "host"

    def select_backward_sample(self, selective_ids):
        """Cache the rows selected for the backward pass.

        Appends the rows of the current encrypted input (and the matching
        activation inputs) identified by ``selective_ids`` to the running
        caches, re-keying them so they follow any rows cached earlier.

        :param selective_ids: keys of the samples selected for backward
        """
        cached_shape = self.input_cached.shape[0]
        # Re-key selected rows so they land after the existing cache rows.
        offsets = [i + cached_shape for i in range(len(selective_ids))]
        id_map = dict(zip(selective_ids, offsets))
        if cached_shape == 0:
            # Empty cache: the re-keyed selection becomes the whole cache.
            self.input_cached = (self.input.get_obj().filter(
                lambda k, v: k in id_map).map(lambda k, v: (id_map[k], v)))
            self.input_cached = PaillierTensor(tb_obj=self.input_cached)
            self.activation_cached = self.activation_input[selective_ids]
        else:
            # Non-empty cache: append the re-keyed selection to it.
            selective_input = (self.input.get_obj().filter(
                lambda k, v: k in id_map).map(lambda k, v: (id_map[k], v)))
            self.input_cached = PaillierTensor(
                tb_obj=self.input_cached.get_obj().union(selective_input))
            self.activation_cached = np.vstack(
                (self.activation_cached, self.activation_input[selective_ids]))

    def forward_dense(self, x, encoder=None):
        """Compute ``x * W (+ b)``, optionally fixed-point encoding W and b.

        :param x: input tensor (possibly encrypted); cached on ``self.input``
            for the later weight-gradient computation
        :param encoder: optional fixed-point encoder applied to weights/bias
        :return: dense-layer output
        """
        self.input = x

        if encoder is not None:
            output = x * encoder.encode(self.model_weight)
        else:
            output = x * self.model_weight

        if self.bias is not None:
            if encoder is not None:
                output += encoder.encode(self.bias)
            else:
                output += self.bias

        return output

    def get_input_gradient(self, delta, acc_noise, encoder=None):
        """Gradient w.r.t. the layer input: ``delta * (W + acc_noise)^T``.

        :param delta: gradient flowing back from the layer above
        :param acc_noise: accumulated noise previously added to the weights
        :param encoder: optional fixed-point encoder applied to delta
        :return: gradient w.r.t. the input
        """
        if not encoder:
            error = delta * self.model_weight.T + delta * acc_noise.T
        else:
            error = delta.encode(encoder) * (self.model_weight + acc_noise).T

        return error

    def get_weight_gradient(self, delta, encoder=None):
        """Gradient w.r.t. the weights, averaged over the batch.

        Under the selective backward strategy, the current batch is taken
        from the front of the sample cache and the remaining cached rows
        are shifted down to the front.

        :param delta: gradient flowing back from the layer above
        :param encoder: optional fixed-point encoder applied to delta
        :return: averaged weight gradient
        """
        if self.do_backward_selective_strategy:
            # Pop the first batch_size cached rows as the current input ...
            self.input = self.input_cached.filter(
                lambda k, v: k < self.batch_size)
            # ... and shift the remaining cache keys down by batch_size.
            self.input_cached = self.input_cached.filter(
                lambda k, v: k >= self.batch_size).map(
                    lambda kv: (kv[0] - self.batch_size, kv[1]))

        if encoder:
            delta_w = self.input.fast_matmul_2d(encoder.encode(delta))
        else:
            delta_w = self.input.fast_matmul_2d(delta)

        # Average the gradient over the number of samples in the batch.
        delta_w /= self.input.shape[0]

        return delta_w

    def update_weight(self, delta):
        """Apply one gradient step to the weights: ``W -= lr * delta``."""
        self.model_weight -= delta * self.lr

    def update_bias(self, delta):
        """Apply one gradient step to the bias using the batch-mean of delta."""
        self.bias -= np.mean(delta, axis=0) * self.lr