Example #1
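# Guest-side interactive layer of a hetero (vertical) federated neural network,
# excerpted from FATE. The excerpt assumes package-level imports that are not
# shown here, e.g. numpy as np, LOGGER, consts, PaillierTensor, FixedPointEncoder,
# DropOut, HostDenseModel, GuestDenseModel, InteractiveLayerParam and
# random_number_generator.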
class InterActiveGuestDenseLayer(object):
    def __init__(self, params=None, layer_config=None, model_builder=None):
        self.nn_define = layer_config
        self.layer_config = layer_config

        self.host_input_shape = None
        self.guest_input_shape = None
        self.model = None
        self.rng_generator = random_number_generator.RandomNumberGenerator()
        self.model_builder = model_builder
        self.transfer_variable = None
        self.learning_rate = params.interactive_layer_lr
        self.encrypted_host_dense_output = None

        self.encrypted_host_input = None
        self.guest_input = None
        self.guest_output = None
        self.host_output = None

        self.dense_output_data = None

        self.guest_model = None
        self.host_model = None

        self.partitions = 0
        self.do_backward_select_strategy = False
        self.encrypted_host_input_cached = None
        self.drop_out_keep_rate = params.drop_out_keep_rate
        self.drop_out = None

        self.fixed_point_encoder = None if params.floating_point_precision is None else FixedPointEncoder(
            2**params.floating_point_precision)

        self.sync_output_unit = False

    def set_transfer_variable(self, transfer_variable):
        self.transfer_variable = transfer_variable

    def set_backward_select_strategy(self):
        self.do_backward_select_strategy = True

    def set_partition(self, partition):
        self.partitions = partition

    def __build_model(self, restore_stage=False):
        self.host_model = HostDenseModel()
        self.host_model.build(self.host_input_shape, self.layer_config,
                              self.model_builder, restore_stage)
        self.host_model.set_learning_rate(self.learning_rate)

        self.guest_model = GuestDenseModel()
        self.guest_model.build(self.guest_input_shape, self.layer_config,
                               self.model_builder, restore_stage)
        self.guest_model.set_learning_rate(self.learning_rate)

    def forward(self, guest_input, epoch=0, batch=0, train=True):
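        # Forward pass of the interactive layer on the guest side:
        # 1) pull the host bottom model's Paillier-encrypted output,
        # 2) lazily build the guest/host dense sub-models on the first batch,
        # 3) run the noise-masked encrypted dense forward with the host,
        # 4) add the guest's own (plaintext) dense output when present,
        # 5) apply the activation and, during training, dropout.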
        LOGGER.info(
            "interactive layer start forward propagation of epoch {} batch {}".
            format(epoch, batch))
        encrypted_host_input = PaillierTensor(
            self.get_host_encrypted_forward_from_host(epoch, batch))

        if not self.partitions:
            self.partitions = encrypted_host_input.partitions

        self.encrypted_host_input = encrypted_host_input
        self.guest_input = guest_input

        if self.guest_model is None:
            LOGGER.info("building interactive layers' training model")
            self.host_input_shape = encrypted_host_input.shape[1]
            self.guest_input_shape = guest_input.shape[
                1] if guest_input is not None else 0
            self.__build_model()

        if not self.sync_output_unit:
            self.sync_output_unit = True
            self.sync_interactive_layer_output_unit(
                self.host_model.output_shape[0])

        host_output = self.forward_interactive(encrypted_host_input, epoch,
                                               batch, train)

        guest_output = self.guest_model.forward_dense(guest_input)

        if not self.guest_model.empty:
            dense_output_data = host_output + PaillierTensor(
                guest_output, partitions=self.partitions)
        else:
            dense_output_data = host_output

        self.dense_output_data = dense_output_data

        self.guest_output = guest_output
        self.host_output = host_output

        LOGGER.info(
            "start getting interactive layer's activation output of epoch {} batch {}"
            .format(epoch, batch))
        activation_out = self.host_model.forward_activation(
            self.dense_output_data.numpy())
        LOGGER.info(
            "finish getting interactive layer's activation output of epoch {} batch {}"
            .format(epoch, batch))

        if train and self.drop_out:
            activation_out = self.drop_out.forward(activation_out)

        return activation_out

    def backward(self, output_gradient, selective_ids, epoch, batch):
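        # Backward pass on the guest side:
        # 1) when selective backpropagation ids are given, restrict the cached
        #    forward data (host, guest and dropout) to the selected samples and
        #    notify the host,
        # 2) multiply the output gradient by the activation derivative and undo
        #    dropout,
        # 3) update the guest-side weights locally,
        # 4) obtain the host weight gradient through the encrypted exchange,
        #    together with the host's accumulated noise,
        # 5) send the (still encrypted) host input gradient back to the host and
        #    return the gradient for the guest bottom model.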
        if selective_ids:
            self.host_model.select_backward_sample(selective_ids)
            self.guest_model.select_backward_sample(selective_ids)
            if self.drop_out:
                self.drop_out.select_backward_sample(selective_ids)

        if self.do_backward_select_strategy:
            self.sync_backward_select_info(selective_ids, len(output_gradient),
                                           epoch, batch)

        if len(output_gradient) > 0:
            LOGGER.debug(
                "interactive layer start backward propagation of epoch {} batch {}"
                .format(epoch, batch))
            activation_backward = self.host_model.backward_activation()[0]

            activation_gradient = output_gradient * activation_backward
            if self.drop_out:
                activation_gradient = self.drop_out.backward(
                    activation_gradient)

            LOGGER.debug(
                "interactive layer update guest weight of epoch {} batch {}".
                format(epoch, batch))
            guest_input_gradient = self.update_guest(activation_gradient)

            host_weight_gradient, acc_noise = self.backward_interactive(
                activation_gradient, epoch, batch)

            host_input_gradient = self.update_host(activation_gradient,
                                                   host_weight_gradient,
                                                   acc_noise)

            self.send_host_backward_to_host(host_input_gradient.get_obj(),
                                            epoch, batch)

            return guest_input_gradient
        else:
            return []

    def _create_drop_out(self, shape):
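        # Lazily create the dropout helper the first time a training batch is
        # seen (only when the keep rate is below 1) and regenerate its mask for
        # every batch.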
        if self.drop_out_keep_rate and self.drop_out_keep_rate != 1:
            if not self.drop_out:
                self.drop_out = DropOut(noise_shape=shape,
                                        rate=self.drop_out_keep_rate)
                self.drop_out.set_partition(self.partitions)
                if self.do_backward_select_strategy:
                    self.drop_out.do_backward_select_strategy()

            self.drop_out.generate_mask()

    def sync_interactive_layer_output_unit(self, shape):
        self.transfer_variable.interactive_layer_output_unit.remote(
            shape, role=consts.HOST, idx=0)

    def sync_backward_select_info(self, selective_ids, gradient_len, epoch,
                                  batch):
        self.transfer_variable.selective_info.remote(
            (selective_ids, gradient_len),
            role=consts.HOST,
            idx=0,
            suffix=(
                epoch,
                batch,
            ))

    def send_host_backward_to_host(self, host_error, epoch, batch):
        self.transfer_variable.host_backward.remote(host_error,
                                                    role=consts.HOST,
                                                    idx=0,
                                                    suffix=(
                                                        epoch,
                                                        batch,
                                                    ))

    def update_guest(self, activation_gradient):
        input_gradient = self.guest_model.get_input_gradient(
            activation_gradient)
        weight_gradient = self.guest_model.get_weight_gradient(
            activation_gradient)
        self.guest_model.apply_update(weight_gradient)

        return input_gradient

    def update_host(self, activation_gradient, weight_gradient, acc_noise):
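        # Wrap the activation gradient as a PaillierTensor, compute the
        # (encrypted) gradient w.r.t. the host input while correcting for the
        # host's accumulated noise, then update the interactive layer's
        # host-side dense weights and bias.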
        activation_gradient_tensor = PaillierTensor(activation_gradient,
                                                    partitions=self.partitions)
        input_gradient = self.host_model.get_input_gradient(
            activation_gradient_tensor,
            acc_noise,
            encoder=self.fixed_point_encoder)
        # input_gradient = self.host_model.get_input_gradient(activation_gradient, acc_noise)

        self.host_model.update_weight(weight_gradient)
        self.host_model.update_bias(activation_gradient)

        return input_gradient

    def forward_interactive(self,
                            encrypted_host_input,
                            epoch,
                            batch,
                            train=True):
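        # Encrypted forward exchange with the host:
        # 1) multiply the encrypted host input by the host-side dense weights,
        # 2) during training, drop the columns masked out by dropout,
        # 3) add random noise (fixed-point encoded if an encoder is configured)
        #    and send the masked ciphertext to the host for decryption,
        # 4) subtract the noise from the decrypted result and, if dropout was
        #    applied, re-expand the dropped columns to the full output shape.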
        LOGGER.info(
            "get encrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        mask_table = None

        encrypted_dense_output = self.host_model.forward_dense(
            encrypted_host_input, self.fixed_point_encoder)
        if train:
            self._create_drop_out(encrypted_dense_output.shape)
            if self.drop_out:
                mask_table = self.drop_out.generate_mask_table()

        self.encrypted_host_dense_output = encrypted_dense_output

        if mask_table:
            encrypted_dense_output = encrypted_dense_output.select_columns(
                mask_table)

        guest_forward_noise = self.rng_generator.fast_generate_random_number(
            encrypted_dense_output.shape,
            encrypted_dense_output.partitions,
            keep_table=mask_table)

        if self.fixed_point_encoder:
            encrypted_dense_output += guest_forward_noise.encode(
                self.fixed_point_encoder)
        else:
            encrypted_dense_output += guest_forward_noise

        self.send_guest_encrypted_forward_output_with_noise_to_host(
            encrypted_dense_output.get_obj(), epoch, batch)
        if mask_table:
            self.send_interactive_layer_drop_out_table(mask_table, epoch,
                                                       batch)

        LOGGER.info(
            "get decrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        decrypted_dense_output = self.get_guest_decrypted_forward_from_host(
            epoch, batch)

        if mask_table:
            out = PaillierTensor(decrypted_dense_output) - guest_forward_noise
            out = out.get_obj().join(mask_table, self.expand_columns)
            return PaillierTensor(out)
        else:
            return PaillierTensor(decrypted_dense_output) - guest_forward_noise

    def backward_interactive(self, activation_gradient, epoch, batch):
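        # Encrypted backward exchange with the host:
        # 1) compute the encrypted gradient of the host-side dense weights,
        # 2) mask it with random noise and send it to the host for decryption,
        # 3) subtract the noise from the decrypted gradient,
        # 4) fetch the host's encrypted accumulated noise, which is needed to
        #    correct the host input gradient in update_host.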
        LOGGER.info(
            "get encrypted weight gradient of epoch {} batch {}".format(
                epoch, batch))
        encrypted_weight_gradient = self.host_model.get_weight_gradient(
            activation_gradient, encoder=self.fixed_point_encoder)
        if self.fixed_point_encoder:
            encrypted_weight_gradient = self.fixed_point_encoder.decode(
                encrypted_weight_gradient)

        noise_w = self.rng_generator.generate_random_number(
            encrypted_weight_gradient.shape)
        self.transfer_variable.encrypted_guest_weight_gradient.remote(
            encrypted_weight_gradient + noise_w,
            role=consts.HOST,
            idx=-1,
            suffix=(
                epoch,
                batch,
            ))

        LOGGER.info(
            "get decrypted weight gradient of epoch {} batch {}".format(
                epoch, batch))
        decrypted_weight_gradient = self.transfer_variable.decrypted_guest_weight_gradient.get(
            idx=0, suffix=(
                epoch,
                batch,
            ))

        decrypted_weight_gradient -= noise_w

        encrypted_acc_noise = self.get_encrypted_acc_noise_from_host(
            epoch, batch)

        return decrypted_weight_gradient, encrypted_acc_noise

    def get_host_encrypted_forward_from_host(self, epoch, batch):
        return self.transfer_variable.encrypted_host_forward.get(idx=0,
                                                                 suffix=(
                                                                     epoch,
                                                                     batch,
                                                                 ))

    def send_guest_encrypted_forward_output_with_noise_to_host(
            self, encrypted_guest_forward_with_noise, epoch, batch):
        return self.transfer_variable.encrypted_guest_forward.remote(
            encrypted_guest_forward_with_noise,
            role=consts.HOST,
            idx=-1,
            suffix=(
                epoch,
                batch,
            ))

    def send_interactive_layer_drop_out_table(self, mask_table, epoch, batch):
        return self.transfer_variable.drop_out_table.remote(mask_table,
                                                            role=consts.HOST,
                                                            idx=-1,
                                                            suffix=(
                                                                epoch,
                                                                batch,
                                                            ))

    def get_guest_decrypted_forward_from_host(self, epoch, batch):
        return self.transfer_variable.decrypted_guest_fowrad.get(idx=0,
                                                                 suffix=(
                                                                     epoch,
                                                                     batch,
                                                                 ))

    def get_encrypted_acc_noise_from_host(self, epoch, batch):
        return self.transfer_variable.encrypted_acc_noise.get(idx=0,
                                                              suffix=(
                                                                  epoch,
                                                                  batch,
                                                              ))

    def get_output_shape(self):
        return self.host_model.output_shape

    @staticmethod
    def expand_columns(tensor, keep_array):
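        # Re-expand a row that was compacted by dropout column selection:
        # kept positions (mask == 1) receive the next value from `tensor`,
        # dropped positions are filled with zeros, restoring the mask's shape.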
        shape = keep_array.shape
        tensor = np.reshape(tensor, (tensor.size, ))
        keep = np.reshape(keep_array, (keep_array.size, ))
        ret_tensor = []
        idx = 0
        for x in keep:
            if x == 0:
                ret_tensor.append(0)
            else:
                ret_tensor.append(tensor[idx])
                idx += 1

        return np.reshape(np.array(ret_tensor), shape)

    def export_model(self):
        interactive_layer_param = InteractiveLayerParam()
        interactive_layer_param.interactive_guest_saved_model_bytes = self.guest_model.export_model(
        )
        interactive_layer_param.interactive_host_saved_model_bytes = self.host_model.export_model(
        )
        interactive_layer_param.host_input_shape = self.host_input_shape
        interactive_layer_param.guest_input_shape = self.guest_input_shape

        return interactive_layer_param

    def restore_model(self, interactive_layer_param):
        self.host_input_shape = interactive_layer_param.host_input_shape
        self.guest_input_shape = interactive_layer_param.guest_input_shape

        self.__build_model(restore_stage=True)
        self.guest_model.restore_model(
            interactive_layer_param.interactive_guest_saved_model_bytes)
        self.host_model.restore_model(
            interactive_layer_param.interactive_host_saved_model_bytes)
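
The sketch below shows, under assumed names, how a guest-side training loop might drive this layer. It is illustrative only: params, layer_config, model_builder, transfer_variable, guest_batches, labels, top_model and guest_bottom_model are placeholders, and train_and_get_backward_gradient is a hypothetical top-model helper, not part of the class above.

# Hypothetical guest-side driver; every name below is illustrative.
interactive_layer = InterActiveGuestDenseLayer(params=params,
                                               layer_config=layer_config,
                                               model_builder=model_builder)
interactive_layer.set_transfer_variable(transfer_variable)

for epoch in range(params.epochs):
    for batch, guest_bottom_out in enumerate(guest_batches):
        # Forward combines the guest bottom output with the host's encrypted forward.
        activation_out = interactive_layer.forward(guest_bottom_out,
                                                   epoch=epoch,
                                                   batch=batch,
                                                   train=True)
        # Hypothetical top model returns the gradient w.r.t. its input.
        output_gradient = top_model.train_and_get_backward_gradient(
            activation_out, labels)
        # Backward updates the interactive weights and returns the gradient
        # for the guest bottom model (no selective backpropagation here).
        guest_input_gradient = interactive_layer.backward(
            output_gradient, selective_ids=[], epoch=epoch, batch=batch)
        guest_bottom_model.backward(guest_input_gradient)
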
Example #2
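This variant is an earlier, simpler version of the same guest-side layer: it has no dropout, no backward-select strategy, no fixed-point encoding and no output-unit synchronization, but it follows the same noise-masked forward/backward exchange with the host.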
class InterActiveGuestDenseLayer(object):
    def __init__(self, params=None, layer_config=None, model_builder=None):
        self.nn_define = layer_config
        self.layer_config = layer_config

        self.host_input_shape = None
        self.guest_input_shape = None
        self.model = None
        self.rng_generator = random_number_generator.RandomNumberGenerator()
        self.model_builder = model_builder
        self.transfer_variable = None
        self.learning_rate = params.interactive_layer_lr
        self.encrypted_host_dense_output = None

        self.encrypted_host_input = None
        self.guest_input = None
        self.guest_output = None
        self.host_output = None

        self.dense_output_data = None

        self.guest_model = None
        self.host_model = None

        self.partitions = 0

    def set_transfer_variable(self, transfer_variable):
        self.transfer_variable = transfer_variable

    def set_partition(self, partition):
        self.partitions = partition

    def __build_model(self, restore_stage=False):
        self.host_model = HostDenseModel()
        self.host_model.build(self.host_input_shape, self.layer_config,
                              self.model_builder, restore_stage)
        self.host_model.set_learning_rate(self.learning_rate)

        self.guest_model = GuestDenseModel()
        self.guest_model.build(self.guest_input_shape, self.layer_config,
                               self.model_builder, restore_stage)
        self.guest_model.set_learning_rate(self.learning_rate)

    def forward(self, guest_input, epoch=0, batch=0):
        LOGGER.info(
            "interactive layer start forward propagation of epoch {} batch {}".
            format(epoch, batch))
        encrypted_host_input = PaillierTensor(
            tb_obj=self.get_host_encrypted_forward_from_host(epoch, batch))

        if not self.partitions:
            self.partitions = encrypted_host_input.partitions

        self.encrypted_host_input = encrypted_host_input
        self.guest_input = guest_input

        if self.guest_model is None:
            LOGGER.info("building interactive layers' training model")
            self.host_input_shape = encrypted_host_input.shape[1]
            self.guest_input_shape = guest_input.shape[
                1] if guest_input is not None else 0
            self.__build_model()

        host_output = self.forward_interactive(encrypted_host_input, epoch,
                                               batch)

        guest_output = self.guest_model.forward_dense(guest_input)

        if not self.guest_model.empty:
            dense_output_data = host_output + PaillierTensor(
                ori_data=guest_output, partitions=self.partitions)
        else:
            dense_output_data = host_output

        self.dense_output_data = dense_output_data

        self.guest_output = guest_output
        self.host_output = host_output

        LOGGER.info(
            "start getting interactive layer's activation output of epoch {} batch {}"
            .format(epoch, batch))
        activation_out = self.host_model.forward_activation(
            self.dense_output_data.numpy())
        LOGGER.info(
            "finish getting interactive layer's activation output of epoch {} batch {}"
            .format(epoch, batch))

        return activation_out

    def backward(self, output_gradient, epoch, batch):
        LOGGER.debug(
            "interactive layer start backward propagation of epoch {} batch {}"
            .format(epoch, batch))
        activation_backward = self.host_model.backward_activation()[0]

        activation_gradient = output_gradient * activation_backward

        LOGGER.debug(
            "interactive layer update guest weight of epoch {} batch {}".
            format(epoch, batch))
        guest_input_gradient = self.update_guest(activation_gradient)

        host_weight_gradient, acc_noise = self.backward_interactive(
            activation_gradient, epoch, batch)

        host_input_gradient = self.update_host(activation_gradient,
                                               host_weight_gradient, acc_noise)

        self.send_host_backward_to_host(host_input_gradient.get_obj(), epoch,
                                        batch)

        return guest_input_gradient

    def send_host_backward_to_host(self, host_error, epoch, batch):
        self.transfer_variable.host_backward.remote(host_error,
                                                    role=consts.HOST,
                                                    idx=0,
                                                    suffix=(
                                                        epoch,
                                                        batch,
                                                    ))

    def update_guest(self, activation_gradient):
        input_gradient = self.guest_model.get_input_gradient(
            activation_gradient)
        weight_gradient = self.guest_model.get_weight_gradient(
            activation_gradient)
        self.guest_model.apply_update(weight_gradient)

        return input_gradient

    def update_host(self, activation_gradient, weight_gradient, acc_noise):
        activation_gradient_tensor = PaillierTensor(
            ori_data=activation_gradient, partitions=self.partitions)
        input_gradient = self.host_model.get_input_gradient(
            activation_gradient_tensor, acc_noise)
        # input_gradient = self.host_model.get_input_gradient(activation_gradient, acc_noise)

        self.host_model.update_weight(weight_gradient)
        self.host_model.update_bias(activation_gradient)

        return input_gradient

    def forward_interactive(self, encrypted_host_input, epoch, batch):
        LOGGER.info(
            "get encrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        encrypted_dense_output = self.host_model.forward_dense(
            encrypted_host_input)

        self.encrypted_host_dense_output = encrypted_dense_output

        guest_forward_noise = self.rng_generator.fast_generate_random_number(
            encrypted_dense_output.shape, encrypted_dense_output.partitions)

        encrypted_dense_output += guest_forward_noise

        self.send_guest_encrypted_forward_output_with_noise_to_host(
            encrypted_dense_output.get_obj(), epoch, batch)

        LOGGER.info(
            "get decrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        decrypted_dense_output = self.get_guest_decrypted_forward_from_host(
            epoch, batch)

        return PaillierTensor(
            tb_obj=decrypted_dense_output) - guest_forward_noise

    def backward_interactive(self, activation_gradient, epoch, batch):
        LOGGER.info(
            "get encrypted weight gradient of epoch {} batch {}".format(
                epoch, batch))
        encrypted_weight_gradient = self.host_model.get_weight_gradient(
            activation_gradient)

        noise_w = self.rng_generator.generate_random_number(
            encrypted_weight_gradient.shape)
        self.transfer_variable.encrypted_guest_weight_gradient.remote(
            encrypted_weight_gradient + noise_w,
            role=consts.HOST,
            idx=-1,
            suffix=(
                epoch,
                batch,
            ))

        LOGGER.info(
            "get decrypted weight gradient of epoch {} batch {}".format(
                epoch, batch))
        decrypted_weight_gradient = self.transfer_variable.decrypted_guest_weight_gradient.get(
            idx=0, suffix=(
                epoch,
                batch,
            ))

        decrypted_weight_gradient -= noise_w

        encrypted_acc_noise = self.get_encrypted_acc_noise_from_host(
            epoch, batch)

        return decrypted_weight_gradient, encrypted_acc_noise

    def get_host_encrypted_forward_from_host(self, epoch, batch):
        return self.transfer_variable.encrypted_host_forward.get(idx=0,
                                                                 suffix=(
                                                                     epoch,
                                                                     batch,
                                                                 ))

    def send_guest_encrypted_forward_output_with_noise_to_host(
            self, encrypted_guest_forward_with_noise, epoch, batch):
        return self.transfer_variable.encrypted_guest_forward.remote(
            encrypted_guest_forward_with_noise,
            role=consts.HOST,
            idx=-1,
            suffix=(
                epoch,
                batch,
            ))

    def get_guest_decrypted_forward_from_host(self, epoch, batch):
        return self.transfer_variable.decrypted_guest_fowrad.get(idx=0,
                                                                 suffix=(
                                                                     epoch,
                                                                     batch,
                                                                 ))

    def get_encrypted_acc_noise_from_host(self, epoch, batch):
        return self.transfer_variable.encrypted_acc_noise.get(idx=0,
                                                              suffix=(
                                                                  epoch,
                                                                  batch,
                                                              ))

    def get_output_shape(self):
        return self.host_model.output_shape

    def export_model(self):
        interactive_layer_param = InteractiveLayerParam()
        interactive_layer_param.interactive_guest_saved_model_bytes = self.guest_model.export_model(
        )
        interactive_layer_param.interactive_host_saved_model_bytes = self.host_model.export_model(
        )
        interactive_layer_param.host_input_shape = self.host_input_shape
        interactive_layer_param.guest_input_shape = self.guest_input_shape

        return interactive_layer_param

    def restore_model(self, interactive_layer_param):
        self.host_input_shape = interactive_layer_param.host_input_shape
        self.guest_input_shape = interactive_layer_param.guest_input_shape

        self.__build_model(restore_stage=True)
        self.guest_model.restore_model(
            interactive_layer_param.interactive_guest_saved_model_bytes)
        self.host_model.restore_model(
            interactive_layer_param.interactive_host_saved_model_bytes)