Example #1
0
    def test_tensor_op(self):
        """Smoke-test basic PaillierTensor operations: elementwise multiply,
        3-D matrix multiply, encryption, and random-number generation.

        NOTE(review): results (rs1-rs3, enrs, random_num) are never asserted
        on, and arr3/pt3, arr5/pt5 are unused -- this only verifies that the
        operations run without raising.
        """

        # 10 batches of 1x3 row vectors; first batch made distinct so a
        # wrong broadcast would be visible in the product.
        arr1 = np.ones((10, 1, 3))
        arr1[0] = np.array([[2, 3, 4]])
        arr2 = np.ones((10, 3, 3))
        arr3 = np.ones([1, 1, 3])

        arr4 = np.ones([50, 1])
        arr5 = np.ones([32])

        pt = PaillierTensor(ori_data=arr1)
        pt2 = PaillierTensor(ori_data=arr2)
        pt3 = PaillierTensor(ori_data=arr3)

        pt4 = PaillierTensor(ori_data=arr4)
        pt5 = PaillierTensor(ori_data=arr5)

        # Paillier encrypter and a mode calculator built from default params.
        encrypter = PaillierEncrypt()
        encrypter.generate_key(EncryptParam().key_length)
        encrypted_calculator = EncryptModeCalculator(
            encrypter,
            EncryptedModeCalculatorParam().mode,
            EncryptedModeCalculatorParam().re_encrypted_rate)
        # plaintext tensor ops
        rs1 = pt * arr2          # tensor * ndarray
        rs2 = pt * pt2           # tensor * tensor
        rs3 = pt.matmul_3d(pt2)  # batched 3-D matmul
        # encrypted tensor ops
        enpt = pt2.encrypt(encrypted_calculator)
        enrs = enpt.matmul_3d(arr1, multiply='right')

        rng_generator = random_number_generator.RandomNumberGenerator()

        enpt2 = pt4.encrypt(encrypted_calculator)
        random_num = rng_generator.generate_random_number(enpt2.shape)
Example #2
0
    def forward(self, host_input, epoch=0, batch=0):
        """One forward pass on the host side of the interactive layer.

        Encrypts the host bottom-model output, sends it to the guest,
        decrypts the guest's encrypted forward result, adds the accumulated
        noise term, and sends the noised plaintext back to the guest.
        """
        # lazily create one encrypt-mode calculator per batch
        if batch >= len(self.train_encrypted_calculator):
            self.train_encrypted_calculator.append(
                self.generated_encrypted_calculator())

        LOGGER.info(
            "forward propagation: encrypt host_bottom_output of epoch {} batch {}"
            .format(epoch, batch))
        host_input = PaillierTensor(ori_data=host_input,
                                    partitions=self.partitions)
        encrypted_host_input = host_input.encrypt(
            self.train_encrypted_calculator[batch])
        self.send_host_encrypted_forward_to_guest(
            encrypted_host_input.get_obj(), epoch, batch)

        encrypted_guest_forward = PaillierTensor(
            tb_obj=self.get_guest_encrypted_forwrad_from_guest(epoch, batch))

        decrypted_guest_forward = encrypted_guest_forward.decrypt(
            self.encrypter)

        # first batch: initialise the accumulated-noise matrix to zeros
        if self.acc_noise is None:
            self.input_shape = host_input.shape[1]
            self.output_unit = encrypted_guest_forward.shape[1]
            self.acc_noise = np.zeros((self.input_shape, self.output_unit))
        # NOTE(review): original author flagged "some bugs here" -- presumably
        # around the noise term below; behaviour left unchanged. Compare with
        # the fixed-point / drop-out variant of this method before trusting.
        decrypted_guest_forward_with_noise = decrypted_guest_forward + host_input * self.acc_noise

        self.send_decrypted_guest_forward_with_noise_to_guest(
            decrypted_guest_forward_with_noise.get_obj(), epoch, batch)
Example #3
0
 def select_backward_sample(self, selective_ids):
     """Cache the inputs/activations of the selected samples for a later
     selective backward pass.

     Selected rows are re-keyed to positions appended after the current
     cache (offset by the cache's row count), so repeated selections
     accumulate instead of colliding.
     """
     cached_shape = self.input_cached.shape[0]
     # new keys continue right after the rows already cached
     offsets = [i + cached_shape for i in range(len(selective_ids))]
     id_map = dict(zip(selective_ids, offsets))
     if cached_shape == 0:
         # first selection: build the cache from scratch
         self.input_cached = (self.input.get_obj().filter(
             lambda k, v: k in id_map).map(lambda k, v: (id_map[k], v)))
         self.input_cached = PaillierTensor(tb_obj=self.input_cached)
         self.activation_cached = self.activation_input[selective_ids]
     else:
         # append: re-key the selected rows, then union with the cache
         selective_input = (self.input.get_obj().filter(
             lambda k, v: k in id_map).map(lambda k, v: (id_map[k], v)))
         self.input_cached = PaillierTensor(
             tb_obj=self.input_cached.get_obj().union(selective_input))
         self.activation_cached = np.vstack(
             (self.activation_cached, self.activation_input[selective_ids]))
Example #4
0
 def setUp(self):
     """Start a uniquely-named computing session and build two identical
     all-ones 1000x10 PaillierTensors with 10 partitions each."""
     session.init("test_paillier_tensor" + str(random.random()), 0)
     shape = (1000, 10)
     self.data1 = np.ones(shape)
     self.data2 = np.ones(shape)
     self.paillier_tensor1 = PaillierTensor(ori_data=self.data1, partitions=10)
     self.paillier_tensor2 = PaillierTensor(ori_data=self.data2, partitions=10)
Example #5
0
    def forward_interactive(self,
                            encrypted_host_input,
                            epoch,
                            batch,
                            train=True):
        """Guest-side interactive forward: apply the dense layer to the
        encrypted host input, mask it with random noise, have the host
        decrypt it, and return the de-noised plaintext output.

        During training an optional drop-out mask restricts the columns
        that are sent and decrypted.
        """
        LOGGER.info(
            "get encrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        mask_table = None

        # dense layer applied homomorphically to the encrypted input
        encrypted_dense_output = self.host_model.forward_dense(
            encrypted_host_input, self.fixed_point_encoder)
        if train:
            self._create_drop_out(encrypted_dense_output.shape)
            if self.drop_out:
                mask_table = self.drop_out.generate_mask_table()

        self.encrypted_host_dense_output = encrypted_dense_output

        # with drop-out, only the kept columns are processed further
        if mask_table:
            encrypted_dense_output = encrypted_dense_output.select_columns(
                mask_table)

        # random additive noise hides the true output from the host
        guest_forward_noise = self.rng_generator.fast_generate_random_number(
            encrypted_dense_output.shape,
            encrypted_dense_output.partitions,
            keep_table=mask_table)

        if self.fixed_point_encoder:
            # encode the noise so it lives in the same fixed-point domain
            encrypted_dense_output += guest_forward_noise.encode(
                self.fixed_point_encoder)
        else:
            encrypted_dense_output += guest_forward_noise

        self.send_guest_encrypted_forward_output_with_noise_to_host(
            encrypted_dense_output.get_obj(), epoch, batch)
        if mask_table:
            self.send_interactive_layer_drop_out_table(mask_table, epoch,
                                                       batch)

        LOGGER.info(
            "get decrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        decrypted_dense_output = self.get_guest_decrypted_forward_from_host(
            epoch, batch)

        # subtract the noise; with drop-out, re-expand to the full column set
        if mask_table:
            out = PaillierTensor(
                tb_obj=decrypted_dense_output) - guest_forward_noise
            out = out.get_obj().join(mask_table, self.expand_columns)
            return PaillierTensor(tb_obj=out)
        else:
            return PaillierTensor(
                tb_obj=decrypted_dense_output) - guest_forward_noise
    def fast_generate_random_number(self, shape, partition=10, mixed_rate=MIXED_RATE, keep_table=None):
        """Build a PaillierTensor of random numbers for the given shape.

        When *keep_table* is supplied, one random row is generated per kept
        entry; otherwise rows are generated across *partition* partitions.
        """
        if not keep_table:
            rows = computing_session.parallelize(
                [None for _ in range(shape[0])],
                include_key=False,
                partition=partition)
            rows = rows.mapValues(
                lambda _: self.generate_random_number(shape[1:],
                                                      mixed_rate=mixed_rate))
            return PaillierTensor(tb_obj=rows)

        masked_rows = keep_table.mapValues(
            lambda keep_array: self.generate_random_number(
                keep=keep_array, mixed_rate=mixed_rate))
        return PaillierTensor(tb_obj=masked_rows)
Example #7
0
    def forward(self, guest_input, epoch=0, batch=0, train=True):
        """Guest-side forward pass of the interactive layer.

        Recovers the host's dense output through forward_interactive,
        combines it with the guest's own dense output, then applies the
        activation (and drop-out when training).
        """
        LOGGER.info(
            "interactive layer start forward propagation of epoch {} batch {}".
            format(epoch, batch))
        encrypted_host_input = PaillierTensor(
            tb_obj=self.get_host_encrypted_forward_from_host(epoch, batch))

        # adopt the incoming table's partitioning if none is configured yet
        if not self.partitions:
            self.partitions = encrypted_host_input.partitions

        self.encrypted_host_input = encrypted_host_input
        self.guest_input = guest_input

        # lazily build the interactive model once input widths are known
        if self.guest_model is None:
            LOGGER.info("building interactive layers' training model")
            self.host_input_shape = encrypted_host_input.shape[1]
            self.guest_input_shape = guest_input.shape[
                1] if guest_input is not None else 0
            self.__build_model()

        # tell the host the output width exactly once
        if not self.sync_output_unit:
            self.sync_output_unit = True
            self.sync_interactive_layer_output_unit(
                self.host_model.output_shape[0])

        host_output = self.forward_interactive(encrypted_host_input, epoch,
                                               batch, train)

        guest_output = self.guest_model.forward_dense(guest_input)

        # an "empty" guest model contributes nothing to the dense output
        if not self.guest_model.empty:
            dense_output_data = host_output + PaillierTensor(
                ori_data=guest_output, partitions=self.partitions)
        else:
            dense_output_data = host_output

        self.dense_output_data = dense_output_data

        self.guest_output = guest_output
        self.host_output = host_output

        LOGGER.info(
            "start to get interactive layer's activation output of epoch {} batch {}"
            .format(epoch, batch))
        activation_out = self.host_model.forward_activation(
            self.dense_output_data.numpy())
        LOGGER.info(
            "end to get interactive layer's activation output of epoch {} batch {}"
            .format(epoch, batch))

        if train and self.drop_out:
            activation_out = self.drop_out.forward(activation_out)

        return activation_out
Example #8
0
    def backward(self, epoch, batch):
        """Host-side backward pass of the interactive layer.

        Decrypts the guest's weight gradient (with fresh noise added,
        scaled by the learning rate), sends the encrypted accumulated noise
        to the guest, accumulates the new noise locally, and returns the
        decrypted gradient w.r.t. the host input as a numpy array.
        """
        encrypted_guest_weight_gradient = self.get_guest_encrypted_weight_gradient_from_guest(
            epoch, batch)

        LOGGER.info("decrypt weight gradient of epoch {} batch {}".format(
            epoch, batch))
        decrypted_guest_weight_gradient = self.encrypter.recursive_decrypt(
            encrypted_guest_weight_gradient)

        noise_weight_gradient = self.rng_generator.generate_random_number(
            (self.input_shape, self.output_unit))

        # NOTE(review): dividing by the learning rate presumably makes the
        # noise cancel once the guest applies its weight update -- confirm
        # against the guest-side update logic.
        decrypted_guest_weight_gradient += noise_weight_gradient / self.learning_rate

        self.send_guest_decrypted_weight_gradient_to_guest(
            decrypted_guest_weight_gradient, epoch, batch)

        LOGGER.info("encrypt acc_noise of epoch {} batch {}".format(
            epoch, batch))
        encrypted_acc_noise = self.encrypter.recursive_encrypt(self.acc_noise)
        self.send_encrypted_acc_noise_to_guest(encrypted_acc_noise, epoch,
                                               batch)

        # track the total noise injected so far
        self.acc_noise += noise_weight_gradient
        host_input_gradient = PaillierTensor(
            tb_obj=self.get_host_backward_from_guest(epoch, batch))

        host_input_gradient = host_input_gradient.decrypt(
            self.encrypter).numpy()

        return host_input_gradient
Example #9
0
    def exchange_components(self, comp_to_send, epoch_idx):
        """Send host components to the guest and receive guest components.

        In 'encrypted' mode the outgoing components are encrypted first,
        and the received tables are wrapped as PaillierTensors.
        """
        if self.mode == 'encrypted':
            comp_to_send = self.encrypt_tensor(comp_to_send)

        tv = self.transfer_variable
        suffix = (epoch_idx, )

        # receive guest components (order matches the guest-side remotes)
        guest_components = [
            tv.y_overlap_2_phi_2.get(idx=0, suffix=suffix),
            tv.y_overlap_phi.get(idx=0, suffix=suffix),
            tv.mapping_comp_a.get(idx=0, suffix=suffix),
        ]

        # send host components
        tv.overlap_ub.remote(comp_to_send[0], suffix=suffix)
        tv.overlap_ub_2.remote(comp_to_send[1], suffix=suffix)
        tv.mapping_comp_b.remote(comp_to_send[2], suffix=suffix)

        if self.mode == 'encrypted':
            return [
                PaillierTensor(tb_obj=tb, partitions=self.partitions)
                for tb in guest_components
            ]
        return guest_components
Example #10
0
    def forward_interactive(self, encrypted_host_input, epoch, batch):
        """Guest-side interactive forward (variant without drop-out or
        fixed-point encoding).

        Applies the dense layer to the encrypted host input, masks the
        result with random noise, sends it to the host for decryption, and
        returns the de-noised plaintext tensor.
        """
        LOGGER.info(
            "get encrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        encrypted_dense_output = self.host_model.forward_dense(
            encrypted_host_input)

        self.encrypted_host_dense_output = encrypted_dense_output

        # additive noise hides the true output from the host
        guest_forward_noise = self.rng_generator.fast_generate_random_number(
            encrypted_dense_output.shape, encrypted_dense_output.partitions)

        encrypted_dense_output += guest_forward_noise

        self.send_guest_encrypted_forward_output_with_noise_to_host(
            encrypted_dense_output.get_obj(), epoch, batch)

        LOGGER.info(
            "get decrypted dense output of host model of epoch {} batch {}".
            format(epoch, batch))
        decrypted_dense_output = self.get_guest_decrypted_forward_from_host(
            epoch, batch)

        # subtract the noise to recover the actual dense output
        return PaillierTensor(
            tb_obj=decrypted_dense_output) - guest_forward_noise
Example #11
0
    def decrypt_guest_data(self, epoch_idx, local_round=-1):
        """Decrypt the guest's masked constant and gradient table and send
        the plaintext results back to the guest."""
        tag = (epoch_idx, local_round)
        tv = self.transfer_variable

        encrypted_const = tv.guest_side_const.get(suffix=tag, idx=0)
        grad_table = tv.guest_side_gradients.get(suffix=tag, idx=0)

        plaintext_grad = PaillierTensor(
            tb_obj=grad_table,
            partitions=self.partitions).decrypt(self.encrypter)
        plaintext_const = self.encrypter.recursive_decrypt(encrypted_const)

        tv.decrypted_guest_const.remote(plaintext_const, suffix=tag)
        tv.decrypted_guest_gradients.remote(plaintext_grad.get_obj(),
                                            suffix=tag)
Example #12
0
    def exchange_components(self, comp_to_send, epoch_idx):
        """Send guest components to the host and receive host components.

        In 'encrypted' mode the outgoing components are encrypted first,
        and the received tables are wrapped as PaillierTensors.
        """
        if self.mode == 'encrypted':
            comp_to_send = self.encrypt_tensor(comp_to_send)

        tv = self.transfer_variable
        suffix = (epoch_idx, )

        # send [y_overlap_2_phi_2, y_overlap_phi, mapping_comp_a]
        tv.y_overlap_2_phi_2.remote(comp_to_send[0], suffix=suffix)
        tv.y_overlap_phi.remote(comp_to_send[1], suffix=suffix)
        tv.mapping_comp_a.remote(comp_to_send[2], suffix=suffix)

        # receive [overlap_ub, overlap_ub_2, mapping_comp_b]
        host_components = [
            tv.overlap_ub.get(idx=0, suffix=suffix),
            tv.overlap_ub_2.get(idx=0, suffix=suffix),
            tv.mapping_comp_b.get(idx=0, suffix=suffix),
        ]

        if self.mode == 'encrypted':
            return [
                PaillierTensor(tb_obj=tb, partitions=self.partitions)
                for tb in host_components
            ]
        return host_components
Example #13
0
    def decrypt_inter_result(self, loss_grad_b, epoch_idx, local_round=-1):
        """Mask *loss_grad_b* with a random tensor, have the guest decrypt
        it, and return the unmasked plaintext gradient."""
        mask = PaillierTensor(
            ori_data=self.rng_generator.generate_random_number(
                loss_grad_b.shape),
            partitions=self.partitions)

        # ship the masked (still encrypted) gradient for decryption
        self.transfer_variable.host_side_gradients.remote(
            (loss_grad_b + mask).get_obj(),
            suffix=(epoch_idx, local_round, 'host_de_send'))

        decrypted_table = self.transfer_variable.decrypted_host_gradients\
                                                .get(suffix=(epoch_idx, local_round, 'host_de_get'), idx=0)

        # strip the mask from the decrypted result
        return PaillierTensor(tb_obj=decrypted_table,
                              partitions=self.partitions) - mask
    def fast_generate_random_number(self, shape, partition=10):
        """Build a PaillierTensor whose rows are random arrays of shape
        shape[1:], parallelized across *partition* partitions."""
        placeholders = [None] * shape[0]
        rows = session.parallelize(placeholders,
                                   include_key=False,
                                   partition=partition)
        rows = rows.mapValues(
            lambda _: self.generate_random_number(shape[1:]))
        return PaillierTensor(tb_obj=rows)
Example #15
0
    def forward(self, host_input, epoch=0, batch=0, train=True):
        """Host-side forward pass (drop-out / fixed-point aware variant).

        Encrypts the host bottom output, sends it to the guest, decrypts
        the guest's encrypted forward result (decoding fixed-point when
        enabled), adds the accumulated noise term (restricted to the
        drop-out columns during training), and returns it to the guest.
        """
        # lazily create one encrypt-mode calculator per batch
        if batch >= len(self.train_encrypted_calculator):
            self.train_encrypted_calculator.append(
                self.generated_encrypted_calculator())

        LOGGER.info(
            "forward propagation: encrypt host_bottom_output of epoch {} batch {}"
            .format(epoch, batch))
        host_input = PaillierTensor(ori_data=host_input,
                                    partitions=self.partitions)

        encrypted_host_input = host_input.encrypt(
            self.train_encrypted_calculator[batch])
        self.send_host_encrypted_forward_to_guest(
            encrypted_host_input.get_obj(), epoch, batch)

        encrypted_guest_forward = PaillierTensor(
            tb_obj=self.get_guest_encrypted_forwrad_from_guest(epoch, batch))

        decrypted_guest_forward = encrypted_guest_forward.decrypt(
            self.encrypter)
        if self.fixed_point_encoder:
            decrypted_guest_forward = decrypted_guest_forward.decode(
                self.fixed_point_encoder)

        # first batch: initialise the accumulated-noise matrix to zeros
        if self.acc_noise is None:
            self.input_shape = host_input.shape[1]
            self.output_unit = self.get_interactive_layer_output_unit()
            self.acc_noise = np.zeros((self.input_shape, self.output_unit))

        # during training, fetch the guest's drop-out column mask (if any)
        mask_table = None
        if train and self.drop_out_keep_rate and self.drop_out_keep_rate < 1:
            mask_table = self.get_interactive_layer_drop_out_table(
                epoch, batch)

        if mask_table:
            # only the kept columns carry the noise term
            decrypted_guest_forward_with_noise = decrypted_guest_forward + (
                host_input * self.acc_noise).select_columns(mask_table)
            self.mask_table = mask_table
        else:
            decrypted_guest_forward_with_noise = decrypted_guest_forward + (
                host_input * self.acc_noise)

        self.send_decrypted_guest_forward_with_noise_to_guest(
            decrypted_guest_forward_with_noise.get_obj(), epoch, batch)
Example #16
0
    def decrypt_host_data(self, epoch_idx, local_round=-1):
        """Decrypt the host's masked gradient table and send the plaintext
        back to the host."""
        masked_grad_table = self.transfer_variable.host_side_gradients.get(
            suffix=(epoch_idx, local_round, 'host_de_send'), idx=0)

        plaintext = PaillierTensor(
            tb_obj=masked_grad_table,
            partitions=self.partitions).decrypt(self.encrypter)

        self.transfer_variable.decrypted_host_gradients.remote(
            plaintext.get_obj(),
            suffix=(epoch_idx, local_round, 'host_de_get'))
Example #17
0
    def decrypt_inter_result(self,
                             encrypted_const,
                             grad_a_overlap,
                             epoch_idx,
                             local_round=-1):
        """Mask the encrypted intermediate results, have the host decrypt
        them, and return the unmasked plaintext (const, grad_a_overlap).
        """
        tag = (epoch_idx, local_round)
        tv = self.transfer_variable

        # additive random masks hide the true values from the host
        const_mask = self.rng_generator.generate_random_number(
            encrypted_const.shape)
        grad_mask = PaillierTensor(
            ori_data=self.rng_generator.generate_random_number(
                grad_a_overlap.shape),
            partitions=self.partitions)

        encrypted_const = encrypted_const + const_mask
        grad_a_overlap = grad_a_overlap + grad_mask

        tv.guest_side_const.remote(encrypted_const, suffix=tag)
        tv.guest_side_gradients.remote(grad_a_overlap.get_obj(), suffix=tag)

        # receive the decrypted values and strip the masks
        const = tv.decrypted_guest_const.get(suffix=tag, idx=0)
        grad_table = tv.decrypted_guest_gradients.get(suffix=tag, idx=0)

        const = const - const_mask
        grad_a_overlap = PaillierTensor(
            tb_obj=grad_table, partitions=self.partitions) - grad_mask

        return const, grad_a_overlap
Example #18
0
    def update_host(self, activation_gradient, weight_gradient, acc_noise):
        """Update the host model's weights and bias, and return the
        gradient w.r.t. the host input (with acc_noise folded in)."""
        grad_tensor = PaillierTensor(ori_data=activation_gradient,
                                     partitions=self.partitions)
        input_gradient = self.host_model.get_input_gradient(grad_tensor,
                                                            acc_noise)

        self.host_model.update_weight(weight_gradient)
        self.host_model.update_bias(activation_gradient)

        return input_gradient
Example #19
0
    def encrypt_tensor(self, components, return_dtable=True):
        """Wrap each numpy component as a PaillierTensor and encrypt it.

        Returns the underlying tables when *return_dtable* is True,
        otherwise the PaillierTensor wrappers themselves.
        """
        # lazily create one calculator per component (three in total)
        if not self.encrypt_calculators:
            self.encrypt_calculators = [
                self.generated_encrypted_calculator() for _ in range(3)
            ]

        encrypted_tensors = []
        for comp, calculator in zip(components, self.encrypt_calculators):
            tensor = PaillierTensor(ori_data=comp, partitions=self.partitions)
            encrypted = tensor.encrypt(calculator)
            encrypted_tensors.append(
                encrypted.get_obj() if return_dtable else encrypted)

        return encrypted_tensors
Example #20
0
    def compute_backward_gradients(self, host_components, data_loader: FTLDataLoader, epoch_idx, local_round=-1):

        """Compute the guest-side backward gradients from host components.

        host_components is [overlap_ub, overlap_ub_2, mapping_comp_b]:
        numpy arrays in 'plain' mode, PaillierTensors in 'encrypted' mode.
        Returns the gradients for overlap samples concatenated with those
        for non-overlap samples.
        """

        # they are Paillier tensors or np array
        overlap_ub, overlap_ub_2, mapping_comp_b = host_components[0], host_components[1], host_components[2]

        y_overlap_2_phi = np.expand_dims(self.overlap_y_2 * self.phi, axis=1)

        if self.mode == 'plain':

            # plaintext path: plain numpy arithmetic throughout
            loss_grads_const_part1 = 0.25 * np.squeeze(np.matmul(y_overlap_2_phi, overlap_ub_2), axis=1)
            loss_grads_const_part2 = self.overlap_y * overlap_ub

            const = np.sum(loss_grads_const_part1, axis=0) - 0.5 * np.sum(loss_grads_const_part2, axis=0)

            grad_a_nonoverlap = self.alpha * const * data_loader.y[data_loader.get_non_overlap_indexes()] / self.data_num
            grad_a_overlap = self.alpha * const * self.overlap_y / self.data_num + mapping_comp_b

            return np.concatenate([grad_a_overlap, grad_a_nonoverlap], axis=0)

        elif self.mode == 'encrypted':

            # encrypted path: same formulas, but via homomorphic tensor ops;
            # intermediate results must round-trip through decrypt_inter_result
            loss_grads_const_part1 = overlap_ub_2.matmul_3d(0.25 * y_overlap_2_phi, multiply='right')
            loss_grads_const_part1 = loss_grads_const_part1.squeeze(axis=1)

            if self.overlap_y_pt is None:
                self.overlap_y_pt = PaillierTensor(self.overlap_y, partitions=self.partitions)

            loss_grads_const_part2 = overlap_ub * self.overlap_y_pt

            encrypted_const = loss_grads_const_part1.reduce_sum() - 0.5 * loss_grads_const_part2.reduce_sum()

            grad_a_overlap = self.overlap_y_pt.map_ndarray_product((self.alpha/self.data_num * encrypted_const)) + mapping_comp_b

            # host decrypts masked intermediates; masks stripped on return
            const, grad_a_overlap = self.decrypt_inter_result(encrypted_const, grad_a_overlap, epoch_idx=epoch_idx
                                                              , local_round=local_round)

            # reciprocally decrypt the host's own masked data
            self.decrypt_host_data(epoch_idx, local_round=local_round)

            grad_a_nonoverlap = self.alpha * const * data_loader.y[data_loader.get_non_overlap_indexes()]/self.data_num

            return np.concatenate([grad_a_overlap.numpy(), grad_a_nonoverlap], axis=0)
Example #21
0
    def backward(self, epoch, batch):
        """Host-side backward pass with optional selective backpropagation
        and fixed-point decoding.

        When the backward-select strategy is enabled, the guest tells us
        which samples to keep and whether to run backward at all. Otherwise
        this mirrors the plain backward: decrypt the guest's weight gradient
        (with fresh noise added, scaled by the learning rate), send the
        encrypted accumulated noise to the guest, accumulate the new noise,
        and return (host_input_gradient, selective_ids).
        """
        do_backward = True
        selective_ids = []
        if self.do_backward_select_strategy:
            selective_ids, do_backward = self.sync_backward_select_info(
                epoch, batch)

        # nothing to backpropagate this batch
        if not do_backward:
            return [], selective_ids

        encrypted_guest_weight_gradient = self.get_guest_encrypted_weight_gradient_from_guest(
            epoch, batch)

        LOGGER.info("decrypt weight gradient of epoch {} batch {}".format(
            epoch, batch))
        decrypted_guest_weight_gradient = self.encrypter.recursive_decrypt(
            encrypted_guest_weight_gradient)

        noise_weight_gradient = self.rng_generator.generate_random_number(
            (self.input_shape, self.output_unit))

        # NOTE(review): dividing by the learning rate presumably makes the
        # noise cancel once the guest applies its update -- confirm against
        # the guest-side update logic.
        decrypted_guest_weight_gradient += noise_weight_gradient / self.learning_rate

        self.send_guest_decrypted_weight_gradient_to_guest(
            decrypted_guest_weight_gradient, epoch, batch)

        LOGGER.info("encrypt acc_noise of epoch {} batch {}".format(
            epoch, batch))
        encrypted_acc_noise = self.encrypter.recursive_encrypt(self.acc_noise)
        self.send_encrypted_acc_noise_to_guest(encrypted_acc_noise, epoch,
                                               batch)

        # track the total noise injected so far
        self.acc_noise += noise_weight_gradient
        host_input_gradient = PaillierTensor(
            tb_obj=self.get_host_backward_from_guest(epoch, batch))

        host_input_gradient = host_input_gradient.decrypt(self.encrypter)

        # undo fixed-point encoding before handing back a numpy array
        if self.fixed_point_encoder:
            host_input_gradient = host_input_gradient.decode(
                self.fixed_point_encoder).numpy()
        else:
            host_input_gradient = host_input_gradient.numpy()

        return host_input_gradient, selective_ids