Example #1
import multiprocessing

import tenseal as ts


def initialization(*args):
    """
    :param:tuple : (b_context, b_X, b_X,keys)
            b_context: binary representation of the context_. context_.serialize()$
            b_X : list of binary representations of samples from CKKS vectors format
            b_Y : list of binary representations of labels from CKKS vectors format
            keys : keys of the samples which are passed to the subprocess. the local b_X[i] is the global X[keys[i]]. Useful to map predictions to true labels 
    This function is the first one to be passed in the input_queue queue of the process.
    It first deserialize the context_, passing it global,
    in the memory space allocated to the process
    Then the batch is also deserialize, using the context_,
    to generate a list of CKKS vector which stand for the encrypted samples on which the process will work
    """
    b_context = args[0]
    b_X = args[1]
    b_Y = args[2]
    global context_
    context_ = ts.context_from(b_context)
    global local_X_
    global local_Y_
    local_X_ = [ts.ckks_vector_from(context_, i) for i in b_X]
    local_Y_ = [ts.ckks_vector_from(context_, i) for i in b_Y]
    global local_keys
    local_keys = args[3]
    return 'Initialization done for process %s. Len of data : %i' % (
        multiprocessing.current_process().name, len(local_X_))
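The docstring above refers to a worker loop that pulls (function, args) tuples from input_queue; that loop is not shown on this page. A minimal sketch consistent with how initialization and forward_backward are dispatched in Example #10 (the exact worker implementation is an assumption):

def worker(input_queue, output_queue):
    # consume (function, args) tuples until the 'STOP' sentinel arrives,
    # run each function and push its result back to the parent process
    for func, args in iter(input_queue.get, 'STOP'):
        output_queue.put(func(*args))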
Example #2
from typing import List, Tuple

import tenseal as ts


def load_dataset(
    dataset_id: str,
) -> Tuple[
    ts._ts_cpp.TenSEALContext,
    List[ts._ts_cpp.CKKSVector],
    List[ts._ts_cpp.CKKSVector],
    int,
]:
    """Load a dataset into CKKSVectors"""
    # get_raw_dataset and load_context are helpers defined elsewhere in the source module
    ctx_id, X, Y, batch_size = get_raw_dataset(dataset_id)
    ctx = load_context(ctx_id)
    enc_X = [ts.ckks_vector_from(ctx, x) for x in X]
    enc_Y = [ts.ckks_vector_from(ctx, y) for y in Y]
    return ctx, enc_X, enc_Y, batch_size
Example #3
    def get_dataset(
        self, dataset_id: str, context: ts._ts_cpp.TenSEALContext
    ) -> Tuple[str, List[ts._ts_cpp.CKKSVector], List[ts._ts_cpp.CKKSVector], int]:
        """Get a previously registered dataset using its id

        Args:
            dataset_id: id referring to the previously saved dataset
            context: TenSEALContext used to load the dataset

        Returns:
            (context_id, X, Y, batch_size)

        Raises:
            ConnectionError: if a connection can't be established with the API
            ResourceNotFound: if the dataset identified with `dataset_id` can't be found
            Answer418: if response.status_code is 418
            ServerError: if response.status_code is 500
        """

        url = self._base_url + "/datasets/"
        data = {"dataset_id": dataset_id}

        try:
            response = requests.get(url, params=data)
        except requests.exceptions.ConnectionError:
            raise ConnectionError

        if response.status_code != 200:
            Client._handle_error_response(response)

        resp_json = response.json()
        ctx_id = resp_json["context_id"]
        batch_size = resp_json["batch_size"]
        enc_X, enc_Y = [], []
        for x_buff, y_buff in zip(resp_json["X"], resp_json["Y"]):
            x = ts.ckks_vector_from(context, b64decode(x_buff))
            y = ts.ckks_vector_from(context, b64decode(y_buff))
            enc_X.append(x)
            enc_Y.append(y)

        return ctx_id, enc_X, enc_Y, batch_size
Example #4
    def evaluate(
        self,
        model_name: str,
        context: Union[ts._ts_cpp.TenSEALContext, bytes],  # serialized or not
        ckks_vector: Union[ts._ts_cpp.CKKSVector, bytes],  # serialized or not
    ) -> ts._ts_cpp.CKKSVector:
        """Evaluate model `model_name` on the encrypted input data `ckks_vector`

        Args:
            model_name: the model name to use for evaluation
            context: TenSEALContext containing keys needed for evaluation
            ckks_vector: encrypted input to feed the model with

        Returns:
            tenseal.CKKSVector: encrypted output of the evaluation

        Raises:
            ConnectionError: if a connection can't be established with the API
            Answer418: if response.status_code is 418
            ServerError: if response.status_code is 500
        """

        url = self._base_url + f"/eval/{model_name}"

        # don't serialize if already serialized
        if not isinstance(context, bytes):
            ser_ctx = context.serialize()
        else:
            ser_ctx = context
        if not isinstance(ckks_vector, bytes):
            ser_vec = ckks_vector.serialize()
        else:
            ser_vec = ckks_vector

        data = {
            "context": b64encode(ser_ctx).decode(),
            "ckks_vector": b64encode(ser_vec).decode(),
        }

        try:
            response = requests.post(url, json=data)
        except requests.exceptions.ConnectionError:
            raise ConnectionError

        if response.status_code != 200:
            Client._handle_error_response(response)

        ser_result = response.json()["ckks_vector"]
        # if the context was passed serialized, load it back to deserialize the result
        if isinstance(context, bytes):
            context = ts.context_from(context)
        result = ts.ckks_vector_from(context, b64decode(ser_result))
        return result
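A hypothetical call site for these client methods; the Client construction below is an assumption for illustration (only get_dataset and evaluate are shown above), while the tenseal calls are the library's public API:

import tenseal as ts

# hypothetical constructor; only the methods above appear on this page
client = Client(base_url="http://localhost:8000")

ctx = ts.context(ts.SCHEME_TYPE.CKKS, poly_modulus_degree=8192,
                 coeff_mod_bit_sizes=[60, 40, 40, 60])
ctx.global_scale = 2 ** 40
ctx.generate_galois_keys()

enc_input = ts.ckks_vector(ctx, [0.5] * 16)
enc_output = client.evaluate("my-model", ctx, enc_input)
print(enc_output.decrypt())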
Example #5
    def prepare_input(context, ckks_vector):
        try:
            ctx = ts.context_from(context)
            enc_x = ts.ckks_vector_from(ctx, ckks_vector)
        except Exception:
            raise DeserializationError(
                "cannot deserialize context or ckks_vector")

        # TODO: replace this with a more flexible check when introduced in the API
        try:
            _ = ctx.galois_keys()
        except Exception:
            raise InvalidContext("the context doesn't hold galois keys")

        return enc_x
Example #6
    def prepare_input(context: bytes, ckks_vector: bytes) -> ts._ts_cpp.CKKSVector:
        # TODO: check parameters or size and raise InvalidParameters when needed
        try:
            ctx = ts.context_from(context)
            enc_x = ts.ckks_vector_from(ctx, ckks_vector)
        except Exception:
            raise DeserializationError("cannot deserialize context or ckks_vector")

        # TODO: replace this with a more flexible check when introduced in the API
        try:
            _ = ctx.galois_keys()
        except Exception:
            raise InvalidContext("the context doesn't hold galois keys")

        return enc_x
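Both prepare_input variants reject contexts that lack galois keys. For reference, a minimal sketch of building a suitable TenSEALContext with TenSEAL's public API (the parameter values here are illustrative, not prescribed by the examples):

import tenseal as ts

# illustrative CKKS parameters
ctx = ts.context(
    ts.SCHEME_TYPE.CKKS,
    poly_modulus_degree=8192,
    coeff_mod_bit_sizes=[60, 40, 40, 60],
)
ctx.global_scale = 2 ** 40
ctx.generate_galois_keys()  # prepare_input rejects contexts without these

enc_x = ts.ckks_vector(ctx, [1.0, 2.0, 3.0])
result = prepare_input(ctx.serialize(), enc_x.serialize())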
Example #7
from typing import Optional, Tuple

import tenseal as ts
import typer


def load_ctx_and_input(
    context_file: typer.FileBinaryRead,
    input_file: Optional[typer.FileBinaryRead] = None
) -> Tuple[ts._ts_cpp.TenSEALContext, Optional[ts._ts_cpp.CKKSVector]]:
    try:
        ctx = ts.context_from(context_file.read())
    except Exception as e:
        typer.echo(f"Couldn't load context: {str(e)}", err=True)
        raise typer.Exit(code=1)

    # only load context
    if input_file is None:
        return ctx, None

    try:
        enc_input = ts.ckks_vector_from(ctx, input_file.read())
    except Exception as e:
        typer.echo(f"Couldn't load encrypted input: {str(e)}", err=True)
        raise typer.Exit(code=1)
    return ctx, enc_input
Example #8
def recreate_ckks(vec):
    # round-trip: serialize the vector, then rebuild it from its own context
    vec_proto = vec.serialize()
    return ts.ckks_vector_from(vec.context(), vec_proto)
Example #9
def forward_backward(*args):
    """
        Compute the forward then backward propagation on the local part of the dataset (part of the process)

        :param args: arg[0] serialized weight, arg[1] serialized bias
        :return: serialized grad of weight.
                 serialized grad of bias.
                 serialized predictions on the local part of the dataset, computed by the forward propagation.
                 list of key, int which stand for the id of the data computed (useful for map the predictions)
    """
    b_weight = args[0]
    b_bias = args[1]

    def sigmoid(enc_x, mult_coeff=1):
        """
            Sigmoid implementation
            We use the polynomial approximation of degree 3
            sigmoid(x) = 0.5 + 0.197 * x - 0.004 * x^3
            from https://eprint.iacr.org/2018/462.pdf

            :param enc_x:
            :param mult_coeff: The return is equivalent to sigmoid(x) * mult_coeff, but save one homomorph multiplication
            :return: CKKS vector (result of sigmoid(x))
        """
        poly_coeff = [0.5, 0.197, 0, -0.004]
        return enc_x.polyval([i * mult_coeff for i in poly_coeff])

    def forward(local_weight, local_bias, vec, mult_coeff=1):
        """
            Compute forward propagation on a CKKS vector (or a list of CKKS vectors)
            :param local_bias: encrypted bias used in forward propagation
            :param local_weight: encrypted weight used in forward propagation
            :param vec: CKKS vector or list of CKKS vector on which we want to make local_predictions (ie forward propagation
            :param mult_coeff: The return is equivalent to forward(x) * mult_coeff, but save one homomorph multiplication
            :return: encrypted prediction or list of encrypted local_predictions
        """

        if isinstance(vec, list):
            temp = [i.dot(local_weight) + local_bias for i in vec]
            return [sigmoid(i, mult_coeff=mult_coeff) for i in temp]
        else:
            res = vec.dot(local_weight) + local_bias
            return sigmoid(res, mult_coeff=mult_coeff)

    def backward(X, local_predictions, Y):
        """
            Compute the backpropagation on a given encrypted batch
            :param X: list of encrypted (CKKS vectors). Features of the data on which the gradient will be computed (backpropagation)
            :param local_predictions: list of encrypted CKKS vectors. Label predictions (forward propagation) on the data on which the gradient will be computed (backpropagation)
            :param Y: list of encrypted CKKS vectors. Label of the data on which the gradient will be computed (backpropagation)
            :return: Tuple of 2 CKKS vectors. Encrypted direction of descent for weight and bias"
        """
        if isinstance(X, list):
            err = local_predictions[0] - Y[0]
            grad_weight = X[0] * err
            grad_bias = err
            for i in range(1, len(X)):
                err = local_predictions[i] - Y[i]
                grad_weight += X[i] * err
                grad_bias += err
            return grad_weight, grad_bias
        else:
            err = local_predictions - Y
            grad_weight = X * err
            grad_bias = err

            return grad_weight, grad_bias

    bias = ts.ckks_vector_from(context_, b_bias)
    weight = ts.ckks_vector_from(context_, b_weight)

    predictions = forward(local_bias=bias, local_weight=weight, vec=local_X_)
    grads = backward(local_X_, predictions, local_Y_)
    b_grad_weight = grads[0].serialize()
    b_grad_bias = grads[1].serialize()
    b_predictions = [prediction.serialize() for prediction in predictions]
    return b_grad_weight, b_grad_bias, b_predictions, local_keys
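A quick plaintext sanity check of the degree-3 sigmoid approximation used above (purely illustrative, not part of the original example):

import math

def sigmoid_exact(x):
    return 1 / (1 + math.exp(-x))

def sigmoid_approx(x):
    # degree-3 polynomial from https://eprint.iacr.org/2018/462.pdf
    return 0.5 + 0.197 * x - 0.004 * x ** 3

for x in (-4, -1, 0, 1, 4):
    print(x, round(sigmoid_exact(x), 3), round(sigmoid_approx(x), 3))
# the approximation tracks the sigmoid near the origin and degrades for large |x|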
Example #10
    def fit(self, X, Y):
        """
        Train the model over encrypted data.

        :param X: list of CKKS vectors: encrypted samples (train set)
        :param Y: list of CKKS vectors: encrypted labels (train set)

        """
        keys = [[] for _ in range(self.n_jobs)]
        b_X = [[] for _ in range(self.n_jobs)]
        b_Y = [[] for _ in range(self.n_jobs)]

        self.logger.info("Starting serialization of data")
        ser_time = time.time()
        for i in range(len(X)):
            b_X[i % self.n_jobs].append(X[i].serialize())
            b_Y[i % self.n_jobs].append(Y[i].serialize())
            keys[i % self.n_jobs].append(i)
        self.logger.info("Data serialization done in %s seconds" %
                         (time.time() - ser_time))
        inv_n = (1 / len(Y))
        self.logger.info("Initialization of %d workers" % self.n_jobs)
        init_work_timer = time.time()
        list_queue_in = []
        queue_out = multiprocessing.Queue()
        init_tasks = [(initialization, (self.b_context, x, y, key))
                      for x, y, key in zip(b_X, b_Y, keys)]
        for init in init_tasks:
            list_queue_in.append(multiprocessing.Queue())
            list_queue_in[-1].put(init)
            multiprocessing.Process(target=worker,
                                    args=(list_queue_in[-1],
                                          queue_out)).start()
        log_out = []
        for _ in range(self.n_jobs):
            log_out.append(queue_out.get())
            logging.info(log_out[-1])
        self.logger.info("Initialization done in %s seconds" %
                         (time.time() - init_work_timer))

        del b_X
        del b_Y

        while self.iter < self.num_iter:

            timer_iter = time.time()

            self.weight = self.refresh(self.weight)
            self.bias = self.refresh(self.bias)
            b_weight = self.weight.serialize()
            b_bias = self.bias.serialize()

            for q in list_queue_in:
                q.put((forward_backward, (
                    b_weight,
                    b_bias,
                )))

            direction_weight, direction_bias = 0, 0
            b_predictions = [0 for _ in range(len(X))]
            for _ in range(self.n_jobs):
                child_process_ans = queue_out.get()
                direction_weight += ts.ckks_vector_from(
                    self.context, child_process_ans[0])
                direction_bias += ts.ckks_vector_from(self.context,
                                                      child_process_ans[1])
                for pred, key in zip(child_process_ans[2],
                                     child_process_ans[3]):
                    b_predictions[key] = pred

            direction_weight = (direction_weight * self.lr *
                                inv_n) + (self.weight *
                                          (self.lr * inv_n * self.reg_para))
            direction_bias = direction_bias * self.lr * inv_n + (
                self.bias * (self.lr * inv_n * self.reg_para))

            self.weight -= direction_weight
            self.bias -= direction_bias

            self.logger.info(
                "Just finished iteration number %d in %s seconds." %
                (self.iter, time.time() - timer_iter))

            if self.verbose > 0 and self.iter % self.verbose == 0:
                self.weight = self.refresh(self.weight)
                self.bias = self.refresh(self.bias)
                self.logger.info("Starting computation of the loss ...")
                self.loss_list.append(
                    self.loss(Y, (ts.ckks_vector_from(self.context, prediction)
                                  for prediction in b_predictions)))
                self.logger.info('Loss : ' + str(self.loss_list[-1]) + ".")
            if self.save_weight > 0 and self.iter % self.save_weight == 0:
                self.weight_list.append(self.weight)
                self.bias_list.append(self.bias)

            self.iter += 1

        for q in list_queue_in:
            q.put('STOP')

        self.weight = self.refresh(self.weight)
        self.bias = self.refresh(self.bias)

        return self
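A hypothetical usage sketch for this trainer; the class name EncryptedLogisticRegression and its constructor arguments are assumptions for illustration (this page only shows fit, initialization, and forward_backward):

import tenseal as ts

# hypothetical class name and constructor arguments
model = EncryptedLogisticRegression(context=ctx, n_jobs=4, num_iter=10,
                                    lr=0.5, reg_para=0.05, verbose=1,
                                    save_weight=0)

# fit expects lists of CKKS vectors for samples and labels
enc_X = [ts.ckks_vector(ctx, row) for row in plain_X]
enc_Y = [ts.ckks_vector(ctx, [label]) for label in plain_Y]
model.fit(enc_X, enc_Y)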
Example #11
from base64 import b64decode

from tenseal import ckks_vector_from


def client_deserialize(ctx, result):
    # recursively rebuild CKKS vectors from base64-encoded payloads
    if not isinstance(result, list):
        return ckks_vector_from(ctx, b64decode(result))
    else:
        return [client_deserialize(ctx, e) for e in result]
Example #12
def deserialize_decrypt(ctx, vector, secret_key):
    # mirror of client_deserialize, but decrypt each vector with the secret key
    if isinstance(vector, list):
        return [deserialize_decrypt(ctx, e, secret_key) for e in vector]
    else:
        return ckks_vector_from(ctx, b64decode(vector)).decrypt(secret_key)
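These two helpers undo a serialize-then-base64 encoding; the matching encoding side is not shown on this page, but would look roughly like this:

from base64 import b64encode

def client_serialize(vector):
    # inverse of client_deserialize: serialize CKKS vectors and
    # base64-encode them, recursing into lists
    if isinstance(vector, list):
        return [client_serialize(e) for e in vector]
    return b64encode(vector.serialize()).decode()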
Example #13
    def unpack(self, data_file):
        with open('{}.bin'.format(data_file), 'rb') as f:
            data_serial = f.read()
        return ts.ckks_vector_from(self.public_context, data_serial)
Example #14
    def receive_results(self, result_file, flag_file):
        with open('{}.bin'.format(result_file), 'rb') as f:
            res_serial = f.read()
        with open('{}.bin'.format(flag_file), 'rb') as f:
            # note: bool() of any non-empty byte string is True,
            # so the flag file must be written empty to signal False
            flag = bool(f.read())
        return flag, ts.ckks_vector_from(self.public_context, res_serial)
Example #15
import tenseal as ts
import time
import p2p

bin_name = 'vectorReceive.bin'
context_name = 'contextReceive.bin'

# receive the encrypted vector, then the context, over two successive connections
receiver = p2p.Receiver('127.0.0.1', 8080)
receiver.receive(bin_name)
receiver = p2p.Receiver('127.0.0.1', 8080)
receiver.receive(context_name)

with open(bin_name, 'rb') as f:
    vector_enc_bin = f.read()

with open(context_name, 'rb') as f:
    context_bin = f.read()

context = ts.context_from(context_bin)
vector_enc = ts.ckks_vector_from(context, vector_enc_bin)

# decrypt() without an argument works only if the received context holds the secret key
vector = vector_enc.decrypt()

print("Received vector:", vector)