Example #1
    def __apply_private_op(
        self, y: "MPCTensor", op_str: str, kwargs_: Dict[Any, Any]
    ) -> "MPCTensor":
        """Apply an operation on 2 MPCTensor (secret shared values).

        Args:
            y (MPCTensor): Tensor to apply the operation
            op_str (str): The operation
            kwargs_ (dict): Kwargs for some operations like conv2d

        Returns:
            MPCTensor. The operation "op_str" applied on "self" and "y"

        Raises:
            ValueError: If session from MPCTensor and "y" is not the same.
        """
        if y.session.uuid != self.session.uuid:
            raise ValueError(
                f"Need same session {self.session.uuid} and {y.session.uuid}"
            )

        if op_str in {"mul", "matmul", "conv2d", "conv_transpose2d"}:
            from sympc.protocol.spdz import spdz

            result = spdz.mul_master(self, y, op_str, kwargs_)
            result.shape = MPCTensor._get_shape(op_str, self.shape, y.shape)
        elif op_str in {"sub", "add"}:
            op = getattr(operator, op_str)
            shares = [
                op(*share_tuple) for share_tuple in zip(self.share_ptrs, y.share_ptrs)
            ]

            result = MPCTensor(shares=shares, shape=self.shape, session=self.session)

        return result
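
For context, here is a minimal usage sketch showing how this private method is normally reached: arithmetic operators on MPCTensor (e.g. "+" and "*") dispatch to __apply_private_op with the matching op_str when both operands are secret shared in the same session. The party setup below (sy.VirtualMachine, get_root_client) follows the usual SyMPC/PySyft examples and is an assumption about the installed syft version, not something taken from the code above.

import torch
import syft as sy

from sympc.session import Session, SessionManager
from sympc.tensor import MPCTensor

# Two local "virtual" parties (assumed syft API, for illustration only).
parties = [sy.VirtualMachine(name=name).get_root_client() for name in ("alice", "bob")]

session = Session(parties=parties)
SessionManager.setup_mpc(session)

x = MPCTensor(secret=torch.tensor([1.0, 2.0, 3.0]), session=session)
y = MPCTensor(secret=torch.tensor([4.0, 5.0, 6.0]), session=session)

# Both operands share the same session, so these calls end up in
# __apply_private_op with op_str "add" and "mul" respectively.
print((x + y).reconstruct())
print((x * y).reconstruct())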
Example #2
    def __apply_private_op(self, y: "MPCTensor", op_str: str) -> "MPCTensor":
        """Apply an operation on 2 MPCTensor (secret shared values)

        Args:
            y (MPCTensor): tensor to apply the operation
            op_str (str): the operation

        Returns:
            MPCTensor. The operation "op_str" applied on "self" and "y"
        """

        if y.session.uuid != self.session.uuid:
            raise ValueError(
                f"Need same session {self.session.uuid} and {y.session.uuid}")

        if op_str in {"mul", "matmul"}:
            from sympc.protocol.spdz import spdz

            result = spdz.mul_master(self, y, op_str)
        elif op_str in {"sub", "add"}:
            op = getattr(operator, op_str)
            shares = [
                op(*share_tuple)
                for share_tuple in zip(self.share_ptrs, y.share_ptrs)
            ]

            result = MPCTensor(shares=shares, session=self.session)

        return result
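
The {"sub", "add"} branch works without any communication because additive secret sharing is linear: each party applies the operation to its own shares and the reconstructed result equals the operation applied to the secrets. Below is a self-contained sketch of that idea with plain Python integers; RING_SIZE, share and reconstruct are illustrative helpers, not SyMPC APIs.

import operator
import random

RING_SIZE = 2**64  # illustrative 64-bit ring

def share(secret, n_parties=2):
    """Split a secret into additive shares modulo RING_SIZE."""
    shares = [random.randrange(RING_SIZE) for _ in range(n_parties - 1)]
    shares.append((secret - sum(shares)) % RING_SIZE)
    return shares

def reconstruct(shares):
    return sum(shares) % RING_SIZE

x_shares = share(20)
y_shares = share(22)

# Mirrors the list comprehension in the add/sub branch above: the operator is
# applied locally, share by share.
op = operator.add
result_shares = [op(*share_tuple) for share_tuple in zip(x_shares, y_shares)]

assert reconstruct(result_shares) == 42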
Example #3
    def __apply_private_op(self, y: "MPCTensor", op_str: str,
                           kwargs_: Dict[Any, Any]) -> "MPCTensor":
        """Apply an operation on 2 MPCTensor (secret shared values).

        Args:
            y (MPCTensor): Tensor to apply the operation
            op_str (str): The operation
            kwargs_ (dict): Kwargs for some operations like conv2d

        Returns:
            MPCTensor. The operation "op_str" applied on "self" and "y"

        Raises:
            ValueError: If session from MPCTensor and "y" is not the same.
            TypeError: If MPC tensors are not of same share class
            NotImplementedError: When op has not been implemented yet
        """
        if self.session.protocol.share_class != y.session.protocol.share_class:
            raise TypeError("Both MPC tensors should be of same share class.")

        if y.session.uuid != self.session.uuid:
            raise ValueError(
                f"Need same session {self.session.uuid} and {y.session.uuid}")

        if op_str in TRUNCATED_OPS:
            from sympc.protocol import Falcon
            from sympc.protocol.spdz import spdz
            from sympc.tensor import ReplicatedSharedTensor

            if self.session.protocol.share_class == ShareTensor:
                result = spdz.mul_master(self, y, op_str, kwargs_)
                result.shape = MPCTensor._get_shape(op_str, self.shape,
                                                    y.shape)

            elif self.session.protocol.share_class == ReplicatedSharedTensor:
                if op_str in {"mul", "matmul"}:
                    result = Falcon.mul_master(self, y, self.session, op_str,
                                               kwargs_)
                    result.shape = MPCTensor._get_shape(
                        op_str, self.shape, y.shape)
                else:
                    raise NotImplementedError(
                        f"{op_str} has not implemented for ReplicatedSharedTensor"
                    )

            else:
                raise TypeError("Invalid Share Class")

        elif op_str == "xor":
            ring_size = int(self.share_ptrs[0].get_ring_size().get_copy())
            if ring_size == 2:
                return self + y
            else:
                return self + y - (self * y * 2)

        elif op_str in {"sub", "add"}:

            op = getattr(operator, op_str)
            shares = [
                op(*share_tuple)
                for share_tuple in zip(self.share_ptrs, y.share_ptrs)
            ]

            result = MPCTensor(shares=shares,
                               shape=self.shape,
                               session=self.session)

        return result
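
The "xor" branch relies on a standard arithmetization of XOR for secret shared bits: over a ring of size 2, addition already is XOR, while over a larger ring it is computed as x + y - 2*x*y (one multiplication plus local additions and subtractions). A small self-contained check of that identity on plain bits:

def xor_ring2(x, y):
    # In a ring of size 2, addition modulo 2 is XOR.
    return (x + y) % 2

def xor_arithmetic(x, y):
    # In a larger ring, x + y counts the set bits and 2*x*y cancels the
    # double-counted case x == y == 1, leaving exactly x XOR y.
    return x + y - 2 * x * y

for x in (0, 1):
    for y in (0, 1):
        assert xor_ring2(x, y) == (x ^ y)
        assert xor_arithmetic(x, y) == (x ^ y)

print("XOR identities hold for all bit combinations")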