示例#1
0
    def __init__(
        self,
        server_0: Optional[Player] = None,
        server_1: Optional[Player] = None,
        server_2: Optional[Player] = None,
        tensor_factory: Optional[AbstractFactory] = None,
        prime_factory: Optional[AbstractFactory] = None,
        odd_factory: Optional[AbstractFactory] = None,
        **kwargs,
    ) -> None:
        """Set up the three-server protocol.

        Servers not supplied explicitly are resolved from the global config
        under the names "server0"/"server1"/"server2" (with the legacy
        "crypto-producer" as a fallback for the third server). Factories
        default to choices compatible with the base protocol's tensor factory.
        """
        config = get_config()
        server_0 = server_0 or config.get_player("server0")
        server_1 = server_1 or config.get_player("server1")
        server_2 = (
            server_2
            or config.get_player("server2")
            or config.get_player("crypto-producer")
        )

        for resolved in (server_0, server_1, server_2):
            assert resolved is not None

        super(SecureNN, self).__init__(
            server_0=server_0,
            server_1=server_1,
            triple_source=server_2,
            tensor_factory=tensor_factory,
            **kwargs,
        )
        self.server_2 = server_2

        if odd_factory is None:
            # Pair the odd-modulus factory with the ring factory in use.
            odd_factory = (
                oddint64_factory
                if self.tensor_factory is int64factory
                else self.tensor_factory
            )

        if prime_factory is None:
            # 107 must exceed ceil(log2(modulus)) for the share-conversion
            # tricks to be sound; holds for all supported factories.
            prime = 107
            assert prime > math.ceil(math.log2(self.tensor_factory.modulus))
            prime_factory = native_factory(
                self.tensor_factory.native_type, prime)

        self.prime_factory = prime_factory
        self.odd_factory = odd_factory
        # All factories must agree on the underlying native dtype.
        assert self.prime_factory.native_type == self.tensor_factory.native_type
        assert self.odd_factory.native_type == self.tensor_factory.native_type
示例#2
0
    def convert(
        self,
        graph_def: Any,
        input_player: Union[str, Player],
        inputter_fn: Optional[Union[TFEInputter, List[TFEInputter]]] = None,
    ) -> Any:
        """Convert a frozen GraphDef to a TFE Graph.

        Args:
          graph_def: Frozen GraphDef to convert; its last node is assumed to
            be the graph output.
          input_player: Player feeding the graph inputs, given either as a
            Player instance or as a configured player name.
          inputter_fn: A single inputter, or a list/tuple of them, supplying
            values for the graph's placeholders; None means no placeholders.

        Returns:
          The TFE tensor registered for the graph's output node.

        Raises:
          ValueError: If ``graph_def`` contains no nodes.
        """
        if not graph_def.node:
            raise ValueError("An empty model was passed to the converter.")

        if isinstance(input_player, str):
            # BUG FIX: the caller-supplied name used to be ignored and the
            # player "input-provider" was always looked up; honor the name
            # the caller actually passed.
            input_player = get_config().get_player(input_player)
        assert isinstance(input_player, Player)

        if inputter_fn is None:
            inputs = []
        elif isinstance(inputter_fn, (list, tuple)):
            inputs = inputter_fn
        else:
            inputs = [inputter_fn]
        # Consumed one entry per Placeholder node during registration.
        inputs_iterable = enumerate(inputs)

        # Identify if there are special ops in pb file,
        # e.g. required_space_to_batch_paddings
        # If yes, identify the inputs and outputs of these special ops.
        output_name = graph_def.node[-1].name  # Assume output is last node.
        specop_dict, specop_inputs, specop_outputs = find_specops(
            graph_def, output_name
        )

        # Create a dictionary excluding all the sub ops related to
        # required_space_to_batch_paddings. Except the sub ops related to the input
        # or output of this special ops.
        pb_trimmed, graph_def = select_relevant_ops(
            specop_inputs, specop_outputs, graph_def
        )
        node_list = pb_trimmed.values()

        # If the ops are not related to the special ops, use the existing approach
        # to register them. Otherwise for the special ops replace the output from
        # the sub ops by the output from the high level operation then register.
        for node in node_list:
            if node.name not in specop_outputs:
                self._register_op(node, inputs_iterable, input_player, graph_def)

            else:
                # Register high level special operations
                for s in specop_dict:
                    # If this node is the output of the current specop, register it
                    if match_numbered_scope(
                        s, node.name, return_group=False, numbered=False
                    ):
                        self._register_specop(node, specop_dict[s])

        return self.outputs[output_name]
示例#3
0
 def __init__(self,
              config: Optional[Config] = None,
              protocol: Optional[Protocol] = None,
              player: Optional[Union[str, Player]] = None) -> None:
     """Initialize converter state: config, protocol, and model provider.

     ``player`` may be a Player, a configured player name, or None (in
     which case the player named 'model-provider' is used).
     """
     self.config = get_config() if config is None else config
     self.protocol = get_protocol() if protocol is None else protocol
     if isinstance(player, str):
         self.model_provider = self.config.get_player(player)
     elif player is None:
         self.model_provider = self.config.get_player('model-provider')
     else:
         self.model_provider = player
     # Maps graph node names to their registered TFE outputs.
     self.outputs = {}
示例#4
0
 def __init__(
     self,
     registry,
     config: Optional[Config] = None,
     protocol: Optional[Protocol] = None,
     model_provider: Optional[Union[str, Player]] = None,
 ) -> None:
     """Store the op registry and resolve the config and model provider.

     ``model_provider`` may be a Player, a configured player name, or None
     (which selects the player named "model-provider").
     """
     self.config = get_config() if config is None else config
     # A protocol argument, when given, becomes the globally active protocol.
     if protocol is not None:
         tfe.set_protocol(protocol)
     if isinstance(model_provider, str):
         provider = self.config.get_player(model_provider)
     elif model_provider is None:
         provider = self.config.get_player("model-provider")
     else:
         provider = model_provider
     self.model_provider = provider
     self.registry = registry
     # Maps graph node names to their registered TFE outputs.
     self.outputs = {}
示例#5
0
    def convert(
        self,
        graph_def: Any,
        register: Dict[str, Any],
        input_player: Union[str, Player],
        inputter_fn: Optional[Union[TFEInputter, List[TFEInputter]]] = None
    ) -> Any:
        """Convert a frozen GraphDef to a TFE Graph.

        Args:
          graph_def: Frozen GraphDef to convert; its last node is assumed to
            be the graph output.
          register: Maps op-type names to handler callables of the form
            ``handler(converter, node, inputs)``.
          input_player: Player feeding the graph inputs, given either as a
            Player instance or as a configured player name.
          inputter_fn: A single inputter, or a list/tuple of them, supplying
            values for the graph's placeholders; None means no placeholders.

        Returns:
          The TFE tensor registered for the graph's output node.

        Raises:
          ValueError: If ``graph_def`` contains no nodes.
          InvalidArgumentError: If fewer inputters than placeholders were
            supplied.
        """
        if not graph_def.node:
            raise ValueError("An empty model was passed to the converter.")

        if isinstance(input_player, str):
            # BUG FIX: the caller-supplied name used to be ignored and the
            # player 'input-provider' was always looked up; honor the name
            # the caller actually passed.
            input_player = get_config().get_player(input_player)
        assert isinstance(input_player, Player)

        if inputter_fn is None:
            inputs = []
        elif isinstance(inputter_fn, (list, tuple)):
            # Accept tuples as well as lists, matching the newer converter.
            inputs = inputter_fn
        else:
            inputs = [inputter_fn]
        # Consumed one entry per Placeholder node during registration.
        inputs_iterable = enumerate(inputs)

        # Identify if there are special ops in pb file,
        # e.g. required_space_to_batch_paddings
        # If yes, identify the inputs and outputs of these special ops.
        output_name = graph_def.node[-1].name  # Assume output is last node.
        specop_dict, specop_inputs, specop_outputs = find_specops(
            graph_def, output_name)

        # Create a dictionary excluding all the sub ops related to
        # required_space_to_batch_paddings, except the sub ops related to the
        # input or output of these special ops.
        pb_trimmed = select_relevant_ops(specop_inputs, specop_outputs,
                                         graph_def)
        node_list = pb_trimmed.values()

        # If the ops are not related to the special ops, use the existing
        # approach to register them. Otherwise, for the special ops, replace
        # the output from the sub ops by the output from the high-level
        # operation, then register.
        for node in node_list:
            if node.name not in specop_outputs:
                output = strip_tensor_info(node.name)
                # Renamed from `inputs` to avoid shadowing the inputter list
                # that `inputs_iterable` was built from.
                node_inputs = [strip_tensor_info(x) for x in node.input]
                if node.op == "Placeholder":
                    try:
                        _, item = next(inputs_iterable)
                    except StopIteration:
                        raise InvalidArgumentError(
                            "Not enough placeholders supplied") from None

                    x = self.protocol.define_private_input(input_player, item)
                    self.outputs[output] = x
                    continue

                self.outputs[output] = register[node.op](self, node,
                                                         node_inputs)
            else:
                # Register high level special operations
                for s in specop_dict:
                    input_list = specop_dict[s]['inputs']
                    output_list = specop_dict[s]['outputs']

                    # Handle edge cases if the ops return multiple outputs
                    op_handler = register[specop_dict[s]['op']]

                    nodes = specop_dict[s]['interiors']
                    if not nodes:
                        nodes = node
                    outs = op_handler(self, nodes, input_list)
                    if isinstance(outs, (list, tuple)):
                        for i, x in enumerate(outs):
                            self.outputs[output_list[i]] = x
                    else:
                        self.outputs[output_list[0]] = outs

        return self.outputs[output_name]