# Example #1
# 0
    def process_message_type(self, message_wrapper):
        """Route a message wrapper to the handler for its message type.

        This method takes a message wrapper and attempts to process it
        against known processing methods. Composite messages would be
        unrolled and applied recursively, but are not implemented yet.

        * **message_wrapper (dict)** Dictionary containing the message
          (key ``"message"``) and meta information (key ``"type"``)

        * **out (object, bool)** the response. This can be a variety
          of object types. However, the object is typically only
          used during testing or local development with
          :class:`VirtualWorker` workers. The bool specifies if the
          response is private or not (private: we don't encode the data
          but just info on the tensor; not private: we transmit data to be
          acquired by the receiver)

        Raises:
            NotImplementedError: for ``"composite"`` messages and for any
                unrecognized message type.
        """

        # the contents of the message
        message = message_wrapper["message"]

        # this series of if/else statements uses the message_wrapper['type']
        # value to determine where to route the incoming message.

        # if the message contains an object being sent to us
        if message_wrapper["type"] == "obj":

            obj = message

            # numpy arrays need no pre-processing; Torch objects must have
            # their wrapper chain fixed before they can be registered
            if not isinstance(obj, np.ndarray):
                torch_utils.fix_chain_structure(obj)

            # register the object, saving it in self._objects and ensuring that
            # obj.owner is set correctly
            self.register(obj)

            # we do not send a response back
            # TODO: send a "successful" or "not successful" response?
            return {}, False

        # if the message contains a request for an object
        # to be sent to another worker. For example "x.get()" would execute here
        # if x is a pointer to an object hosted on this worker.
        elif message_wrapper["type"] == "req_obj":

            # Because it was pointed at, it's the first syft_object of the chain,
            # so its parent is the tensorvar
            obj = self.get_obj(message)

            # if the object being returned is a numpy array
            if isinstance(obj, np.ndarray):
                # delete the numpy array from our local registry
                self.de_register(obj)

                # send the numpy array back to the worker that asked for it
                return obj, False

            # object is a pytorch tensor or variable
            else:

                # if the object is NOT a variable, then we simply
                # take the object's parent, and return the entire object;
                # all children will be serialized recursively
                tensorvar = obj.parent

                # if the object is a variable, we have to make special
                # considerations to ensure that the data and grad are all
                # properly deregistered
                if torch_utils.is_variable_name(obj.torch_type):
                    self.de_register(tensorvar.data.child)
                    if tensorvar.grad is not None:
                        self.de_register(tensorvar.grad.child)
                        self.de_register(tensorvar.grad.data.child)

                # deregister the object itself
                self.de_register(obj)

                # return the whole tensorvar chain
                # False means we're actually returning the data (it's not private)
                return tensorvar, False

        #  A torch command from another worker involving one or more tensors
        #  hosted locally. For example: "z = x + y" would execute here.
        elif message_wrapper["type"] == "torch_cmd":

            # route the command to the torch command logic
            result = self.process_torch_command(message)

            # save the results locally in self._objects
            self.register(result)

            # return result of torch operation
            # Result is private - so only actually return a pointer to the result
            return result, True

        # a numpy command from another worker involving one or more local numpy arrays
        # hosted locally. For example "z = x + y" would execute here.
        elif message_wrapper["type"] == "numpy_cmd":

            # route the command to the numpy command logic
            result = self.process_numpy_command(message)

            # save the result locally in self._objects and ensure that
            # .owner is set correctly.
            self.register(result)

            # return the result of the numpy operation
            # Result is private - so only actually return a pointer to result
            return result, True

        # A composite command. Must be unrolled
        elif message_wrapper["type"] == "composite":
            raise NotImplementedError("Composite command not handled at the moment")

        # a message asking for a list of tensors which fit a certain criteria.
        # at the time of writing this comment, this is a partial string match on the id
        # of the tensor. For example, if self._workers has a tensor with an id
        # "12345 #boston_housing #input" then a query from a pointer to this worker of
        # bob.search("#boston_housing") would return a list of pointers including
        # the one with the "12345 #boston_housing #input" id.
        elif message_wrapper["type"] == "query":

            # perform the search over all tensors on the worker
            tensors = self.search(message)

            # return the list of pointers.
            return tensors, True

        # BUGFIX: previously this returned a bare string while every other
        # branch returns a (response, private_flag) tuple, which would break
        # callers that unpack the result. Fail loudly instead, consistent
        # with the "composite" branch above.
        raise NotImplementedError(
            "Unrecognized message type:" + message_wrapper["type"]
        )
# Example #2
# 0
    def send_obj(
        self,
        object,
        new_id,
        recipient,
        new_data_id=None,
        new_grad_id=None,
        new_grad_data_id=None,
    ):
        """send_obj(self, object, new_id, recipient, new_data_id=None,
        new_grad_id=None, new_grad_data_id=None)
        Sends an object to another :class:`VirtualWorker` and removes it from
        the local worker.

        :Parameters:
        * **object (object)** a python object to be sent
        * **new_id (int)** the id where the object should be stored
        * **recipient (** :class:`VirtualWorker` **)** the worker object to send the message to.
        * **new_data_id (int, optional)** the id for Var.data (required when
          sending a variable)
        * **new_grad_id (int, optional)** the id for Var.grad (required when
          sending a variable)
        * **new_grad_data_id (int, optional)** the id for Var.grad.data
          (required when sending a variable)
        """

        # if the object is a torch object, run some special checks, otherwise just set the ID
        if hasattr(object, "child"):
            object.child.id = new_id

            if torch_utils.is_variable_name(object.child.torch_type):
                # variables carry .data / .grad / .grad.data chains, so all
                # three extra ids must be supplied up front
                if (
                    new_data_id is None
                    or new_grad_id is None
                    or new_grad_data_id is None
                ):
                    raise AttributeError(
                        (
                            "Please provide the new_data_id, new_grad_id, and "
                            "new_grad_data_id args, to be able to point to Var.data, .grad"
                        )
                    )

                if self.get_pointer_to(recipient, new_data_id) is not None:
                    # BUGFIX: report the id that was actually checked
                    # (new_data_id) instead of new_id
                    raise MemoryError(
                        "You already point at ", recipient, ":", new_data_id
                    )

                # the four ids must be pairwise distinct; explicit raises
                # (instead of `assert`) so the check survives `python -O`,
                # while keeping AssertionError as the raised type
                err_msg = "You can't have the same id for {} and {}."
                named_ids = [
                    ("var", new_id),
                    ("var.data", new_data_id),
                    ("var.grad", new_grad_id),
                    ("var.grad.data", new_grad_data_id),
                ]
                for i, (name_a, id_a) in enumerate(named_ids):
                    for name_b, id_b in named_ids[i + 1 :]:
                        if id_a == id_b:
                            raise AssertionError(err_msg.format(name_a, name_b))

                object.data.child.id = new_data_id

                # ensure a grad chain exists so it can be re-identified too
                if object.grad is None:
                    object.init_grad_()

                object.grad.child.id = new_grad_id
                object.grad.data.child.id = new_grad_data_id
        else:
            object.id = new_id

        # refuse to create a second pointer to the same remote id
        if self.get_pointer_to(recipient, new_id) is not None:
            raise MemoryError("You already point at ", recipient, ":", new_id)

        # sending an object to yourself makes no sense (it would be removed
        # from the very worker that is supposed to receive it)
        if self is recipient:
            raise MemoryError(
                (
                    "The recipient {} is the same as the owner {} of the object {}"
                    "that you are trying to send"
                ).format(recipient, self, object.id)
            )

        # encode the object for transport
        object = encode.encode(object, retrieve_pointers=False, private_local=False)

        # We don't need any response to proceed to registration
        self.send_msg(message=object, message_type="obj", recipient=recipient)