Example #1
    def execute_command(self, message: tuple) -> PointerTensor:
        """
        Executes commands received from other workers.

        Args:
            message: A tuple specifying the command and the args.

        Returns:
            A pointer to the result.
        """

        (command_name, _self, args, kwargs), return_ids = message

        # TODO add kwargs
        # Handle methods
        if _self is not None:
            if sy.torch.is_inplace_method(command_name):
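                # in-place methods mutate _self directly on this worker,
                # so there is no separate result to register or point to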
                getattr(_self, command_name)(*args, **kwargs)
                return
            else:
                response = getattr(_self, command_name)(*args, **kwargs)
        # Handle functions
        else:
            # At this point, the command is ALWAYS a path to a
            # function (i.e., torch.nn.functional.relu). Thus,
            # we need to fetch this function and run it.

            sy.torch.command_guard(command_name, "torch_modules")

            paths = command_name.split(".")
            command = self
            for path in paths:
                command = getattr(command, path)

            response = command(*args, **kwargs)

        # some functions don't return anything (such as .backward())
        # so we need to check for that here.
        if response is not None:
            # Register the response and create pointers for tensor elements
            try:
                response = sy.frameworks.torch.hook_args.register_response(
                    command_name, response, list(return_ids), self)
                return response
            except ResponseSignatureError:
                return_ids = IdProvider(return_ids)
                response = sy.frameworks.torch.hook_args.register_response(
                    command_name, response, return_ids, self)
                raise ResponseSignatureError(return_ids.generated)
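
To make the expected input concrete, below is a minimal sketch of how a sending side might assemble such a message. The ((command_name, _self, args, kwargs), return_ids) layout is taken from the destructuring above; the tensor values, the ids, and the dispatch call at the end are illustrative assumptions, not PySyft's actual wire format.

import torch

# Hypothetical ids under which the remote worker should register the
# result, so the sender can later refer to it through a pointer.
return_ids = (7430962108,)

x = torch.tensor([1.0, 2.0])
y = torch.tensor([3.0, 4.0])

# Function command: _self is None, so command_name is a dotted path that
# execute_command resolves with the getattr chain shown above.
function_message = (("torch.add", None, (x, y), {}), return_ids)

# Method command: _self is the object the method is called on, and args
# holds only the remaining arguments.
method_message = (("add", x, (y,), {}), return_ids)

# On the receiving worker this would be handled roughly as (hypothetical call):
# pointer = worker.execute_command(function_message)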
Example #2
    "Plan",
    "codes",
    "LoggingTensor",
    "PointerTensor",
    "VirtualGrid",
    "ObjectWrapper",
    "LargePrecisionTensor",
]

local_worker = None
torch = None

if "ID_PROVIDER" not in globals():
    from syft.generic import IdProvider

    ID_PROVIDER = IdProvider()


def create_sandbox(gbs, verbose=True, download_data=True):
    """There's some boilerplate stuff that most people who are
    just playing around would like to have. This will create
    that for you"""

    try:
        torch = gbs["torch"]
    except KeyError:
        torch = gbs["th"]

    global hook
    global bob
    global theo
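
The excerpt is cut off here, but the global declarations show that the function goes on to set up a hook and demo workers. A typical call might look like the sketch below, under the assumption that the module above is importable as syft and that create_sandbox places the sandbox objects into the globals dictionary it receives.

import torch
import syft as sy

# Pass this module's globals so create_sandbox can find torch (under the
# key "torch" or "th") and install the sandbox objects into this namespace.
sy.create_sandbox(globals(), verbose=False, download_data=False)

# After the call, names such as hook, bob, and theo are expected to be
# available here (an assumption based on the global declarations above).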