Example #1
0
    bad = WebsocketClientWorker(id="bad",
                                port=8777,
                                host="localhost",
                                hook=hook)

    good = WebsocketClientWorker(id="good",
                                 port=8778,
                                 host="localhost",
                                 hook=hook)

    # When staring server, we already register a tensor
    # object with tab "test" in the server worker

    try:
        print("Search test with bad")
        data_pt = bad.search("test")
    except ObjectNotFoundError:
        print("Error: test is not found with bad worker!!!")

    print("Search test with good")
    data_pt = good.search("test")
    print("Test is found with good worker!!!")
    print("Here is the test data")
    print(data_pt[0].copy().get())

    # Reason:
    # search the tensor object on server worker with tensor tag
    # via the local client worker returns ERROR because
    # set_obj() on the server side does not assign the owner attribute on
    # the object.
    # This leads to the fact that, when.search() is called,
Example #2
0
import torch
from torch import optim
import syft
from syft.workers.websocket_client import WebsocketClientWorker
# Patch torch so tensors gain PySyft's federated-learning methods (.send, etc.).
hook = syft.TorchHook(torch)
# create a client worker mapping to the server worker in remote machine
remote_client = WebsocketClientWorker(
    host=
    '192.168.0.102',  # the host of remote machine, the same as the Server host
    hook=hook,
    id='liuwang',
    port=8182)
print('>>> remote_client', remote_client)

# get the data pointers which point to the real data in remote machine for training model locally
# NOTE(review): assumes the remote worker holds tensors tagged with both
# "toy"+"features" and "toy"+"labels" — confirm against the server setup.
features = remote_client.search(["toy", "features"])
labels = remote_client.search(["toy", "labels"])
print('>>> x:', features)
print('>>> y:', labels)

# a toy model: single linear layer mapping 2 features to 1 output
model = torch.nn.Linear(2, 1)
# Send a *copy* of the local model to the remote worker; `remote_model`
# is a pointer whose parameters live on `remote_client`.
remote_model = model.copy().send(remote_client)


def train(x, y, N) -> torch.nn.Module:
    # Training Logic
    opt = optim.SGD(params=remote_model.parameters(), lr=0.1)
    for iter in range(N):
        # 1) erase previous gradients (if they exist)
        opt.zero_grad()