Example #1
0
def start_websocket_server_worker(id,
                                  host,
                                  port,
                                  hook,
                                  verbose,
                                  keep_labels=None,
                                  training=True):
    """Spin up a websocket server worker and attach a local MNIST dataset.

    Args:
        id: identifier of the worker (note: shadows the ``id`` builtin,
            kept for interface compatibility).
        host: hostname/IP the server binds to.
        port: port the server listens on.
        hook: syft hook instance.
        verbose: verbosity flag forwarded to the worker.
        keep_labels: labels retained when building the training subset.
        training: if True, serve the MNIST train split filtered to
            ``keep_labels``; otherwise serve the full test split.

    Returns:
        The started WebsocketServerWorker instance.
    """
    worker = WebsocketServerWorker(id=id,
                                   host=host,
                                   port=port,
                                   hook=hook,
                                   verbose=verbose)

    # Toy data: MNIST with the standard normalization stats.
    mnist_dataset = datasets.MNIST(
        root="./data",
        train=training,
        download=True,
        transform=transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.1307, ), (0.3081, ))
        ]),
    )

    if training:
        # Keep only the samples whose label appears in keep_labels.
        indices = np.isin(mnist_dataset.targets, keep_labels).astype("uint8")
        logger.info("number of true indices: %s", indices.sum())
        mask = torch.tensor(indices)
        # Put the sample axis last, mask, then restore the (N, 28, 28) layout.
        selected_data = (torch.native_masked_select(
            mnist_dataset.data.transpose(0, 2),
            mask).view(28, 28, -1).transpose(2, 0))
        logger.info("after selection: %s", selected_data.shape)
        selected_targets = torch.native_masked_select(mnist_dataset.targets,
                                                      mask)

        dataset = sy.BaseDataset(data=selected_data,
                                 targets=selected_targets,
                                 transform=mnist_dataset.transform)
        key = "mnist"
    else:
        dataset = sy.BaseDataset(
            data=mnist_dataset.data,
            targets=mnist_dataset.targets,
            transform=mnist_dataset.transform,
        )
        key = "mnist_testing"

    worker.add_dataset(dataset, key=key)

    logger.info("datasets: %s", worker.datasets)
    if training:
        logger.info("len(datasets[mnist]): %s", len(worker.datasets["mnist"]))

    worker.start()
    return worker
def start_websocket_server_worker(id,
                                  host,
                                  port,
                                  hook,
                                  verbose,
                                  keep_labels=None,
                                  training=True):
    """Spin up a websocket server worker serving the CNN-virus dataset.

    Args:
        id: identifier of the worker (note: shadows the ``id`` builtin,
            kept for interface compatibility).
        host: hostname/IP the server binds to.
        port: port the server listens on.
        hook: syft hook instance.
        verbose: verbosity flag forwarded to the worker.
        keep_labels: labels retained when building the training subset.
        training: if True, filter samples to ``keep_labels``; otherwise
            serve all loaded data.

    Returns:
        The started WebsocketServerWorker instance.
    """
    loaded = load_cnn_virus()
    features, labels = loaded[0], loaded[1]
    worker = websocket_server.WebsocketServerWorker(id=id,
                                                    host=host,
                                                    port=port,
                                                    hook=hook,
                                                    verbose=verbose)

    if training:
        # Mask of samples whose label appears in keep_labels.
        indices = np.isin(labels, keep_labels).astype("uint8")
        logger.info("number of true indices: %s", indices.sum())
        mask = torch.tensor(indices)
        # Move the sample axis last, mask, then reshape back to (N, 470)
        # and place the result on the target device.
        selected_data = (torch.native_masked_select(
            features.transpose(0, 1),
            mask).view(470, -1).transpose(1, 0).to(device))
        logger.info("after selection: %s", selected_data.shape)
        selected_targets = torch.native_masked_select(labels, mask).to(device)

        dataset = sy.BaseDataset(data=selected_data, targets=selected_targets)
        key = "mnist"
    else:
        dataset = sy.BaseDataset(
            data=features.to(device),
            targets=labels.to(device),
        )
        key = "mnist_testing"

    worker.add_dataset(dataset, key=key)

    # Log how many samples of each of the first five labels are available.
    logger.info("MNIST dataset (%s set), available numbers on %s: ",
                "train" if training else "test", id)
    counts = []
    for label in range(5):
        counts.append((dataset.targets == label).sum().item())
        logger.info("      %s: %s", label, counts[label])

    logger.info("datasets: %s", worker.datasets)
    if training:
        logger.info("len(datasets[mnist]): %s", len(worker.datasets[key]))

    worker.start()
    return worker
Example #3
0
def start_websocket_server_worker(
    id, host, port, hook, verbose, keep_labels=None, training=True
):  # pragma: no cover
    """Spin up a websocket server worker and register several toy datasets.

    Registered datasets: MNIST (filtered to ``keep_labels`` when training),
    a small vectors example, an XOR example, a gaussian-mixture sample and
    a partial iris dataset.

    Args:
        id: identifier of the worker (note: shadows the ``id`` builtin,
            kept for interface compatibility).
        host: hostname/IP the server binds to.
        port: port the server listens on.
        hook: syft hook instance.
        verbose: verbosity flag forwarded to the worker.
        keep_labels: labels retained when building the MNIST training subset.
        training: if True, serve the filtered MNIST train split; otherwise
            the full test split.

    Returns:
        The started WebsocketServerWorker instance.
    """
    worker = WebsocketServerWorker(id=id, host=host, port=port, hook=hook, verbose=verbose)

    # Toy data (mnist example)
    mnist_dataset = datasets.MNIST(
        root="./data",
        train=training,
        download=True,
        transform=transforms.Compose(
            [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
        ),
    )

    if training:
        # Keep only the samples whose label appears in keep_labels.
        indices = np.isin(mnist_dataset.targets, keep_labels).astype("uint8")
        logger.info("number of true indices: %s", indices.sum())
        mask = torch.tensor(indices)
        # Put the sample axis last, mask, then restore the (N, 28, 28) layout.
        selected_data = (
            torch.native_masked_select(mnist_dataset.data.transpose(0, 2), mask)
            .view(28, 28, -1)
            .transpose(2, 0)
        )
        logger.info("after selection: %s", selected_data.shape)
        selected_targets = torch.native_masked_select(mnist_dataset.targets, mask)

        worker.add_dataset(
            sy.BaseDataset(
                data=selected_data,
                targets=selected_targets,
                transform=mnist_dataset.transform,
            ),
            key="mnist",
        )
    else:
        worker.add_dataset(
            sy.BaseDataset(
                data=mnist_dataset.data,
                targets=mnist_dataset.targets,
                transform=mnist_dataset.transform,
            ),
            key="mnist_testing",
        )

    # Toy data (vectors example)
    data_vectors = torch.tensor(
        [[-1, 2.0], [0, 1.1], [-1, 2.1], [0, 1.2]], requires_grad=True
    )
    target_vectors = torch.tensor([[1], [0], [1], [0]])
    worker.add_dataset(sy.BaseDataset(data_vectors, target_vectors), key="vectors")

    # Toy data (xor example)
    data_xor = torch.tensor(
        [[0.0, 1.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]], requires_grad=True
    )
    target_xor = torch.tensor([1.0, 1.0, 0.0, 0.0], requires_grad=False)
    worker.add_dataset(sy.BaseDataset(data_xor, target_xor), key="xor")

    # Gaussian mixture dataset
    data, target = utils.create_gaussian_mixture_toy_data(nr_samples=100)
    worker.add_dataset(sy.BaseDataset(data, target), key="gaussian_mixture")

    # Partial iris dataset
    data, target = utils.iris_data_partial()
    worker.add_dataset(sy.BaseDataset(data, target), key="iris")

    logger.info("datasets: %s", worker.datasets)
    if training:
        logger.info("len(datasets[mnist]): %s", len(worker.datasets["mnist"]))

    worker.start()
    return worker
Example #4
0
def create_app(node_id, debug=False, database_url=None, data_dir: str = None):
    """Create / configure a flask socket application instance.

    Args:
        node_id (str): ID of the Grid Node.
        debug (bool): Debug flag.
        database_url (str): Optional Redis database URL; when given,
            persistent mode is enabled for models / tensors.
        data_dir (str): Optional dataset directory; when given, the data is
            loaded and registered on the local worker ("mnist" in the path
            selects MNIST, anything else is treated as a pneumonia
            image-folder dataset).

    Returns:
        app: Flask application instance.
    """
    app = Flask(__name__)
    app.debug = debug

    app.config["SECRET_KEY"] = "justasecretkeythatishouldputhere"

    # Enable persistent mode:
    # overwrite syft.object_storage methods to persist models / tensors.
    if database_url:
        app.config["REDISCLOUD_URL"] = database_url
        from .main.persistence import database, object_storage

        db_instance = database.set_db_instance(database_url)
        object_storage.set_persistent_mode(db_instance)

    from .main import html, ws, hook, local_worker, auth

    # Global socket handler
    sockets = Sockets(app)

    # Register this node's worker with the hook's local worker.
    local_worker.id = node_id
    hook.local_worker._known_workers[node_id] = local_worker
    local_worker.add_worker(hook.local_worker)

    # Optionally load a dataset and register its tensors on the worker.
    if data_dir:
        print("register data")
        if "mnist" in data_dir.lower():
            dataset = MNIST(
                root="./data",
                train=True,
                download=True,
                transform=transforms.Compose(
                    [transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
                ),
            )
            if node_id in KEEP_LABELS_DICT:
                # Keep only samples whose label this node should hold.
                indices = np.isin(dataset.targets, KEEP_LABELS_DICT[node_id]).astype(
                    "uint8"
                )
                selected_data = (
                    torch.native_masked_select(  # pylint:disable=no-member
                        dataset.data.transpose(0, 2),
                        torch.tensor(indices),  # pylint:disable=not-callable
                    )
                    .view(28, 28, -1)
                    .transpose(2, 0)
                )
                selected_targets = torch.native_masked_select(  # pylint:disable=no-member
                    dataset.targets,
                    torch.tensor(indices),  # pylint:disable=not-callable
                )
            else:
                # BUGFIX: selected_data / selected_targets were previously
                # left unassigned on this path, causing an UnboundLocalError
                # at register_obj below. Fall back to the full dataset.
                selected_data = dataset.data
                selected_targets = dataset.targets
        else:
            # Pneumonia image-folder dataset with augmentation transforms.
            train_tf = [
                transforms.RandomVerticalFlip(p=0.5),
                transforms.RandomAffine(
                    degrees=30,
                    translate=(0, 0),
                    scale=(0.85, 1.15),
                    shear=10,
                ),
                transforms.Resize(224),
                transforms.RandomCrop(224),
                transforms.ToTensor(),
                transforms.Normalize((0.57282609,), (0.17427578,)),
            ]
            # Remap folder-derived class indices to the expected label order.
            target_dict_pneumonia = {0: 1, 1: 0, 2: 2}
            dataset = ImageFolder(
                data_dir,
                transform=transforms.Compose(train_tf),
                target_transform=lambda x: target_dict_pneumonia[x],
            )
            # Materialize the whole dataset into tensors (applies transforms).
            data, targets = [], []
            for d, t in tqdm(dataset, total=len(dataset)):
                data.append(d)
                targets.append(t)
            selected_data = torch.stack(data)  # pylint:disable=no-member
            selected_targets = torch.from_numpy(np.array(targets))  # pylint:disable=no-member

        local_worker.register_obj(selected_data, "data")
        local_worker.register_obj(selected_targets, "targets")

        print("registered data")

    # Register app blueprints
    app.register_blueprint(html, url_prefix=r"/")
    sockets.register_blueprint(ws, url_prefix=r"/")

    # Set Authentication configs
    app = auth.set_auth_configs(app)

    return app

model = Net()

# Labels this worker keeps; all other samples are filtered out below.
keep_labels = KEEP_LABELS_DICT[worker_id]
mnist_dataset = datasets.MNIST(
    root="./data",
    train=training,
    download=True,
    transform=transforms.Compose(
        [transforms.ToTensor(),
         transforms.Normalize((0.1307, ), (0.3081, ))]),
)
# Mask of samples whose label appears in keep_labels.
indices = np.isin(mnist_dataset.targets, keep_labels).astype("uint8")
# Put the sample axis last, mask, then restore the (N, 28, 28) layout.
selected_data = (th.native_masked_select(mnist_dataset.data.transpose(0, 2),
                                         th.tensor(indices)).view(
                                             28, 28, -1).transpose(2, 0))
selected_targets = th.native_masked_select(mnist_dataset.targets,
                                           th.tensor(indices))
# BUGFIX: this identical BaseDataset was previously built twice in a row;
# the redundant duplicate statement was removed.
dataset = sy.BaseDataset(data=selected_data,
                         targets=selected_targets,
                         transform=mnist_dataset.transform)

trainloader = th.utils.data.DataLoader(dataset, batch_size=64, shuffle=True)

optimizer = optim.SGD(model.parameters(), lr=0.001)