def test_blockchain_gateway_filters_sessions(blockchain_gateway,
                                             communication_manager):
    """
    Ensures that the gateway won't intercept messages not intended for it
    """
    # A session event with empty optimizer_params — the gateway's filter
    # is expected to reject this one.
    session_event = {
        "optimizer_params": "",
        "serialized_job": make_serialized_job()
    }
    session_key = {
        "dataset_uuid": 5678,
        "label_column_name": "label"
    }
    receipt = setter(blockchain_gateway._client,
                     session_key,
                     blockchain_gateway._port,
                     0,
                     session_event,
                     flag=True)
    assert receipt
    blockchain_gateway._listen(blockchain_gateway._handle_new_session_creation,
                               blockchain_gateway._filter_new_session)
    # The gateway listened for decentralized learning, did not hear this
    # event (filter_new_session() == False), and therefore must not have
    # updated the communication manager.
    assert communication_manager.dummy_msg_type == "None", \
        "Shouldn't have heard anything but heard a message with uuid {}".format(
            communication_manager.job_data["dataset_uuid"])
    assert communication_manager.data_provider_info == "None", \
        "Shouldn't have heard anything!"
    assert communication_manager.job_info == "None", \
        "Shouldn't have heard anything!"
def test_blockchain_gateway_can_listen_decentralized_learning(
        blockchain_gateway, communication_manager):
    """
    Uses Mock Communication Manager to ensure that the Gateway
    can listen for decentralized learning.
    """
    serialized_job = make_serialized_job()
    # Non-empty optimizer_params so the gateway's filter accepts the event.
    new_session_event = {
        "optimizer_params": "this cannot be empty",
        "serialized_job": serialized_job
    }
    # Pass flag= by keyword for consistency with the sibling filter test.
    tx_receipt = setter(blockchain_gateway._client, {
        "dataset_uuid": 1357,
        "label_column_name": "label"
    }, blockchain_gateway._port, 0, new_session_event, flag=True)
    assert tx_receipt
    blockchain_gateway._listen(blockchain_gateway._handle_new_session_creation,
                               blockchain_gateway._filter_new_session)
    # at this point we should listen for decentralized learning
    # hear it (filter_new_session() == True)
    # and update our communication manager
    assert communication_manager.dummy_msg_type == RawEventTypes.NEW_MESSAGE.name, \
        "Wrong msg_type"
    assert communication_manager.data_provider_info == {
        "dataset_uuid": 1357,
        "label_column_name": "label"
    }
    # Reset the mock so state does not leak into other tests.
    communication_manager.reset()
# Example #3 (0)
def initialization_payload(small_filepath):
    """Build a minimal session payload wrapping a job for *small_filepath*."""
    optimizer_params = {
        "listen_bound": 2,
        "listen_iterations": 0
    }
    payload = {
        "optimizer_params": optimizer_params,
        "serialized_job": make_serialized_job(small_filepath)
    }
    return payload
def new_session_event(mnist_uuid, mnist_uuid_two):
    """Build a FEDERATED_AVERAGING session event for the two given datasets."""
    return {
        "optimizer_params": {
            "num_averages_per_round": 1,
            "max_rounds": 2,
            "optimizer_type": "FEDERATED_AVERAGING"
        },
        "serialized_job": make_serialized_job(),
        "participants": [mnist_uuid, mnist_uuid_two]
    }
# Example #5 (0)
def new_session_event(mnist_filepath):
    """Wrap a serialized job for *mnist_filepath* in a keyed transaction dict."""
    content = {
        "optimizer_params": {
            "listen_bound": 2,
            "total_bound": 2
        },
        "serialized_job": make_serialized_job(mnist_filepath)
    }
    # No key for this event — only the CONTENT entry carries data.
    return {
        TxEnum.KEY.name: None,
        TxEnum.CONTENT.name: content
    }
# Example #6 (0)
def new_session_event(mnist_filepath):
    """Build a keyless averaging-session transaction for *mnist_filepath*."""
    content = {
        "optimizer_params": {
            "num_averages_per_round": 2,
            "max_rounds": 2
        },
        "serialized_job": make_serialized_job(mnist_filepath)
    }
    # KEY is None: this transaction is identified by its content alone.
    return {
        TxEnum.KEY.name: None,
        TxEnum.CONTENT.name: content
    }
# Example #7 (0)
def new_session_event(mnist_uuid):
    """Build a two-participant averaging session event.

    Participants are a fixed integration-test dataset uuid plus the
    caller-supplied *mnist_uuid*.  (Removed the commented-out
    TxEnum KEY/CONTENT wrapper — dead code; this variant intentionally
    returns the bare event dict.)
    """
    serialized_job = make_serialized_job()
    new_session_event = {
        "optimizer_params": {
            "num_averages_per_round": 2,
            "max_rounds": 2
        },
        "serialized_job": serialized_job,
        "participants": ['0fcf9cbb-39df-4ad6-9042-a64c87fecfb3', mnist_uuid]
    }
    return new_session_event
# Example #8 (0)
def initialization_payload(small_uuid):
    """Build a keyed new-session transaction for the dataset *small_uuid*."""
    key = {
        "dataset_uuid": small_uuid,
        "label_column_name": "label"
    }
    content = {
        "optimizer_params": {
            "num_averages_per_round": 2,
            "max_rounds": 2
        },
        "serialized_job": make_serialized_job(),
        "participants": [
            '0fcf9cbb-39df-4ad6-9042-a64c87fecfb3',
            'd16c6e86-d103-4e71-8741-ee1f888d206c'
        ]
    }
    return {
        TxEnum.KEY.name: key,
        TxEnum.CONTENT.name: content
    }
# Example #9 (0)
#!/usr/bin/env python

import json
import requests

import ipfsapi

from tests.testing_utils import make_serialized_job

# Build a serialized job and tune its hyperparameters for the integration
# run.  NOTE(review): "bleh" looks like a placeholder filepath argument —
# presumably ignored or irrelevant to make_serialized_job here; confirm.
serialized_job = make_serialized_job("bleh")
serialized_job["job_data"]["hyperparams"]["batch_size"] = 32
serialized_job["job_data"]["hyperparams"]["epochs"] = 100
# client one corresponds to artifacts/integration/configuration.ini
# client one corresponds to tests/artifacts/integration
# client two corresponds to artifacts/integration/configuration2.ini
# client two corresponds to tests/artifacts/datasets
mnist_uuid = '0fcf9cbb-39df-4ad6-9042-a64c87fecfb3'
mnist_uuid_2 = 'd16c6e86-d103-4e71-8741-ee1f888d206c'
# Session keys pair each client's dataset uuid with its label column name.
new_session_key = {"dataset_uuid": mnist_uuid, "label_column_name": "label"}
new_session_key_2 = {
    "dataset_uuid": mnist_uuid_2,
    "label_column_name": "label"
}
new_session_event = {
    "optimizer_params": {
        "num_averages_per_round": 0,
        "max_rounds": 5
    },
    "serialized_job": serialized_job,
    "participants": [mnist_uuid]
    # "participants": ['0fcf9cbb-39df-4ad6-9042-a64c87fecfb3',