# NOTE: the imports below follow FATE 1.x pipeline examples; component names
# vary between FATE releases, so treat this block as an assumption and adjust
# it to the version you run.
import json

from pipeline.backend.pipeline import PipeLine
from pipeline.component import (
    DataIO, DataStatistics, DataTransform, Evaluation, FeatureScale,
    FederatedSample, HeteroFeatureBinning, HeteroFeatureSelection,
    HeteroLinR, HeteroLR, HeteroPearson, HeteroPoisson, HeteroSecureBoost,
    HeteroSSHELinR, HeteroSSHELR, HeteroSSHEPoisson, HomoLR,
    HomoOneHotEncoder, Intersection, LocalBaseline, OneHotEncoder, Reader,
    SampleWeight, Union)
from pipeline.interface import Data, Model
from pipeline.runtime.entity import JobParameters
from pipeline.utils.tools import load_job_config


def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]
    backend = config.backend
    work_mode = config.work_mode

    guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}

    pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)

    dataio_0 = DataIO(name="dataio_0")
    dataio_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True, missing_fill=True,
                                                                              outlier_replace=True)
    dataio_0.get_party_instance(role='host', party_id=host).component_param(with_label=False, missing_fill=True,
                                                                            outlier_replace=True)

    intersection_0 = Intersection(name="intersection_0")
    federated_sample_0 = FederatedSample(name="federated_sample_0", mode="stratified", method="upsample",
                                         fractions=[[0, 1.5], [1, 2.0]])
    feature_scale_0 = FeatureScale(name="feature_scale_0", method="min_max_scale", mode="normal")
    feature_scale_0.get_party_instance(role='guest', party_id=guest).component_param(feat_upper=[1, 2, 1, 1, 0.5, 1, 2, 2, 1, 1])
    hetero_feature_binning_0 = HeteroFeatureBinning(name="hetero_feature_binning_0")
    hetero_feature_selection_0 = HeteroFeatureSelection(name="hetero_feature_selection_0")
    one_hot_0 = OneHotEncoder(name="one_hot_0")
    hetero_lr_0 = HeteroLR(name="hetero_lr_0", penalty="L2", optimizer="rmsprop", tol=1e-5,
                           init_param={"init_method": "random_uniform"},
                           alpha=0.01, max_iter=10, early_stop="diff", batch_size=320, learning_rate=0.15)
    evaluation_0 = Evaluation(name="evaluation_0")

    pipeline.add_component(reader_0)
    pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0, data=Data(data=dataio_0.output.data))
    pipeline.add_component(federated_sample_0, data=Data(data=intersection_0.output.data))
    pipeline.add_component(feature_scale_0, data=Data(data=federated_sample_0.output.data))
    pipeline.add_component(hetero_feature_binning_0, data=Data(data=feature_scale_0.output.data))
    pipeline.add_component(hetero_feature_selection_0, data=Data(data=hetero_feature_binning_0.output.data))
    pipeline.add_component(one_hot_0, data=Data(data=hetero_feature_selection_0.output.data))
    pipeline.add_component(hetero_lr_0, data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(evaluation_0, data=Data(data=hetero_lr_0.output.data))
    pipeline.compile()

    job_parameters = JobParameters(backend=backend, work_mode=work_mode)
    pipeline.fit(job_parameters)

    print(pipeline.get_component("evaluation_0").get_summary())
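
A minimal entry point for running these scripts directly (a sketch following
the pattern of FATE's bundled examples; the "-config" flag is an assumption):

if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str, help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()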
Example #2
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_homo_guest",
        "namespace": f"experiment_sid{namespace}"
    }
    host_train_data = {
        "name": "breast_homo_host",
        "namespace": f"experiment_sid{namespace}"
    }

    guest_eval_data = {
        "name": "breast_homo_guest",
        "namespace": f"experiment_sid{namespace}"
    }
    host_eval_data = {
        "name": "breast_homo_host",
        "namespace": f"experiment_sid{namespace}"
    }

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    reader_1 = Reader(name="reader_1")
    reader_1.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_eval_data)
    reader_1.get_party_instance(
        role='host', party_id=host).component_param(table=host_eval_data)
    # define DataTransform components
    data_transform_0 = DataTransform(name="data_transform_0",
                                     with_match_id=True,
                                     with_label=True,
                                     output_format="dense")
    data_transform_1 = DataTransform(
        name="data_transform_1")  # configured from data_transform_0's fitted model below

    scale_0 = FeatureScale(name='scale_0')
    scale_1 = FeatureScale(name='scale_1')

    param = {
        "penalty": "L2",
        "optimizer": "sgd",
        "tol": 1e-05,
        "alpha": 0.01,
        "max_iter": 3,
        "early_stop": "diff",
        "batch_size": 320,
        "learning_rate": 0.15,
        "validation_freqs": 1,
        "init_param": {
            "init_method": "zeros"
        },
        "encrypt_param": {
            "method": None
        },
        "cv_param": {
            "n_splits": 4,
            "shuffle": True,
            "random_seed": 33,
            "need_cv": False
        }
    }

    homo_lr_0 = HomoLR(name='homo_lr_0', **param)

    # add components to pipeline, in order of task execution
    pipeline.add_component(reader_0)
    pipeline.add_component(reader_1)

    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(data_transform_1,
                           data=Data(data=reader_1.output.data),
                           model=Model(data_transform_0.output.model))

    # set data input sources of the scale components
    pipeline.add_component(scale_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(scale_1,
                           data=Data(data=data_transform_1.output.data),
                           model=Model(scale_0.output.model))

    pipeline.add_component(homo_lr_0,
                           data=Data(train_data=scale_0.output.data,
                                     validate_data=scale_1.output.data))
    evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
    evaluation_0.get_party_instance(
        role='host', party_id=host).component_param(need_run=False)
    pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))

    # compile pipeline once finished adding modules, this step will form conf and dsl files for running job
    pipeline.compile()

    # fit model
    pipeline.fit()
    # query component summary
    print(
        json.dumps(pipeline.get_component("homo_lr_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
    print(
        json.dumps(pipeline.get_component("evaluation_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
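Example #3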
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment_sid{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment_sid{namespace}"
    }

    pipeline = PipeLine().set_initiator(role='guest', party_id=guest).\
        set_roles(guest=guest, host=host, arbiter=arbiter)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    data_transform_0 = DataTransform(name="data_transform_0",
                                     with_match_id=True)
    data_transform_0.get_party_instance(
        role='guest', party_id=guest).component_param(with_label=True)
    data_transform_0.get_party_instance(
        role='host', party_id=host).component_param(with_label=False)

    intersection_0 = Intersection(name="intersection_0")
    feature_scale_0 = FeatureScale(name='feature_scale_0',
                                   method="standard_scale",
                                   need_run=True)

    binning_param = {
        "method": "quantile",
        "compress_thres": 10000,
        "head_size": 10000,
        "error": 0.001,
        "bin_num": 10,
        "bin_indexes": -1,
        "adjustment_factor": 0.5,
        "local_only": False,
        "need_run": True,
        "transform_param": {
            "transform_cols": -1,
            "transform_type": "bin_num"
        }
    }
    hetero_feature_binning_0 = HeteroFeatureBinning(
        name='hetero_feature_binning_0', **binning_param)

    statistic_0 = DataStatistics(name='statistic_0', statistics=["95%"])
    pearson_0 = HeteroPearson(name='pearson_0', column_indexes=-1)
    onehot_0 = OneHotEncoder(name='onehot_0')
    selection_param = {
        "name":
        "hetero_feature_selection_0",
        "select_col_indexes":
        -1,
        "select_names": [],
        "filter_methods": [
            "manually", "unique_value", "iv_filter",
            "coefficient_of_variation_value_thres", "outlier_cols"
        ],
        "manually_param": {
            "filter_out_indexes": [0, 1, 2],
            "filter_out_names": ["x3"]
        },
        "unique_param": {
            "eps": 1e-06
        },
        "iv_param": {
            "metrics": ["iv", "iv", "iv"],
            "filter_type": ["threshold", "top_k", "top_percentile"],
            "threshold": [0.001, 100, 0.99]
        },
        "variance_coe_param": {
            "value_threshold": 0.3
        },
        "outlier_param": {
            "percentile": 0.95,
            "upper_threshold": 2.0
        }
    }
    hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)

    lr_param = {
        "name": "hetero_lr_0",
        "penalty": "L2",
        "optimizer": "rmsprop",
        "tol": 0.0001,
        "alpha": 0.01,
        "max_iter": 30,
        "early_stop": "diff",
        "batch_size": 320,
        "learning_rate": 0.15,
        "init_param": {
            "init_method": "zeros"
        },
        "sqn_param": {
            "update_interval_L": 3,
            "memory_M": 5,
            "sample_size": 5000,
            "random_seed": None
        },
        "cv_param": {
            "n_splits": 5,
            "shuffle": False,
            "random_seed": 103,
            "need_cv": False
        }
    }

    hetero_lr_0 = HeteroLR(**lr_param)
    evaluation_0 = Evaluation(name='evaluation_0')

    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(feature_scale_0,
                           data=Data(data=intersection_0.output.data))
    pipeline.add_component(hetero_feature_binning_0,
                           data=Data(data=feature_scale_0.output.data))
    pipeline.add_component(statistic_0,
                           data=Data(data=feature_scale_0.output.data))
    pipeline.add_component(pearson_0,
                           data=Data(data=feature_scale_0.output.data))

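    # isometric_model feeds the binning and statistics results into the
    # selection filters: iv_filter reads the binning model, while the
    # outlier filter reads the 95% statistic computed above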
    pipeline.add_component(
        hetero_feature_selection_0,
        data=Data(data=hetero_feature_binning_0.output.data),
        model=Model(isometric_model=[
            hetero_feature_binning_0.output.model, statistic_0.output.model
        ]))
    pipeline.add_component(
        onehot_0, data=Data(data=hetero_feature_selection_0.output.data))

    pipeline.add_component(hetero_lr_0,
                           data=Data(train_data=onehot_0.output.data))
    pipeline.add_component(evaluation_0,
                           data=Data(data=hetero_lr_0.output.data))

    pipeline.compile()

    pipeline.fit()
Example #4
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_homo_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_homo_host",
        "namespace": f"experiment{namespace}"
    }

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    # define DataTransform components
    data_transform_0 = DataTransform(
        name="data_transform_0", with_label=True,
        output_format="dense")  # start component numbering at 0

    scale_0 = FeatureScale(name='scale_0')
    param = {
        "penalty": "L2",
        "optimizer": "sgd",
        "tol": 1e-05,
        "alpha": 0.01,
        "max_iter": 30,
        "early_stop": "diff",
        "batch_size": -1,
        "learning_rate": 0.15,
        "decay": 1,
        "decay_sqrt": True,
        "init_param": {
            "init_method": "zeros"
        },
        "encrypt_param": {
            "method": None
        },
        "cv_param": {
            "n_splits": 4,
            "shuffle": True,
            "random_seed": 33,
            "need_cv": False
        }
    }

    homo_lr_0 = HomoLR(name='homo_lr_0', **param)

    # add components to pipeline, in order of task execution
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    # set data input source of the scale component
    pipeline.add_component(scale_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(homo_lr_0,
                           data=Data(train_data=scale_0.output.data))
    evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
    evaluation_0.get_party_instance(
        role='host', party_id=host).component_param(need_run=False)
    pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))

    # compile pipeline once finished adding modules, this step will form conf and dsl files for running job
    pipeline.compile()

    # fit model
    pipeline.fit()

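    # mark the fitted preprocessing and model components for reuse, so they
    # can be wired into a separate prediction pipeline below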
    deploy_components = [data_transform_0, scale_0, homo_lr_0]
    pipeline.deploy_component(components=deploy_components)
    predict_pipeline = PipeLine()
    # add data reader onto predict pipeline
    predict_pipeline.add_component(reader_0)
    # add selected components from train pipeline onto predict pipeline,
    # specifying the prediction data source
    predict_pipeline.add_component(
        pipeline,
        data=Data(predict_input={
            pipeline.data_transform_0.input.data: reader_0.output.data
        }))
    predict_pipeline.compile()
    predict_pipeline.predict()

    dsl_json = predict_pipeline.get_predict_dsl()
    conf_json = predict_pipeline.get_predict_conf()
    # persist the predict DSL and conf for later submission
    json.dump(dsl_json,
              open('./homo-lr-normal-predict-dsl.json', 'w'),
              indent=4)
    json.dump(conf_json,
              open('./homo-lr-normal-predict-conf.json', 'w'),
              indent=4)

    # query component summary
    print(
        json.dumps(pipeline.get_component("homo_lr_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
    print(
        json.dumps(pipeline.get_component("evaluation_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
Example #5
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "mock_string",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "mock_string",
        "namespace": f"experiment{namespace}"
    }

    guest_eval_data = {
        "name": "mock_string",
        "namespace": f"experiment{namespace}"
    }
    host_eval_data = {
        "name": "mock_string",
        "namespace": f"experiment{namespace}"
    }

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    reader_1 = Reader(name="reader_1")
    reader_1.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_eval_data)
    reader_1.get_party_instance(
        role='host', party_id=host).component_param(table=host_eval_data)

    # define DataTransform components
    data_transform_0 = DataTransform(
        name="data_transform_0",
        with_label=True,
        output_format="dense",
        label_name='y',
        data_type="str")  # start component numbering at 0
    data_transform_1 = DataTransform(name="data_transform_1")

    homo_onehot_param = {
        "transform_col_indexes": -1,
        "transform_col_names": [],
        "need_alignment": True
    }

    homo_onehot_0 = HomoOneHotEncoder(name='homo_onehot_0',
                                      **homo_onehot_param)
    homo_onehot_1 = HomoOneHotEncoder(name='homo_onehot_1')

    scale_0 = FeatureScale(name='scale_0', method="standard_scale")
    scale_1 = FeatureScale(name='scale_1')

    homo_lr_param = {
        "penalty": "L2",
        "optimizer": "sgd",
        "tol": 1e-05,
        "alpha": 0.01,
        "max_iter": 3,
        "early_stop": "diff",
        "batch_size": 500,
        "learning_rate": 0.15,
        "decay": 1,
        "decay_sqrt": True,
        "init_param": {
            "init_method": "zeros"
        },
        "encrypt_param": {
            "method": "Paillier"
        },
        "cv_param": {
            "n_splits": 4,
            "shuffle": True,
            "random_seed": 33,
            "need_cv": False
        }
    }

    homo_lr_0 = HomoLR(name='homo_lr_0', **homo_lr_param)
    homo_lr_1 = HomoLR(name='homo_lr_1')

    # add components to pipeline, in order of task execution
    pipeline.add_component(reader_0)
    pipeline.add_component(reader_1)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    # set data_transform_1 to replicate model from data_transform_0
    pipeline.add_component(data_transform_1,
                           data=Data(data=reader_1.output.data),
                           model=Model(data_transform_0.output.model))

    pipeline.add_component(homo_onehot_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(homo_onehot_1,
                           data=Data(data=data_transform_1.output.data),
                           model=Model(homo_onehot_0.output.model))
    pipeline.add_component(scale_0, data=Data(data=homo_onehot_0.output.data))
    pipeline.add_component(scale_1,
                           data=Data(data=homo_onehot_1.output.data),
                           model=Model(scale_0.output.model))
    pipeline.add_component(homo_lr_0,
                           data=Data(train_data=scale_0.output.data))
    pipeline.add_component(homo_lr_1,
                           data=Data(test_data=scale_1.output.data),
                           model=Model(homo_lr_0.output.model))
    evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
    evaluation_0.get_party_instance(
        role='host', party_id=host).component_param(need_run=False)
    pipeline.add_component(
        evaluation_0,
        data=Data(data=[homo_lr_0.output.data, homo_lr_1.output.data]))
    pipeline.compile()

    # fit model
    pipeline.fit()
    # query component summary
    print(
        json.dumps(pipeline.get_component("homo_lr_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
    print(
        json.dumps(pipeline.get_component("evaluation_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
Example #6
def make_single_predict_pipeline(config,
                                 namespace,
                                 selection_param,
                                 is_multi_host=False,
                                 **kwargs):
    parties = config.parties
    guest = parties.guest[0]
    if is_multi_host:
        hosts = parties.host
    else:
        hosts = parties.host[0]

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    guest_eval_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_eval_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=hosts)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(
        role='host', party_id=hosts).component_param(table=host_train_data)

    # define DataTransform components
    data_transform_0 = DataTransform(
        name="data_transform_0")  # start component numbering at 0

    # get DataTransform party instance of guest
    data_transform_0_guest_party_instance = data_transform_0.get_party_instance(
        role='guest', party_id=guest)
    # configure DataTransform for guest
    data_transform_0_guest_party_instance.component_param(
        with_label=True, output_format="dense")
    # get and configure DataTransform party instance of host
    data_transform_0.get_party_instance(
        role='host', party_id=hosts).component_param(with_label=False)

    # define Intersection components
    intersection_0 = Intersection(name="intersection_0")
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0,
                           data=Data(data=data_transform_0.output.data))

    reader_1 = Reader(name="reader_1")
    reader_1.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_eval_data)
    reader_1.get_party_instance(
        role='host', party_id=hosts).component_param(table=host_eval_data)
    data_transform_1 = DataTransform(name="data_transform_1")
    intersection_1 = Intersection(name="intersection_1")

    pipeline.add_component(reader_1)
    pipeline.add_component(data_transform_1,
                           data=Data(data=reader_1.output.data),
                           model=Model(data_transform_0.output.model))
    pipeline.add_component(intersection_1,
                           data=Data(data=data_transform_1.output.data))

    sample_0 = FederatedSample(name='sample_0', fractions=0.9)
    pipeline.add_component(sample_0,
                           data=Data(data=intersection_0.output.data))

    if "binning_param" not in kwargs:
        raise ValueError("Binning_param is needed")

    hetero_feature_binning_0 = HeteroFeatureBinning(**kwargs['binning_param'])
    pipeline.add_component(hetero_feature_binning_0,
                           data=Data(data=sample_0.output.data))

    hetero_feature_binning_1 = HeteroFeatureBinning(
        name='hetero_feature_binning_1')
    pipeline.add_component(hetero_feature_binning_1,
                           data=Data(data=intersection_1.output.data),
                           model=Model(hetero_feature_binning_0.output.model))

    hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)
    pipeline.add_component(
        hetero_feature_selection_0,
        data=Data(data=hetero_feature_binning_0.output.data),
        model=Model(isometric_model=[hetero_feature_binning_0.output.model]))

    hetero_feature_selection_1 = HeteroFeatureSelection(
        name='hetero_feature_selection_1')
    pipeline.add_component(
        hetero_feature_selection_1,
        data=Data(data=hetero_feature_binning_1.output.data),
        model=Model(hetero_feature_selection_0.output.model))

    scale_0 = FeatureScale(name='scale_0')
    scale_1 = FeatureScale(name='scale_1')

    pipeline.add_component(
        scale_0, data=Data(data=hetero_feature_selection_0.output.data))
    pipeline.add_component(
        scale_1,
        data=Data(data=hetero_feature_selection_1.output.data),
        model=Model(scale_0.output.model))
    pipeline.compile()
    return pipeline
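
A hypothetical call of the factory above (the parameter values here are
illustrative, not from the source; config/namespace are as in the main()
examples):

predict_pipeline = make_single_predict_pipeline(
    config, namespace,
    selection_param={"name": "hetero_feature_selection_0",
                     "select_col_indexes": -1,
                     "filter_methods": ["iv_value_thres"],
                     "iv_value_param": {"value_threshold": 0.1}},
    binning_param={"name": "hetero_feature_binning_0",
                   "method": "quantile", "bin_num": 10})
predict_pipeline.fit()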
Example #7
def make_feature_engineering_dsl(config, namespace, lr_param, is_multi_host=False, has_validate=False,
                                 is_cv=False, is_ovr=False):
    parties = config.parties
    guest = parties.guest[0]
    if is_multi_host:
        hosts = parties.host
    else:
        hosts = parties.host[0]
    arbiter = parties.arbiter[0]

    if is_ovr:
        guest_train_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
        host_train_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}

        guest_eval_data = {"name": "vehicle_scale_hetero_guest", "namespace": f"experiment{namespace}"}
        host_eval_data = {"name": "vehicle_scale_hetero_host", "namespace": f"experiment{namespace}"}
    else:
        guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
        host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}

        guest_eval_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
        host_eval_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}

    train_line = []
    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)

    # define DataTransform components
    data_transform_0 = DataTransform(name="data_transform_0")  # start component numbering at 0

    # get DataTransform party instance of guest
    data_transform_0_guest_party_instance = data_transform_0.get_party_instance(role='guest', party_id=guest)
    # configure DataTransform for guest
    data_transform_0_guest_party_instance.component_param(with_label=True, output_format="dense")
    # get and configure DataTransform party instance of host
    data_transform_0.get_party_instance(role='host', party_id=hosts).component_param(with_label=False)

    train_line.append(data_transform_0)

    # define Intersection components
    intersection_0 = Intersection(name="intersection_0")
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0, data=Data(data=data_transform_0.output.data))

    train_line.append(intersection_0)

    feature_scale_0 = FeatureScale(name='feature_scale_0', method="standard_scale",
                                   need_run=True)
    pipeline.add_component(feature_scale_0, data=Data(data=intersection_0.output.data))
    train_line.append(feature_scale_0)

    binning_param = {
        "method": "quantile",
        "compress_thres": 10000,
        "head_size": 10000,
        "error": 0.001,
        "bin_num": 10,
        "bin_indexes": -1,
        "adjustment_factor": 0.5,
        "local_only": False,
        "need_run": True,
        "transform_param": {
            "transform_cols": -1,
            "transform_type": "bin_num"
        }
    }
    hetero_feature_binning_0 = HeteroFeatureBinning(name='hetero_feature_binning_0',
                                                    **binning_param)
    pipeline.add_component(hetero_feature_binning_0, data=Data(data=feature_scale_0.output.data))
    train_line.append(hetero_feature_binning_0)

    selection_param = {
        "select_col_indexes": -1,
        "filter_methods": [
            "manually",
            "iv_value_thres",
            "iv_percentile"
        ],
        "manually_param": {
            "filter_out_indexes": None
        },
        "iv_value_param": {
            "value_threshold": 1.0
        },
        "iv_percentile_param": {
            "percentile_threshold": 0.9
        },
        "need_run": True
    }
    hetero_feature_selection_0 = HeteroFeatureSelection(name='hetero_feature_selection_0',
                                                        **selection_param)
    pipeline.add_component(hetero_feature_selection_0, data=Data(data=hetero_feature_binning_0.output.data),
                           model=Model(isometric_model=[hetero_feature_binning_0.output.model]))
    train_line.append(hetero_feature_selection_0)

    onehot_param = {
        "transform_col_indexes": -1,
        "transform_col_names": None,
        "need_run": True
    }
    one_hot_encoder_0 = OneHotEncoder(name='one_hot_encoder_0', **onehot_param)
    pipeline.add_component(one_hot_encoder_0, data=Data(data=hetero_feature_selection_0.output.data))
    train_line.append(one_hot_encoder_0)

    last_cpn = None
    if has_validate:
        reader_1 = Reader(name="reader_1")
        reader_1.get_party_instance(role='guest', party_id=guest).component_param(table=guest_eval_data)
        reader_1.get_party_instance(role='host', party_id=hosts).component_param(table=host_eval_data)
        pipeline.add_component(reader_1)
        last_cpn = reader_1
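        # clone each training component under a "_1" suffix and chain the
        # clones over the validation data, reusing each fitted model where
        # the component exposes one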
        for cpn in train_line:
            cpn_name = cpn.name
            new_name = "_".join(cpn_name.split('_')[:-1] + ['1'])
            validate_cpn = type(cpn)(name=new_name)
            if hasattr(cpn.output, "model"):
                pipeline.add_component(validate_cpn, data=Data(data=last_cpn.output.data),
                                       model=Model(cpn.output.model))
            else:
                pipeline.add_component(validate_cpn, data=Data(data=last_cpn.output.data))
            last_cpn = validate_cpn

    hetero_lr_0 = HeteroLR(**lr_param)
    if has_validate:
        pipeline.add_component(hetero_lr_0, data=Data(train_data=one_hot_encoder_0.output.data,
                                                      validate_data=last_cpn.output.data))
    else:
        pipeline.add_component(hetero_lr_0, data=Data(train_data=one_hot_encoder_0.output.data))

    if is_cv:
        pipeline.compile()
        return pipeline

    evaluation_data = [hetero_lr_0.output.data]
    if has_validate:
        hetero_lr_1 = HeteroLR(name='hetero_lr_1')
        pipeline.add_component(hetero_lr_1, data=Data(test_data=last_cpn.output.data),
                               model=Model(hetero_lr_0.output.model))
        evaluation_data.append(hetero_lr_1.output.data)

    evaluation_0 = Evaluation(name="evaluation_0", eval_type="binary")
    pipeline.add_component(evaluation_0, data=Data(data=evaluation_data))

    pipeline.compile()
    return pipeline
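
A hypothetical driver for the factory above (the lr_param values are
illustrative, not from the source):

def run_feature_engineering_demo(config="../../config.yaml", namespace=""):
    if isinstance(config, str):
        config = load_job_config(config)
    lr_param = {
        "name": "hetero_lr_0",
        "penalty": "L2",
        "optimizer": "rmsprop",
        "tol": 1e-4,
        "alpha": 0.01,
        "max_iter": 30,
        "early_stop": "diff",
        "batch_size": 320,
        "learning_rate": 0.15,
        "init_param": {"init_method": "zeros"}
    }
    pipeline = make_feature_engineering_dsl(config, namespace, lr_param,
                                            has_validate=True)
    pipeline.fit()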
Example #8
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    pipeline = PipeLine().set_initiator(
        role='guest', party_id=guest).set_roles(guest=guest,
                                                host=host,
                                                arbiter=arbiter)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    data_transform_0 = DataTransform(name="data_transform_0")
    data_transform_0.get_party_instance(
        role='guest', party_id=guest).component_param(with_label=True,
                                                      missing_fill=True,
                                                      outlier_replace=True)
    data_transform_0.get_party_instance(
        role='host', party_id=host).component_param(with_label=False,
                                                    missing_fill=True,
                                                    outlier_replace=True)

    intersection_0 = Intersection(name="intersection_0")
    federated_sample_0 = FederatedSample(name="federated_sample_0",
                                         mode="stratified",
                                         method="upsample",
                                         fractions=[[0, 1.5], [1, 2.0]])
    feature_scale_0 = FeatureScale(name="feature_scale_0")
    hetero_feature_binning_0 = HeteroFeatureBinning(
        name="hetero_feature_binning_0")
    hetero_feature_selection_0 = HeteroFeatureSelection(
        name="hetero_feature_selection_0")
    one_hot_0 = OneHotEncoder(name="one_hot_0")
    hetero_lr_0 = HeteroLR(name="hetero_lr_0",
                           penalty="L2",
                           optimizer="rmsprop",
                           tol=1e-5,
                           init_param={"init_method": "random_uniform"},
                           alpha=0.01,
                           max_iter=10,
                           early_stop="diff",
                           batch_size=320,
                           learning_rate=0.15)
    hetero_lr_1 = HeteroLR(name="hetero_lr_1",
                           penalty="L2",
                           optimizer="rmsprop",
                           tol=1e-5,
                           init_param={"init_method": "random_uniform"},
                           alpha=0.01,
                           max_iter=10,
                           early_stop="diff",
                           batch_size=320,
                           learning_rate=0.15,
                           cv_param={
                               "n_splits": 5,
                               "shuffle": True,
                               "random_seed": 103,
                               "need_cv": True
                           })

    hetero_secureboost_0 = HeteroSecureBoost(name="hetero_secureboost_0",
                                             num_trees=5,
                                             cv_param={
                                                 "shuffle": False,
                                                 "need_cv": True
                                             })
    hetero_secureboost_1 = HeteroSecureBoost(name="hetero_secureboost_1",
                                             num_trees=5)
    evaluation_0 = Evaluation(name="evaluation_0")
    evaluation_1 = Evaluation(name="evaluation_1")

    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(federated_sample_0,
                           data=Data(data=intersection_0.output.data))
    pipeline.add_component(feature_scale_0,
                           data=Data(data=federated_sample_0.output.data))
    pipeline.add_component(hetero_feature_binning_0,
                           data=Data(data=feature_scale_0.output.data))
    pipeline.add_component(
        hetero_feature_selection_0,
        data=Data(data=hetero_feature_binning_0.output.data))
    pipeline.add_component(
        one_hot_0, data=Data(data=hetero_feature_selection_0.output.data))
    pipeline.add_component(hetero_lr_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_lr_1,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_secureboost_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_secureboost_1,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(evaluation_0,
                           data=Data(data=hetero_lr_0.output.data))
    pipeline.add_component(evaluation_1,
                           data=Data(data=hetero_secureboost_1.output.data))
    pipeline.compile()

    pipeline.fit()

    print(pipeline.get_component("evaluation_0").get_summary())
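Example #9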
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]
    backend = config.backend
    work_mode = config.work_mode

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    pipeline = PipeLine().set_initiator(
        role='guest', party_id=guest).set_roles(guest=guest,
                                                host=host,
                                                arbiter=arbiter)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    dataio_0 = DataIO(name="dataio_0")
    dataio_0.get_party_instance(role='guest', party_id=guest).component_param(
        with_label=True,
        label_name="y",
        label_type="int",
        output_format="dense")
    dataio_0.get_party_instance(
        role='host', party_id=host).component_param(with_label=False)

    intersection_0 = Intersection(name="intersection_0")

    binning_param = {
        "name": 'hetero_feature_binning_0',
        "method": "quantile",
        "compress_thres": 10000,
        "head_size": 10000,
        "error": 0.001,
        "bin_num": 10,
        "bin_indexes": -1,
        "bin_names": None,
        "category_indexes": None,
        "category_names": None,
        "adjustment_factor": 0.5,
        "local_only": False,
        "transform_param": {
            "transform_cols": -1,
            "transform_names": None,
            "transform_type": "bin_num"
        }
    }

    selection_param = {
        "name": "hetero_feature_selection_0",
        "select_col_indexes": -1,
        "select_names": [],
        "filter_methods": ["iv_value_thres"],
        "iv_value_param": {
            "value_threshold": 0.1
        }
    }
    hetero_feature_binning_0 = HeteroFeatureBinning(**binning_param)

    hetero_feature_selection_0 = HeteroFeatureSelection(**selection_param)

    sample_weight_0 = SampleWeight(name="sample_weight_0")
    sample_weight_0.get_party_instance(
        role='guest', party_id=guest).component_param(need_run=True,
                                                      class_weight={
                                                          "0": 1,
                                                          "1": 2
                                                      })
    sample_weight_0.get_party_instance(
        role='host', party_id=host).component_param(need_run=False)
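    # class_weight maps each label ("0"/"1") to a re-weighting factor on the
    # guest side; the host holds no labels, so the component is skipped there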

    feature_scale_0 = FeatureScale(name="feature_scale_0",
                                   method="standard_scale",
                                   need_run=True)

    hetero_lr_0 = HeteroLR(name="hetero_lr_0",
                           optimizer="nesterov_momentum_sgd",
                           tol=0.001,
                           alpha=0.01,
                           max_iter=20,
                           early_stop="weight_diff",
                           batch_size=-1,
                           learning_rate=0.15,
                           init_param={"init_method": "zeros"})

    evaluation_0 = Evaluation(name="evaluation_0",
                              eval_type="binary",
                              pos_label=1)
    # evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)

    pipeline.add_component(reader_0)
    pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0,
                           data=Data(data=dataio_0.output.data))
    pipeline.add_component(sample_weight_0,
                           data=Data(data=intersection_0.output.data))
    pipeline.add_component(hetero_feature_binning_0,
                           data=Data(data=sample_weight_0.output.data))
    pipeline.add_component(
        hetero_feature_selection_0,
        data=Data(data=hetero_feature_binning_0.output.data),
        model=Model(isometric_model=[hetero_feature_binning_0.output.model]))
    pipeline.add_component(feature_scale_0,
                           data=Data(data=hetero_feature_selection_0.output.data))
    pipeline.add_component(hetero_lr_0,
                           data=Data(train_data=feature_scale_0.output.data))
    pipeline.add_component(evaluation_0,
                           data=Data(data=hetero_lr_0.output.data))

    pipeline.compile()

    job_parameters = JobParameters(backend=backend, work_mode=work_mode)
    pipeline.fit(job_parameters)
Example #10
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    pipeline = PipeLine().set_initiator(
        role='guest', party_id=guest).set_roles(guest=guest,
                                                host=host,
                                                arbiter=arbiter)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    reader_1 = Reader(name="reader_1")
    reader_1.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_1.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    reader_2 = Reader(name="reader_2")
    reader_2.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_2.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    data_transform_0 = DataTransform(name="data_transform_0")
    data_transform_0.get_party_instance(
        role='guest', party_id=guest).component_param(with_label=True,
                                                      missing_fill=True,
                                                      outlier_replace=True)
    data_transform_0.get_party_instance(
        role='host', party_id=host).component_param(with_label=False,
                                                    missing_fill=True,
                                                    outlier_replace=True)
    data_transform_1 = DataTransform(name="data_transform_1")
    data_transform_2 = DataTransform(name="data_transform_2")

    intersection_0 = Intersection(name="intersection_0")
    intersection_1 = Intersection(name="intersection_1")
    intersection_2 = Intersection(name="intersection_2")

    union_0 = Union(name="union_0")

    federated_sample_0 = FederatedSample(name="federated_sample_0",
                                         mode="stratified",
                                         method="downsample",
                                         fractions=[[0, 1.0], [1, 1.0]])

    feature_scale_0 = FeatureScale(name="feature_scale_0")
    feature_scale_1 = FeatureScale(name="feature_scale_1")

    hetero_feature_binning_0 = HeteroFeatureBinning(
        name="hetero_feature_binning_0")
    hetero_feature_binning_1 = HeteroFeatureBinning(
        name="hetero_feature_binning_1")

    hetero_feature_selection_0 = HeteroFeatureSelection(
        name="hetero_feature_selection_0")
    hetero_feature_selection_1 = HeteroFeatureSelection(
        name="hetero_feature_selection_1")

    one_hot_0 = OneHotEncoder(name="one_hot_0")
    one_hot_1 = OneHotEncoder(name="one_hot_1")

    hetero_lr_0 = HeteroLR(name="hetero_lr_0",
                           penalty="L2",
                           optimizer="rmsprop",
                           tol=1e-5,
                           init_param={"init_method": "random_uniform"},
                           alpha=0.01,
                           max_iter=3,
                           early_stop="diff",
                           batch_size=320,
                           learning_rate=0.15)
    hetero_lr_1 = HeteroLR(name="hetero_lr_1")
    hetero_lr_2 = HeteroLR(name="hetero_lr_2",
                           penalty="L2",
                           optimizer="rmsprop",
                           tol=1e-5,
                           init_param={"init_method": "random_uniform"},
                           alpha=0.01,
                           max_iter=3,
                           early_stop="diff",
                           batch_size=320,
                           learning_rate=0.15,
                           cv_param={
                               "n_splits": 5,
                               "shuffle": True,
                               "random_seed": 103,
                               "need_cv": True
                           })

    hetero_sshe_lr_0 = HeteroSSHELR(
        name="hetero_sshe_lr_0",
        reveal_every_iter=True,
        reveal_strategy="respectively",
        penalty="L2",
        optimizer="rmsprop",
        tol=1e-5,
        batch_size=320,
        learning_rate=0.15,
        init_param={"init_method": "random_uniform"},
        alpha=0.01,
        max_iter=3)
    hetero_sshe_lr_1 = HeteroSSHELR(name="hetero_sshe_lr_1")

    local_baseline_0 = LocalBaseline(name="local_baseline_0",
                                     model_name="LogisticRegression",
                                     model_opts={
                                         "penalty": "l2",
                                         "tol": 0.0001,
                                         "C": 1.0,
                                         "fit_intercept": True,
                                         "solver": "lbfgs",
                                         "max_iter": 5,
                                         "multi_class": "ovr"
                                     })
    local_baseline_0.get_party_instance(
        role='guest', party_id=guest).component_param(need_run=True)
    local_baseline_0.get_party_instance(
        role='host', party_id=host).component_param(need_run=False)
    local_baseline_1 = LocalBaseline(name="local_baseline_1")

    hetero_secureboost_0 = HeteroSecureBoost(name="hetero_secureboost_0",
                                             num_trees=3)
    hetero_secureboost_1 = HeteroSecureBoost(name="hetero_secureboost_1")
    hetero_secureboost_2 = HeteroSecureBoost(name="hetero_secureboost_2",
                                             num_trees=3,
                                             cv_param={
                                                 "shuffle": False,
                                                 "need_cv": True
                                             })

    hetero_linr_0 = HeteroLinR(name="hetero_linr_0",
                               penalty="L2",
                               optimizer="sgd",
                               tol=0.001,
                               alpha=0.01,
                               max_iter=3,
                               early_stop="weight_diff",
                               batch_size=-1,
                               learning_rate=0.15,
                               decay=0.0,
                               decay_sqrt=False,
                               init_param={"init_method": "zeros"},
                               floating_point_precision=23)
    hetero_linr_1 = HeteroLinR(name="hetero_linr_1")

    hetero_sshe_linr_0 = HeteroSSHELinR(name="hetero_sshe_linr_0",
                                        max_iter=5,
                                        early_stop="weight_diff",
                                        batch_size=-1)
    hetero_sshe_linr_1 = HeteroSSHELinR(name="hetero_sshe_linr_1")

    hetero_poisson_0 = HeteroPoisson(name="hetero_poisson_0",
                                     early_stop="weight_diff",
                                     max_iter=10,
                                     alpha=100.0,
                                     batch_size=-1,
                                     learning_rate=0.01,
                                     optimizer="rmsprop",
                                     exposure_colname="exposure",
                                     decay_sqrt=False,
                                     tol=0.001,
                                     init_param={"init_method": "zeros"},
                                     penalty="L2")
    hetero_poisson_1 = HeteroPoisson(name="hetero_poisson_1")

    hetero_sshe_poisson_0 = HeteroSSHEPoisson(name="hetero_sshe_poisson_0",
                                              max_iter=5)
    hetero_sshe_poisson_1 = HeteroSSHEPoisson(name="hetero_sshe_poisson_1")

    evaluation_0 = Evaluation(name="evaluation_0")
    evaluation_1 = Evaluation(name="evaluation_1")
    evaluation_2 = Evaluation(name="evaluation_2")

    pipeline.add_component(reader_0)
    pipeline.add_component(reader_1)
    pipeline.add_component(reader_2)

    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(data_transform_1,
                           data=Data(data=reader_1.output.data),
                           model=Model(model=data_transform_0.output.model))
    pipeline.add_component(data_transform_2,
                           data=Data(data=reader_2.output.data),
                           model=Model(model=data_transform_0.output.model))

    pipeline.add_component(intersection_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(intersection_1,
                           data=Data(data=data_transform_1.output.data))
    pipeline.add_component(intersection_2,
                           data=Data(data=data_transform_2.output.data))

    pipeline.add_component(
        union_0,
        data=Data(
            data=[intersection_0.output.data, intersection_2.output.data]))

    pipeline.add_component(federated_sample_0,
                           data=Data(data=intersection_1.output.data))

    pipeline.add_component(feature_scale_0,
                           data=Data(data=union_0.output.data))
    pipeline.add_component(feature_scale_1,
                           data=Data(data=federated_sample_0.output.data),
                           model=Model(model=feature_scale_0.output.model))

    pipeline.add_component(hetero_feature_binning_0,
                           data=Data(data=feature_scale_0.output.data))
    pipeline.add_component(
        hetero_feature_binning_1,
        data=Data(data=feature_scale_1.output.data),
        model=Model(model=hetero_feature_binning_0.output.model))

    pipeline.add_component(
        hetero_feature_selection_0,
        data=Data(data=hetero_feature_binning_0.output.data))
    pipeline.add_component(
        hetero_feature_selection_1,
        data=Data(data=hetero_feature_binning_1.output.data),
        model=Model(model=hetero_feature_selection_0.output.model))

    pipeline.add_component(
        one_hot_0, data=Data(data=hetero_feature_selection_0.output.data))
    pipeline.add_component(
        one_hot_1,
        data=Data(data=hetero_feature_selection_1.output.data),
        model=Model(model=one_hot_0.output.model))

    pipeline.add_component(hetero_lr_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_lr_1,
                           data=Data(test_data=one_hot_1.output.data),
                           model=Model(model=hetero_lr_0.output.model))
    pipeline.add_component(hetero_lr_2,
                           data=Data(train_data=one_hot_0.output.data))

    pipeline.add_component(local_baseline_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(local_baseline_1,
                           data=Data(test_data=one_hot_1.output.data),
                           model=Model(model=local_baseline_0.output.model))

    pipeline.add_component(hetero_sshe_lr_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_sshe_lr_1,
                           data=Data(test_data=one_hot_1.output.data),
                           model=Model(model=hetero_sshe_lr_0.output.model))

    pipeline.add_component(hetero_secureboost_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(
        hetero_secureboost_1,
        data=Data(test_data=one_hot_1.output.data),
        model=Model(model=hetero_secureboost_0.output.model))
    pipeline.add_component(hetero_secureboost_2,
                           data=Data(train_data=one_hot_0.output.data))

    pipeline.add_component(hetero_linr_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_linr_1,
                           data=Data(test_data=one_hot_1.output.data),
                           model=Model(model=hetero_linr_0.output.model))

    pipeline.add_component(hetero_sshe_linr_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_sshe_linr_1,
                           data=Data(test_data=one_hot_1.output.data),
                           model=Model(model=hetero_sshe_linr_0.output.model))

    pipeline.add_component(hetero_poisson_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(hetero_poisson_1,
                           data=Data(test_data=one_hot_1.output.data),
                           model=Model(model=hetero_poisson_0.output.model))

    pipeline.add_component(
        evaluation_0,
        data=Data(data=[
            hetero_lr_0.output.data, hetero_lr_1.output.data,
            hetero_sshe_lr_0.output.data, hetero_sshe_lr_1.output.data,
            local_baseline_0.output.data, local_baseline_1.output.data
        ]))

    pipeline.add_component(hetero_sshe_poisson_0,
                           data=Data(train_data=one_hot_0.output.data))
    pipeline.add_component(
        hetero_sshe_poisson_1,
        data=Data(test_data=one_hot_1.output.data),
        model=Model(model=hetero_sshe_poisson_0.output.model))

    pipeline.add_component(
        evaluation_1,
        data=Data(data=[
            hetero_linr_0.output.data, hetero_linr_1.output.data,
            hetero_sshe_linr_0.output.data, hetero_sshe_linr_1.output.data
        ]))
    pipeline.add_component(
        evaluation_2,
        data=Data(data=[
            hetero_poisson_0.output.data, hetero_poisson_1.output.data,
            hetero_sshe_poisson_0.output.data,
            hetero_sshe_poisson_1.output.data
        ]))

    pipeline.compile()

    pipeline.fit()
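    # NOTE: a bare fit() assumes a recent fate_client; older releases expected
    # an explicit JobParameters argument carrying backend and work_mode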

    print(pipeline.get_component("evaluation_0").get_summary())
    print(pipeline.get_component("evaluation_1").get_summary())
    print(pipeline.get_component("evaluation_2").get_summary())
Example No. 11
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=host, arbiter=arbiter)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    # define DataTransform components
    data_transform_0 = DataTransform(
        name="data_transform_0", with_label=True,
        output_format="dense")  # start component numbering at 0
    data_transform_0.get_party_instance(
        role="host", party_id=host).component_param(with_label=False)
    intersect_0 = Intersection(name='intersect_0')

    scale_0 = FeatureScale(name='scale_0', need_run=False)
    sample_weight_0 = SampleWeight(name="sample_weight_0",
                                   class_weight={
                                       "0": 1,
                                       "1": 2
                                   })  # weight label "1" twice as heavily as label "0"
    sample_weight_0.get_party_instance(
        role="host", party_id=host).component_param(need_run=False)

    param = {
        "penalty": None,
        "optimizer": "sgd",
        "tol": 1e-05,
        "alpha": 0.01,
        "max_iter": 3,
        "early_stop": "weight_diff",
        "batch_size": 320,
        "learning_rate": 0.15,
        "decay": 0,
        "decay_sqrt": True,
        "init_param": {
            "init_method": "ones"
        },
        "reveal_every_iter": False,
        "reveal_strategy": "respectively"
    }
    hetero_sshe_lr_0 = HeteroSSHELR(name='hetero_sshe_lr_0', **param)
    evaluation_0 = Evaluation(name='evaluation_0')
    # add components to pipeline, in order of task execution
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(intersect_0,
                           data=Data(data=data_transform_0.output.data))
    # feed intersection output into scaling and sample weighting
    pipeline.add_component(scale_0, data=Data(data=intersect_0.output.data))
    pipeline.add_component(sample_weight_0,
                           data=Data(data=scale_0.output.data))

    pipeline.add_component(hetero_sshe_lr_0,
                           data=Data(train_data=sample_weight_0.output.data))
    pipeline.add_component(evaluation_0,
                           data=Data(data=hetero_sshe_lr_0.output.data))

    # compile pipeline once finished adding modules, this step will form conf and dsl files for running job
    pipeline.compile()

    # fit model
    pipeline.fit()
    # query component summary (json imported locally; these listings omit
    # module-level imports)
    import json
    print(
        json.dumps(pipeline.get_component("evaluation_0").get_summary(),
                   indent=4,
                   ensure_ascii=False))
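
# a minimal runner sketch (hypothetical; the published examples wrap main()
# in an argparse entry point that passes a --config path through):
if __name__ == "__main__":
    main()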
Example No. 12
def make_normal_dsl(config, namespace):
    parties = config.parties
    guest = parties.guest[0]
    hosts = parties.host[0]
    arbiter = parties.arbiter[0]
    guest_train_data = {
        "name": "breast_homo_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_homo_host",
        "namespace": f"experiment{namespace}"
    }

    # initialize pipeline
    pipeline = PipeLine()
    # set job initiator
    pipeline.set_initiator(role='guest', party_id=guest)
    # set participants information
    pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)

    # define Reader components to read in data
    reader_0 = Reader(name="reader_0")
    # configure Reader for guest
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    # configure Reader for host
    reader_0.get_party_instance(
        role='host', party_id=hosts).component_param(table=host_train_data)

    # define DataTransform components
    data_transform_0 = DataTransform(
        name="data_transform_0")  # start component numbering at 0

    # get DataTransform party instance of guest
    data_transform_0_guest_party_instance = data_transform_0.get_party_instance(
        role='guest', party_id=guest)
    # configure DataTransform for guest
    data_transform_0_guest_party_instance.component_param(
        with_label=True, output_format="dense")
    # get and configure DataTransform party instance of host
    data_transform_0.get_party_instance(
        role='host', party_id=hosts).component_param(with_label=True)

    scale_0 = FeatureScale(name='scale_0')

    homo_sbt_0 = HomoSecureBoost(
        name="homo_secureboost_0",
        num_trees=3,
        task_type='classification',
        objective_param={"objective": "cross_entropy"},
        tree_param={"max_depth": 3},
        validation_freqs=1)

    # add components to pipeline, in order of task execution
    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(scale_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(homo_sbt_0,
                           data=Data(train_data=scale_0.output.data))

    # keep features whose SBT feature importance clears the 0.03 threshold
    selection_param = {
        "name": "hetero_feature_selection_0",
        "select_col_indexes": -1,
        "select_names": [],
        "filter_methods": ["homo_sbt_filter"],
        "sbt_param": {
            "metrics": "feature_importance",
            "filter_type": "threshold",
            "take_high": True,
            "threshold": 0.03
        }
    }
    feature_selection_0 = HeteroFeatureSelection(**selection_param)
    param = {
        "penalty": "L2",
        "optimizer": "sgd",
        "tol": 1e-05,
        "alpha": 0.01,
        "max_iter": 30,
        "early_stop": "diff",
        "batch_size": -1,
        "learning_rate": 0.15,
        "decay": 1,
        "decay_sqrt": True,
        "init_param": {
            "init_method": "zeros"
        },
        "encrypt_param": {
            "method": None
        },
        "cv_param": {
            "n_splits": 4,
            "shuffle": True,
            "random_seed": 33,
            "need_cv": False
        }
    }

    homo_lr_0 = HomoLR(name='homo_lr_0', **param)
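    # wire the trained SBT model in as an isometric model so its feature
    # importances drive the homo_sbt_filter configured above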
    pipeline.add_component(
        feature_selection_0,
        data=Data(data=scale_0.output.data),
        model=Model(isometric_model=homo_sbt_0.output.model))
    pipeline.add_component(
        homo_lr_0, data=Data(train_data=feature_selection_0.output.data))
    evaluation_0 = Evaluation(name='evaluation_0')
    pipeline.add_component(evaluation_0, data=Data(data=homo_lr_0.output.data))
    # compile pipeline once finished adding modules, this step will form conf and dsl files for running job
    pipeline.compile()
    return pipeline
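
# a hypothetical caller for make_normal_dsl, mirroring the main() entry points
# of the surrounding examples (load_job_config from the shared test utils):
def main(config="../../config.yaml", namespace=""):
    if isinstance(config, str):
        config = load_job_config(config)
    pipeline = make_normal_dsl(config, namespace)
    pipeline.fit()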
Example No. 13
def main(config="../../config.yaml", namespace=""):
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    arbiter = parties.arbiter[0]

    guest_train_data = {
        "name": "breast_hetero_guest",
        "namespace": f"experiment{namespace}"
    }
    host_train_data = {
        "name": "breast_hetero_host",
        "namespace": f"experiment{namespace}"
    }

    pipeline = PipeLine().set_initiator(
        role='guest', party_id=guest).set_roles(guest=guest,
                                                host=host,
                                                arbiter=arbiter)

    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(
        role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(
        role='host', party_id=host).component_param(table=host_train_data)

    data_transform_0 = DataTransform(name="data_transform_0")
    data_transform_0.get_party_instance(
        role='guest', party_id=guest).component_param(with_label=True,
                                                      label_name="y",
                                                      label_type="int",
                                                      output_format="dense")
    data_transform_0.get_party_instance(
        role='host', party_id=host).component_param(with_label=False)

    intersection_0 = Intersection(name="intersection_0")
    scale_0 = FeatureScale(name="scale_0",
                           method="min_max_scale",
                           mode="cap",
                           scale_names=["x0"])

    sample_weight_0 = SampleWeight(name="sample_weight_0")
    sample_weight_0.get_party_instance(
        role='guest', party_id=guest).component_param(need_run=True,
                                                      sample_weight_name="x0")
    sample_weight_0.get_party_instance(
        role='host', party_id=host).component_param(need_run=False)

    hetero_lr_0 = HeteroLR(name="hetero_lr_0",
                           optimizer="sgd",
                           tol=0.001,
                           alpha=0.01,
                           max_iter=20,
                           early_stop="weight_diff",
                           batch_size=-1,
                           learning_rate=0.1,
                           init_param={"init_method": "random_uniform"})

    evaluation_0 = Evaluation(name="evaluation_0",
                              eval_type="binary",
                              pos_label=1)
    # evaluation_0.get_party_instance(role='host', party_id=host).component_param(need_run=False)

    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0,
                           data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0,
                           data=Data(data=data_transform_0.output.data))
    pipeline.add_component(scale_0,
                           data=Data(data=intersection_0.output.data))
    pipeline.add_component(sample_weight_0,
                           data=Data(data=scale_0.output.data))
    pipeline.add_component(hetero_lr_0,
                           data=Data(train_data=sample_weight_0.output.data))
    pipeline.add_component(evaluation_0,
                           data=Data(data=hetero_lr_0.output.data))

    pipeline.compile()

    pipeline.fit()

    # predict
    # deploy required components
    pipeline.deploy_component([data_transform_0, intersection_0, hetero_lr_0])

    predict_pipeline = PipeLine()
    # add data reader onto predict pipeline
    predict_pipeline.add_component(reader_0)
    # add selected components from train pipeline onto predict pipeline
    # specify data source
    predict_pipeline.add_component(
        pipeline,
        data=Data(predict_input={
            pipeline.data_transform_0.input.data: reader_0.output.data
        }))
    # run predict model
    predict_pipeline.predict()
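    # the scores could then be retrieved from the deployed LR component; a
    # sketch assuming fate_client's standard component API:
    print(predict_pipeline.get_component("hetero_lr_0").get_output_data())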