Example #1
def run_mpc(pid: str, data_root: str, mpc_backend: str):
    """Configure conclave for this party and run the MPC join workflow on the given backend."""
    workflow_name = "aspirin-mpc-join-" + pid + "-" + data_root

    # configure conclave for this party
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    conclave_config.use_leaky_ops = False

    # point conclave to the directories for generated code and for input/output data
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    conclave_config.input_path = os.path.join("/mnt/shared", data_root)
    conclave_config.output_path = os.path.join("/mnt/shared", data_root)

    # generate code for the requested MPC backend and Python, then run the jobs
    job_queue = generate_code(lambda: protocol_mpc(conclave_config.all_pids),
                              conclave_config, [mpc_backend], ["python"],
                              apply_optimizations=True)
    dispatch_jobs(job_queue, conclave_config)
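
A minimal sketch of how run_mpc might be driven from the command line; the imports and the "obliv-c" backend name are assumptions based on conclave's example scripts rather than part of this snippet, and protocol_mpc is assumed to be defined in the same module:

import os
import sys

from conclave import generate_code, dispatch_jobs
from conclave.config import CodeGenConfig

if __name__ == "__main__":
    # party ID and data directory come from the command line; the backend name is illustrative
    run_mpc(sys.argv[1], sys.argv[2], "obliv-c")
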
Example #2
    def check_workflow(self, dag, name, use_leaky_ops=True):
        """Generate Sharemind code for the given dag and compare it against the expected output files."""
        self.maxDiff = None
        expected_rootdir = \
            "{}/sharemind_expected".format(os.path.dirname(os.path.realpath(__file__)))

        sm_cfg = SharemindCodeGenConfig()
        cfg = CodeGenConfig('cfg').with_sharemind_config(sm_cfg)
        cfg.use_leaky_ops = use_leaky_ops
        cg = SharemindCodeGen(cfg, dag, 1)

        actual = cg._generate('code', '/tmp')[1]['miner']

        with open("{}/{}".format(expected_rootdir, name), 'r') as f_specific, \
                open("{}/{}".format(expected_rootdir, "base"), 'r') as f_base:
            expected_base = f_base.read()
            expected_specific = f_specific.read()
            expected = expected_base + expected_specific

        self.assertEqual(expected, actual)
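
A sketch of a test method that could sit alongside check_workflow in the same TestCase; importing conclave.lang as cc matches the later examples, while dag_only, defCol, and the expected-output name "agg" are assumptions about the project's helpers rather than code taken from this snippet:

import conclave.lang as cc
from conclave.comp import dag_only   # assumed decorator turning a protocol function into a dag
from conclave.utils import defCol    # assumed column-definition helper

    def test_agg(self):
        @dag_only
        def protocol():
            # one shared input relation; column names and party sets are illustrative
            cols = [defCol("a", "INTEGER", [1], [2], [3]),
                    defCol("b", "INTEGER", [1], [2], [3])]
            rel = cc.create("rel", cols, {1, 2, 3})
            agg = cc.aggregate(rel, "agg", ["a"], "b", "sum", "total_b")
            cc.collect(agg, 1)
            return {rel}

        self.check_workflow(protocol(), "agg", use_leaky_ops=False)
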
Example #3
File: real.py  Project: yangzpag/conclave
    right = cc.create("right", input_columns_right, {2})
    aggregated = cc.aggregate(cc.concat([left, right], "rel"), "actual",
                              ["column_a"], "column_b", "sum", "total_b")
    actual_open = cc.project(aggregated, "actual_open",
                             ["column_a", "total_b"])
    cc.collect(actual_open, 1)
    return {left, right}


if __name__ == "__main__":
    pid = sys.argv[1]
    # define name for the workflow
    workflow_name = "hybrid-agg-leaky-" + pid
    # configure conclave
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    conclave_config.use_leaky_ops = False
    sharemind_conf = SharemindCodeGenConfig("/mnt/shared",
                                            use_docker=False,
                                            use_hdfs=False)
    conclave_config.with_sharemind_config(sharemind_conf)
    current_dir = os.path.dirname(os.path.realpath(__file__))
    # point conclave to the directory where the generated code is stored and read from
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    # point conclave to the directory where input data is read from...
    conclave_config.input_path = os.path.join(current_dir, "data")
    # ...and where results are written to
    conclave_config.output_path = os.path.join(current_dir, "data")
    # generate code for the Sharemind and Python backends
    job_queue = generate_code(protocol,
                              conclave_config, ["sharemind"], ["python"],
                              apply_optimizations=True)
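
The snippet stops after code generation; as in Example #1, the generated jobs would typically be handed to dispatch_jobs next (a sketch, assuming the same dispatch_jobs helper is imported, since that line is not part of this file as shown):

    # run the generated Sharemind and Python jobs, as in Example #1
    dispatch_jobs(job_queue, conclave_config)
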
Example #4
    right_dummy = cc.project(right, "right_dummy", ["c", "d"])

    actual = cc.join(left_dummy, right_dummy, "actual", ["a"], ["c"])

    cc.collect(actual, 1)
    # return the root relations that define the dag
    return {left, right}


if __name__ == "__main__":
    pid = sys.argv[1]
    # define name for the workflow
    workflow_name = "hybrid-join-test-" + pid
    # configure conclave
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    conclave_config.use_leaky_ops = True
    sharemind_conf = SharemindCodeGenConfig("/mnt/shared",
                                            use_docker=True,
                                            use_hdfs=False)
    conclave_config.with_sharemind_config(sharemind_conf)
    current_dir = os.path.dirname(os.path.realpath(__file__))
    # point conclave to the directory where the generated code is stored and read from
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    # point conclave to the directory where input data is read from...
    conclave_config.input_path = os.path.join(current_dir, "data")
    # ...and where results are written to
    conclave_config.output_path = os.path.join(current_dir, "data")
    # generate code for the Sharemind and Python backends
    job_queue = generate_code(protocol,
                              conclave_config, ["sharemind"], ["python"],
                              apply_optimizations=True)
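
This protocol is truncated above the cc.project call, so the input relations are not shown; a sketch of how left, right, and left_dummy might be defined, following the cc.create / cc.project pattern of Example #3 (defCol, the column names, and the ownership sets are illustrative assumptions, not the original definitions):

    # illustrative reconstruction of the truncated inputs; adjust to the real column definitions
    left_columns = [defCol("a", "INTEGER", [1]), defCol("b", "INTEGER", [1])]
    right_columns = [defCol("c", "INTEGER", [2]), defCol("d", "INTEGER", [2])]
    left = cc.create("left", left_columns, {1})
    right = cc.create("right", right_columns, {2})
    left_dummy = cc.project(left, "left_dummy", ["a", "b"])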