import os
import sys

# imports assumed from the Conclave package layout these demo scripts use;
# the two aliases (cc and lang) appear in different snippets below
import conclave.lang as cc
import conclave.lang as lang
from conclave import generate_code, dispatch_jobs, generate_and_dispatch
from conclave.config import CodeGenConfig, SharemindCodeGenConfig, NetworkConfig, OblivcConfig
from conclave.utils import defCol


def local_main():
    current_dir = os.path.dirname(os.path.realpath(__file__))
    data_path = os.path.join(current_dir, "data")
    # run the local workflow once per party (a list, not a set, so the order is deterministic)
    for pid in ["1", "2"]:
        # define name for the workflow
        workflow_name = "aspirin-local-test-" + pid
        # configure conclave
        conclave_config = CodeGenConfig(workflow_name, int(pid))
        conclave_config.all_pids = [int(pid)]
        sharemind_conf = SharemindCodeGenConfig("/mnt/shared", use_docker=False, use_hdfs=False)
        conclave_config.with_sharemind_config(sharemind_conf)
        # point conclave to the directory where the generated code should be stored/read from
        conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
        # point conclave to directory where data is to be read from...
        conclave_config.input_path = data_path
        # ...and written to
        conclave_config.output_path = data_path
        # party 1 owns the left input, party 2 the right
        suffix = "left" if pid == "1" else "right"
        job_queue = generate_code(lambda: protocol_local(suffix, int(pid)), conclave_config, ["sharemind"],
                                  ["python"], apply_optimizations=False)
        dispatch_jobs(job_queue, conclave_config)

    # combine the locally computed partial results with the opened MPC result
    res_mpc = read_rel(data_path + "/" + "actual_mpc_open.csv")
    res_left = read_rel(data_path + "/" + "actual_left.csv")
    res_right = read_rel(data_path + "/" + "actual_right.csv")
    assert len(res_mpc) == 1
    assert len(res_left) == 1
    assert len(res_right) == 1
    res = [[res_mpc[0][0] + res_left[0][0] + res_right[0][0]]]
    write_rel(data_path, "actual_open.csv", res, "1")
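
# local_main relies on read_rel and write_rel helpers that are not shown in
# this snippet. Below is a minimal sketch of what they could look like,
# assuming header-prefixed CSV files with integer cells; the real helpers
# may differ.
def read_rel(path_to_rel):
    # read a CSV file, skip the header row, and parse every cell as an int
    with open(path_to_rel, "r") as f:
        lines = [line.strip() for line in f if line.strip()]
    return [[int(cell) for cell in row.split(",")] for row in lines[1:]]


def write_rel(path, rel_name, rel, header):
    # write the header line followed by one comma-separated line per row
    with open(os.path.join(path, rel_name), "w") as f:
        f.write(header + "\n")
        for row in rel:
            f.write(",".join(str(cell) for cell in row) + "\n")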
def run_mpc(pid: str, data_root: str, mpc_backend: str):
    workflow_name = "aspirin-mpc-join-" + pid + "-" + data_root
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    conclave_config.use_leaky_ops = False
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    conclave_config.input_path = os.path.join("/mnt/shared", data_root)
    conclave_config.output_path = os.path.join("/mnt/shared", data_root)
    job_queue = generate_code(lambda: protocol_mpc(conclave_config.all_pids), conclave_config, [mpc_backend],
                              ["python"], apply_optimizations=True)
    dispatch_jobs(job_queue, conclave_config)
def main_mpc(pid: str, mpc_backend: str):
    # define name for the workflow
    workflow_name = "real-aspirin-partitioned-" + pid
    # configure conclave (the backend comes in as a parameter, so the original
    # reassignment from sys.argv[2] was redundant and has been dropped)
    conclave_config = CodeGenConfig(workflow_name, int(pid)) \
        .with_default_mpc_config(mpc_backend)
    current_dir = os.path.dirname(os.path.realpath(__file__))
    # point conclave to the directory where the generated code should be stored/read from
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    # point conclave to directory where data is to be read from...
    conclave_config.input_path = os.path.join(current_dir, "data")
    # ...and written to
    conclave_config.output_path = os.path.join(current_dir, "data")
    job_queue = generate_code(lambda: protocol_mpc(conclave_config.all_pids), conclave_config, [mpc_backend],
                              ["python"], apply_optimizations=True)
    dispatch_jobs(job_queue, conclave_config)
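
# A minimal entry point for main_mpc, assuming the party ID and the MPC
# backend arrive as the first two command-line arguments; this driver is not
# part of the original snippet.
if __name__ == "__main__":
    # hypothetical invocation: python real_aspirin.py 1 obliv-c
    main_mpc(sys.argv[1], sys.argv[2])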
def run_local(pid: str, data_root: str):
    workflow_name = "aspirin-local-join-" + pid + "-" + data_root
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    conclave_config.all_pids = [int(pid)]
    sharemind_conf = SharemindCodeGenConfig("/mnt/shared", use_docker=False, use_hdfs=False)
    conclave_config.with_sharemind_config(sharemind_conf)
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    conclave_config.input_path = os.path.join("/mnt/shared", data_root)
    conclave_config.output_path = os.path.join("/mnt/shared", data_root)
    # party 1 owns the left input, party 2 the right
    suffix = "left" if pid == "1" else "right"
    job_queue = generate_code(lambda: protocol_local(suffix, int(pid)), conclave_config, ["sharemind"], ["python"],
                              apply_optimizations=False)
    dispatch_jobs(job_queue, conclave_config)
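
# run_local and run_mpc mirror the two phases of the partitioned aspirin
# workflow: each party first pre-joins its own plaintext partition, then the
# cross-party portion runs under MPC. A hedged sketch of a driver composing
# them; the step ordering and signature are assumptions, not original code.
def run_partitioned(pid: str, data_root: str, mpc_backend: str):
    run_local(pid, data_root)             # 1. local plaintext pre-join
    run_mpc(pid, data_root, mpc_backend)  # 2. shared portion under MPC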
def main():
    pid = sys.argv[1]
    data_root = sys.argv[2]
    mpc_backend = sys.argv[3]
    # define name for the workflow
    workflow_name = "aspirin-large-join-" + pid + "-" + data_root
    # configure conclave
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    if mpc_backend == "sharemind":
        sharemind_conf = SharemindCodeGenConfig("/mnt/shared", use_docker=True, use_hdfs=False)
        conclave_config.with_sharemind_config(sharemind_conf)
    elif mpc_backend == "obliv-c":
        conclave_config.all_pids = [1, 2]
        net_conf = [
            {"host": "ca-spark-node-0", "port": 8001},
            {"host": "cb-spark-node-0", "port": 8002}
        ]
        net = NetworkConfig(net_conf, int(pid))
        conclave_config.with_network_config(net)
        oc_conf = OblivcConfig("/obliv-c/bin/oblivcc", "ca-spark-node-0:9000")
        conclave_config.with_oc_config(oc_conf)
    else:
        raise Exception("Unknown MPC backend {}".format(mpc_backend))
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    conclave_config.input_path = os.path.join("/mnt/shared", data_root)
    conclave_config.output_path = os.path.join("/mnt/shared", data_root)
    job_queue = generate_code(protocol, conclave_config, [mpc_backend], ["python"], apply_optimizations=True)
    dispatch_jobs(job_queue, conclave_config)
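
# main() compiles a protocol function that is not included in this snippet.
# Below is a minimal sketch of a two-party join-and-aggregate protocol,
# assuming conclave.lang is imported as cc and using hypothetical column
# names and trust sets; the actual protocol may differ.
def protocol():
    # each column is declared as defCol(name, type, trust set); party 1
    # holds the left relation, party 2 the right
    left = cc.create("left", [defCol("a", "INTEGER", [1]), defCol("b", "INTEGER", [1])], {1})
    right = cc.create("right", [defCol("c", "INTEGER", [2]), defCol("d", "INTEGER", [2])], {2})
    # join on the key columns, then sum d grouped by b
    joined = cc.join(left, right, "joined", ["a"], ["c"])
    cc.aggregate(joined, "expected", ["b"], "d", "sum", "total")
    return {left, right}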
def protocol():
    # input schemas reconstructed from the operations below; the column
    # types and single-party ownership are assumptions
    left = cc.create("left", [defCol("a", "INTEGER", [1]), defCol("b", "INTEGER", [1])], {1})
    right = cc.create("right", [defCol("c", "INTEGER", [1]), defCol("d", "INTEGER", [1])], {1})
    joined = cc.join(left, right, "joined", ["a"], ["c"])
    cc.aggregate(joined, "expected", ["b"], "d", "sum", "total")
    return {left, right}


if __name__ == "__main__":
    pid = sys.argv[1]
    # define name for the workflow
    workflow_name = "simple-oblivious-test-" + pid
    # configure conclave
    conclave_config = CodeGenConfig(workflow_name, int(pid))
    # in this demo there is only one party
    conclave_config.all_pids = [1]
    sharemind_conf = SharemindCodeGenConfig("/mnt/shared", use_docker=False, use_hdfs=False)
    conclave_config.with_sharemind_config(sharemind_conf)
    current_dir = os.path.dirname(os.path.realpath(__file__))
    # point conclave to the directory where the generated code should be stored/read from
    conclave_config.code_path = os.path.join("/mnt/shared", workflow_name)
    # point conclave to directory where data is to be read from...
    conclave_config.input_path = os.path.join(current_dir, "data")
    # ...and written to
    conclave_config.output_path = os.path.join(current_dir, "data")
    job_queue = generate_code(protocol, conclave_config, ["sharemind"], ["python"], apply_optimizations=False)
    dispatch_jobs(job_queue, conclave_config)
def protocol():
    # define the input relation; the schema is reconstructed from the
    # operations below, and the INTEGER type and party-1 ownership are
    # assumptions
    input_columns = [defCol("column_a", "INTEGER", [1]), defCol("column_b", "INTEGER", [1])]
    input_relation = lang.create("input_relation", input_columns, {1})
    # square column_b by multiplying it with itself
    squared = lang.multiply(input_relation, "squared", "column_b", ["column_b", "column_b"])
    # sum column_b grouped by column_a and rename the aggregated column to summed
    lang.aggregate(squared, "aggregated", ["column_a"], "column_b", "+", "summed")
    # leaf nodes are automatically written to file, so aggregated will be written to ./data/aggregated.csv
    # return all input relations
    return {input_relation}


if __name__ == "__main__":
    # define name for the workflow
    workflow_name = "python-demo"
    # configure conclave
    conclave_config = CodeGenConfig(workflow_name)
    # need the absolute path to the current directory
    current_dir = os.path.dirname(os.path.realpath(__file__))
    # point conclave to the directory where the generated code should be stored/read from
    conclave_config.code_path = os.path.join(current_dir, workflow_name)
    # point conclave to directory where data is to be read from...
    conclave_config.input_path = os.path.join(current_dir, "data")
    # ...and written to
    conclave_config.output_path = os.path.join(current_dir, "data")
    # define this party's unique ID (in this demo there is only one party)
    conclave_config.pid = 1
    # define all parties involved in this workflow
    conclave_config.all_pids = [1]
    # compile and execute the protocol, specifying the available MPC and local processing backends
    generate_and_dispatch(protocol, conclave_config, ["sharemind"], ["python"])
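
# For completeness, a small helper that writes a plausible input file for the
# demo above. The data values are purely illustrative, and the convention of
# reading data/<relation name>.csv with a header row is an assumption about
# how the Python backend locates its inputs.
def write_example_input(data_dir):
    os.makedirs(data_dir, exist_ok=True)
    with open(os.path.join(data_dir, "input_relation.csv"), "w") as f:
        f.write("column_a,column_b\n")
        f.write("1,10\n1,20\n2,30\n")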