Example 1
# Module paths are assumed from the conclave codebase these excerpts come from.
from conclave.config import CodeGenConfig
from conclave.codegen.spark import SparkCodeGen


def generate(dag, name):

    cfg = CodeGenConfig('cfg')
    cg = SparkCodeGen(cfg, dag)

    # _generate returns a tuple; index 1 holds the generated source code
    actual = cg._generate('code', '/tmp')[1]

    with open('/tmp/' + name + '.py', 'w') as out:
        out.write(actual)
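A hedged usage sketch for the helper above; the protocol body and column schema are illustrative assumptions in the style of Example 5:

@dag_only
def protocol():
    cols = [defCol("a", "INTEGER", [1]), defCol("b", "INTEGER", [1])]
    in1 = sal.create("in1", cols, set([1]))
    agg = sal.aggregate(in1, "agg", ["a"], "b", "sum", "total")
    sal.collect(agg, 1)
    return set([in1])

generate(protocol(), "my_job")  # writes the generated code to /tmp/my_job.py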
Example 2
    def check_workflow(self, dag, name):
        expected_rootdir = \
            "{}/spark_expected".format(os.path.dirname(os.path.realpath(__file__)))

        cfg = CodeGenConfig('cfg')
        cg = SparkCodeGen(cfg, dag)

        actual = cg._generate('code', '/tmp')[1]

        with open(expected_rootdir + '/{}'.format(name), 'r') as f:
            expected = f.read()

        self.assertEqual(expected, actual)
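check_workflow is a golden-file test helper: it regenerates Spark code for a dag and compares it byte-for-byte against a file checked in under spark_expected/. A hedged sketch of a test method calling it; the class name and the agg dag builder are assumptions, not part of the excerpt:

import unittest

class TestSparkCodeGen(unittest.TestCase):

    # ... check_workflow as defined above ...

    def test_agg(self):
        dag = agg()  # a @dag_only protocol returning an OpDag
        self.check_workflow(dag, 'agg')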
Example 3
def generate_code(protocol: callable,
                  conclave_config: CodeGenConfig,
                  mpc_frameworks: list,
                  local_frameworks: list,
                  apply_optimizations: bool = True):
    """
    Applies optimization rewrite passes to protocol, partitions resulting condag, and generates backend specific code for
    each sub-condag.
    :param protocol: protocol to compile
    :param conclave_config: conclave configuration
    :param mpc_frameworks: available mpc backend frameworks
    :param local_frameworks: available local-processing backend frameworks
    :param apply_optimizations: flag indicating if optimization rewrite passes should be applied to condag
    :return: queue of job objects to be executed by dispatcher
    """

    # currently only allow one local and one mpc framework
    assert len(mpc_frameworks) == 1 and len(local_frameworks) == 1

    # set up code gen config object
    if isinstance(conclave_config, CodeGenConfig):
        cfg = conclave_config
    else:
        cfg = CodeGenConfig.from_dict(conclave_config)

    # build the dag and apply optimization rewrite passes if requested
    dag = condag.OpDag(protocol())
    if apply_optimizations:
        dag = comp.rewrite_dag(dag)
    # partition into subdags that will run in specific frameworks
    mapping = part.heupart(dag, mpc_frameworks, local_frameworks)
    # for each sub condag run code gen and add resulting job to job queue
    job_queue = []
    for job_num, (framework, sub_dag, stored_with) in enumerate(mapping):
        print(job_num, framework)
        if framework == "sharemind":
            name = "{}-sharemind-job-{}".format(cfg.name, job_num)
            job = SharemindCodeGen(cfg, sub_dag,
                                   cfg.pid).generate(name, cfg.output_path)
            job_queue.append(job)
        elif framework == "spark":
            name = "{}-spark-job-{}".format(cfg.name, job_num)
            job = SparkCodeGen(cfg, sub_dag).generate(name, cfg.output_path)
            job_queue.append(job)
        elif framework == "python":
            name = "{}-python-job-{}".format(cfg.name, job_num)
            job = PythonCodeGen(cfg, sub_dag).generate(name, cfg.output_path)
            job_queue.append(job)
        else:
            raise Exception("Unknown framework: " + framework)

        # TODO: this probably doesn't belong here
        if conclave_config.pid not in stored_with:
            job.skip = True
    return job_queue
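A hedged sketch of driving generate_code end to end; the protocol, party id, and dispatch loop are illustrative assumptions, not part of the excerpt:

cfg = CodeGenConfig('demo')
cfg.pid = 1           # this party's id
cfg.output_path = '/tmp'

job_queue = generate_code(protocol, cfg, ["sharemind"], ["python"])
for job in job_queue:
    if not job.skip:  # jobs over data this party does not hold are skipped
        print("dispatching", job)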
Example 4
    def generate(self, job_name: str, output_directory: str):

        if self.fmwk == "python":
            code = PythonCodeGen(self.config,
                                 self.dag)._generate(job_name,
                                                     output_directory)[1]
            self._write_python_code(code, job_name)
        elif self.fmwk == "spark":
            code = SparkCodeGen(self.config,
                                self.dag)._generate(job_name,
                                                    output_directory)[1]
            self._write_spark_code(code, job_name)
        else:
            raise Exception("Unknown framework: {}".format(self.fmwk))

        job = SinglePartyJob(self.fmwk, job_name, output_directory,
                             self.config.compute_party, self.config.all_pids)

        return job
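This generate method appears to belong to the single-party code generator that Example 6 instantiates as SinglePartyCodegen; a hedged usage sketch based on that call site:

codegen = SinglePartyCodegen(cfg, dag, "spark")  # fmwk is "spark" or "python"
job = codegen.generate("demo-spark-job-0", cfg.output_path)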
Example 5
        defCol("b", "INTEGER", [1]),
        defCol("c", "INTEGER", [1]),
        defCol("d", "INTEGER", [1])
    ]

    in1 = sal.create("in1", colsIn1, set([1]))

    in2 = sal.create("in2", colsIn1, set([1]))

    return [in1, in2]


@dag_only
def agg():

    in1 = setup()[0]

    # sum column "c" grouped by ("a", "b"), naming the result column "agg1"
    agg = sal.aggregate(in1, "agg", ["a", "b"], "c", "sum", "agg1")

    out = sal.collect(agg, 1)

    # return the dag's root nodes; the dag_only decorator builds the OpDag
    return set([in1])


if __name__ == "__main__":

    dag_agg = agg()
    cfg_agg = CodeGenConfig('agg')
    cg_agg = SparkCodeGen(cfg_agg, dag_agg)
    cg_agg.generate('agg', '/tmp')
Example 6
def generate_code(protocol: callable, cfg: CodeGenConfig, mpc_frameworks: list,
                  local_frameworks: list, apply_optimizations: bool = True):
    """
    Applies optimization rewrite passes to protocol, partitions the resulting dag, and generates backend-specific code
    for each sub-dag.
    :param protocol: protocol to compile
    :param cfg: conclave configuration
    :param mpc_frameworks: available mpc backend frameworks
    :param local_frameworks: available local-processing backend frameworks
    :param apply_optimizations: flag indicating whether optimization rewrite passes should be applied to the dag
    :return: queue of job objects to be executed by the dispatcher
    """

    dag = condag.OpDag(protocol())
    job_queue = []

    if "single-party-spark" not in set(mpc_frameworks) and "single-party-python" not in set(mpc_frameworks):

        # currently only allow one local and one mpc framework
        assert len(mpc_frameworks) == 1 and len(local_frameworks) == 1

        # only apply optimizations if required
        if apply_optimizations:
            dag = comp.rewrite_dag(dag, all_parties=cfg.all_pids, use_leaky_ops=cfg.use_leaky_ops)

        # partition into sub-dags that will run in specific frameworks
        mapping = part.heupart(dag, mpc_frameworks, local_frameworks)

        # for each sub-dag run code gen and add resulting job to job queue
        for job_num, (framework, sub_dag, stored_with) in enumerate(mapping):
            print(job_num, framework)
            if framework == "sharemind":
                name = "{}-sharemind-job-{}".format(cfg.name, job_num)
                job = SharemindCodeGen(cfg, sub_dag, cfg.pid).generate(name, cfg.output_path)
                job_queue.append(job)
            elif framework == "spark":
                name = "{}-spark-job-{}".format(cfg.name, job_num)
                job = SparkCodeGen(cfg, sub_dag).generate(name, cfg.output_path)
                job_queue.append(job)
            elif framework == "python":
                name = "{}-python-job-{}".format(cfg.name, job_num)
                job = PythonCodeGen(cfg, sub_dag).generate(name, cfg.output_path)
                job_queue.append(job)
            elif framework == "obliv-c":
                name = "{}-oblivc-job-{}".format(cfg.name, job_num)
                job = OblivcCodeGen(cfg, sub_dag, cfg.pid).generate(name, cfg.output_path)
                job_queue.append(job)
            elif framework == "jiff":
                name = "{}-jiff-job-{}".format(cfg.name, job_num)
                job = JiffCodeGen(cfg, sub_dag, cfg.pid).generate(name, cfg.output_path)
                job_queue.append(job)
            else:
                raise Exception("Unknown framework: " + framework)

            # TODO: this probably doesn't belong here
            if cfg.pid not in stored_with:
                job.skip = True

    else:

        assert len(mpc_frameworks) == 1

        if mpc_frameworks[0] == "single-party-spark":

            name = "{}-spark-job-0".format(cfg.name)
            job = SinglePartyCodegen(cfg, dag, "spark").generate(name, cfg.output_path)
            job_queue.append(job)

        elif mpc_frameworks[0] == "single-party-python":

            name = "{}-python-job-0".format(cfg.name)
            job = SinglePartyCodegen(cfg, dag, "python").generate(name, cfg.output_path)
            job_queue.append(job)

        else:

            raise Exception("Unknown framework: {}".format(mpc_frameworks[0]))

    return job_queue
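A hedged sketch of the single-party path added in this version; local_frameworks is not consulted on this branch, so an empty list suffices (the protocol is an assumption):

cfg = CodeGenConfig('single')
job_queue = generate_code(protocol, cfg, ["single-party-spark"], [])
# job_queue now holds one SinglePartyJob covering the entire dag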