Example 1
    # Shared helper for the shell-engine job tests.  Every argument after
    # master_instance/job_configs/ctx is a mock injected by @mock.patch
    # decorators on the calling test (see the sketch after this example).
    def _setup_run_job(self, master_instance, job_configs, ctx, job_get,
                       get_instance, create_workflow_dir, get_remote,
                       job_exec_get, job_exec_update):
        job = mock.Mock()
        job.name = "Spark shell job"
        job_get.return_value = job

        create_workflow_dir.return_value = self.workflow_dir

        # Mock the "with remote.get_remote(master) as r" context manager
        # used in run_job; __exit__ is already handled if get_remote is a
        # MagicMock created by mock.patch.
        get_remote.return_value.__enter__ = mock.Mock(
            return_value=master_instance)
        get_instance.return_value = master_instance

        eng = shell_engine.ShellEngine("cluster")
        eng._upload_job_files = mock.Mock()
        eng._upload_job_files.return_value = (['main_script.sh'], [])

        job_exec = mock.Mock()
        job_exec.job_configs = job_configs
        status = eng.run_job(job_exec)

        # Check that the command is launched on the master node.
        get_instance.assert_called_with("cluster", self.master_host)

        return status
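
For context, here is a hedged sketch of how a concrete test could drive this
helper. The @mock.patch targets, import path for shell_engine, fixture values,
and the final assertion below are all assumptions chosen to mirror sahara's
usual test layout, not the project's verbatim test module.

import unittest
from unittest import mock

# Assumed module path for the engine under test:
# from sahara.service.edp.shell import engine as shell_engine


class ShellEngineJobTest(unittest.TestCase):
    # _setup_run_job from Example 1 would be defined on this class.

    master_host = "master"       # assumed fixture values
    workflow_dir = "/wfdir"

    @mock.patch('sahara.conductor.API.job_execution_update')
    @mock.patch('sahara.conductor.API.job_execution_get')
    @mock.patch('sahara.utils.remote.get_remote')
    @mock.patch('sahara.service.edp.job_utils.create_workflow_dir')
    @mock.patch('sahara.plugins.utils.get_instance')
    @mock.patch('sahara.conductor.API.job_get')
    @mock.patch('sahara.context.ctx')
    def test_run_job(self, ctx, job_get, get_instance, create_workflow_dir,
                     get_remote, job_exec_get, job_exec_update):
        # mock.patch decorators apply bottom-up, so ctx is the first mock
        # argument and job_exec_update the last; they are passed straight
        # through to the helper in the same order it declares them.
        status = self._setup_run_job(
            mock.Mock(), {'configs': {}}, ctx, job_get, get_instance,
            create_workflow_dir, get_remote, job_exec_get, job_exec_update)
        self.assertIsNotNone(status)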
Example 2
    def get_edp_engine(self, cluster, job_type):
        """Return an EDP engine able to run job_type on cluster, or None."""
        if edp_engine.EdpEngine.job_type_supported(job_type):
            return edp_engine.EdpEngine(cluster)

        if shell_engine.ShellEngine.job_type_supported(job_type):
            return shell_engine.ShellEngine(cluster)

        return None
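
The method tries each engine in a fixed priority order and returns the first
whose class-level job_type_supported check accepts the job type. Below is a
minimal self-contained sketch of the same first-match dispatch pattern; the
engine classes and job-type names are illustrative, not sahara's own.

class OozieEngine(object):
    SUPPORTED = frozenset(["Hive", "Pig", "MapReduce"])

    def __init__(self, cluster):
        self.cluster = cluster

    @classmethod
    def job_type_supported(cls, job_type):
        return job_type in cls.SUPPORTED


class ShellEngine(object):
    SUPPORTED = frozenset(["Shell"])

    def __init__(self, cluster):
        self.cluster = cluster

    @classmethod
    def job_type_supported(cls, job_type):
        return job_type in cls.SUPPORTED


def get_edp_engine(cluster, job_type):
    # First engine whose capability check accepts the job type wins;
    # the tuple order encodes the dispatch priority.
    for engine_cls in (OozieEngine, ShellEngine):
        if engine_cls.job_type_supported(job_type):
            return engine_cls(cluster)
    return None


assert isinstance(get_edp_engine("cluster-1", "Shell"), ShellEngine)
assert get_edp_engine("cluster-1", "Unknown") is None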