Example No. 1
0
    def generate(self, sub_graph: AISubGraph, project_desc: ProjectDesc) -> AbstractJob:
        """
        Generate a local flink job from the given sub graph.

        :param sub_graph: Sub graph generated from ai nodes.
        :param project_desc: Description of the project.
        :return: Base job Object.
        """
        # Anything not explicitly BATCH is treated as STREAM.
        mode = (ExecutionMode.BATCH
                if sub_graph.config.exec_mode == ExecutionMode.BATCH
                else ExecutionMode.STREAM)
        flink_context = JobContext(mode)
        flink_context.project_config = project_desc.project_config

        local_flink_job_config: LocalFlinkJobConfig = sub_graph.config
        job = LocalFlinkJob(ai_graph=sub_graph,
                            job_context=flink_context,
                            job_config=local_flink_job_config)

        # Derive the language type from the graph when it is not configured.
        if job.job_config.language_type is None:
            job.job_config.language_type = self.get_language_type(sub_graph)

        # Entry points: jar/main class for java, py file for python.
        job.job_config.main_class = version.main_class
        job.job_config.jar_path = version.jar_path
        job.job_config.py_entry_file = version.py_main_file
        return job
 def generate(self, sub_graph: AISubGraph, project_desc: ProjectDesc) -> AbstractJob:
     """
     Generate a kubernetes CMD job from a single-node sub graph.

     :param sub_graph: Sub graph generated from ai nodes; must contain exactly one node.
     :param project_desc: Description of the project (currently unused here).
     :return: The generated KubernetesCMDJob.
     :raises Exception: If the sub graph does not contain exactly one node.
     """
     # Explicit validation instead of `assert`: assertions are stripped under
     # `python -O`, which would silently accept a malformed sub graph.
     if len(sub_graph.nodes) != 1:
         raise Exception(
             'KubernetesCMDJob expects exactly one node, got {}'.format(len(sub_graph.nodes)))
     node: ExecutableNode = list(sub_graph.nodes.values())[0]
     if sub_graph.config.exec_mode == ExecutionMode.BATCH:
         context = JobContext(ExecutionMode.BATCH)
     else:
         context = JobContext(ExecutionMode.STREAM)
     executor: CmdExecutor = node.executor
     return KubernetesCMDJob(job_context=context, exec_cmd=executor.cmd_line, job_config=sub_graph.config)
Example No. 3
0
 def generate(self, sub_graph: AISubGraph,
              project_desc: ProjectDesc) -> VVPJob:
     """
     Generate a VVP job for the given sub graph.

     :param sub_graph: Sub graph generated from ai nodes.
     :param project_desc: Description of the project.
     :return: The generated VVPJob.
     """
     # Anything not explicitly BATCH runs as STREAM.
     mode = (ExecutionMode.BATCH
             if sub_graph.config.exec_mode == ExecutionMode.BATCH
             else ExecutionMode.STREAM)
     flink_context = JobContext(mode)
     flink_context.project_config = project_desc.project_config
     job_config: VVPJobConfig = sub_graph.config
     return VVPJob(job_context=flink_context, job_config=job_config)
Example No. 4
0
    def generate(self, sub_graph: AISubGraph, project_desc: ProjectDesc) -> KubernetesPythonJob:
        """
        Generate a kubernetes python job from the given sub graph.

        :param sub_graph: Sub graph generated from ai nodes.
        :param project_desc: Description of the project.
        :return: The generated KubernetesPythonJob.
        """
        # Pick the serialized run function and the job context together,
        # driven by the configured execution mode.
        if sub_graph.config.exec_mode == ExecutionMode.BATCH:
            run_func = serialize(batch_run_func)
            py_context = JobContext(ExecutionMode.BATCH)
        else:
            run_func = serialize(stream_run_func)
            py_context = JobContext(ExecutionMode.STREAM)
        py_context.project_config = project_desc.project_config

        graph: RunGraph = self.build_run_graph(sub_graph, py_context)
        config: KubernetesPythonJobConfig = sub_graph.config
        return KubernetesPythonJob(run_graph=graph,
                                   run_func=run_func,
                                   job_context=py_context,
                                   job_config=config)
    def compile_node(self, node: AINode, context: JobContext) -> Executor:
        """
        Compile a single AI node into an Executor for the given job context.

        :param node: The node to compile; Example and ExecutableNode are handled.
        :param context: Job context carrying the execution mode.
        :return: The compiled Executor, or None for unsupported node types.
        :raises Exception: If an Example node's data type is not supported.
        """
        exec_mode: ExecutionMode = context.get_execution_mode()

        def pick_executor(component: BaseComponent):
            # Select batch or stream executor according to the execution mode.
            return (component.batch_executor()
                    if exec_mode == ExecutionMode.BATCH
                    else component.stream_executor())

        if isinstance(node, Example):
            example_node: Example = node
            # A user-provided executor overrides the example's declared data type.
            if node.executor is not None:
                example_type = 'udf'
            else:
                example_type = example_node.example_meta.data_type
            if example_type not in ('udf', 'pandas', 'numpy'):
                raise Exception(
                    "Base example node only supports pandas and numpy now")
            # Registry maps the example type to its component class.
            component_cls = self.example_registry.get_object(example_type)
            return pick_executor(component_cls(node, context))
        if isinstance(node, ExecutableNode):
            return pick_executor(ExecuteComponent(node, context))
        # Unknown node types compile to no executor.
        return None
 def __init__(self,
              run_func: bytes,
              run_graph: Optional[RunGraph],
              job_context: Optional[JobContext] = None,
              job_config: Optional[LocalPythonJobConfig] = None):
     """
     :param run_func: Serialized run function executed by the job.
     :param run_graph: Run graph of the job, or None.
     :param job_context: Job context; a fresh JobContext is created when None.
     :param job_config: Job config; a fresh LocalPythonJobConfig is created when None.
     """
     # None-sentinel defaults: the original defaults (JobContext(),
     # LocalPythonJobConfig()) were evaluated once at definition time and
     # shared by every instance constructed without explicit arguments.
     super().__init__(
         job_context=job_context if job_context is not None else JobContext(),
         job_config=job_config if job_config is not None else LocalPythonJobConfig())
     self.run_func = run_func
     self.run_graph = run_graph
     # File paths for the serialized function and its args; populated later
     # (presumably by the job submitter — TODO confirm against callers).
     self.exec_func_file: Optional[Text] = None
     self.exec_args_file: Optional[Text] = None
 def __init__(self,
              exec_cmd,
              job_context: JobContext = None,
              job_config: AbstractJobConfig = None):
     """
     :param exec_cmd: Command line the job executes.
     :param job_context: Job context; a fresh JobContext is created when None.
     :param job_config: Job config; a fresh KubernetesCMDJobConfig is created when None.
     """
     # None-sentinel defaults: the original defaults (JobContext(),
     # KubernetesCMDJobConfig()) were evaluated once at definition time and
     # shared by every instance constructed without explicit arguments.
     if job_context is None:
         job_context = JobContext()
     if job_config is None:
         job_config = KubernetesCMDJobConfig()
     super().__init__(job_context, job_config)
     self.exec_cmd = exec_cmd
Example No. 8
0
 def __init__(self,
              run_func: bytes,
              run_graph: Optional[RunGraph],
              job_context: Optional[JobContext] = None,
              job_config: Optional[KubernetesPythonJobConfig] = None):
     """
     :param run_func: Serialized run function executed by the job.
     :param run_graph: Run graph of the job, or None.
     :param job_context: Job context; a fresh JobContext is created when None.
     :param job_config: Job config; a fresh KubernetesPythonJobConfig is created when None.
     """
     # None-sentinel defaults: the original defaults (JobContext(),
     # KubernetesPythonJobConfig()) were evaluated once at definition time and
     # shared by every instance constructed without explicit arguments.
     super().__init__(
         run_func,
         run_graph,
         job_context if job_context is not None else JobContext(),
         job_config if job_config is not None else KubernetesPythonJobConfig())
 def __init__(self,
              job_context: JobContext = None,
              job_config: AbstractJobConfig = None):
     """
     :param job_context: Job context; a fresh JobContext is created when None.
     :param job_config: Job config; a fresh LocalDummyJobConfig is created when None.
     """
     # None-sentinel defaults: the original defaults (JobContext(),
     # LocalDummyJobConfig()) were evaluated once at definition time and
     # shared by every instance constructed without explicit arguments.
     if job_context is None:
         job_context = JobContext()
     if job_config is None:
         job_config = LocalDummyJobConfig()
     super().__init__(job_context, job_config)