def __init__(self,
              executor: PythonObjectExecutor) -> None:
     """Deserialize the wrapped python executor and validate its type.

     :param executor: wrapper holding the pickled bytes of a user-defined
         ``Executor`` in its ``python_object`` field.
     :raises Exception: if the deserialized object is not an ``Executor``.
     """
     super().__init__()
     # Rebuild the user's executor object from its serialized form.
     self.executor: Executor = deserialize(executor.python_object)
     # Fail fast on misconfiguration: only Executor subclasses are supported here.
     if not isinstance(self.executor, Executor):
         raise Exception("python ai flow only support Executor class but config executor is {}"
                         .format(type(self.executor)))
def stream_run_func(context: JobContext, graph: RunGraph):
    """Run every node of *graph* in stream mode.

    All executors are deserialized and ``setup`` before any node executes;
    each node's output is stored in ``value_map`` keyed by its instance id so
    downstream nodes can consume it via ``graph.dependencies``. Executors are
    always ``close``d, even when a node's ``execute`` raises.

    :param context: job-level context shared by every node's FunctionContext.
    :param graph: run graph holding nodes, serialized executors and the
        inter-node dependency map.
    """
    executors: List[Executor] = []
    contexts: List[FunctionContext] = []
    # One deserialized executor and one FunctionContext per graph node,
    # kept in the same order as graph.nodes.
    for node, executor_bytes in zip(graph.nodes, graph.executor_bytes):
        executors.append(deserialize(executor_bytes))
        contexts.append(FunctionContext(node_spec=node, job_context=context))

    def setup():
        # Initialize every executor before any node runs.
        for executor, function_context in zip(executors, contexts):
            executor.setup(function_context)

    def close():
        # Release every executor's resources.
        for executor, function_context in zip(executors, contexts):
            executor.close(function_context)

    setup()
    try:
        value_map = {}
        for node, executor, function_context in zip(graph.nodes, executors, contexts):
            # Collect upstream outputs (if any) as positional inputs:
            # each dependency names the producing node and the output port.
            params = []
            if node.instance_id in graph.dependencies:
                for dependency in graph.dependencies[node.instance_id]:
                    params.append(value_map[dependency.target_node_id][dependency.port])
            value_map[node.instance_id] = executor.execute(function_context, params)
    finally:
        # Original code leaked executors when execute() raised; always close.
        close()
Example No. 3
0
 def __init__(self, executor: FlinkPythonExecutor) -> None:
     """Deserialize the wrapped Flink python executor and validate its type.

     :param executor: wrapper holding the pickled bytes of a user-defined
         executor in its ``python_object`` field.
     :raises Exception: if the deserialized object is none of ``Executor``,
         ``SourceExecutor`` or ``SinkExecutor``.
     """
     super().__init__()
     # Rebuild the user's executor; Flink jobs accept plain, source or sink executors.
     self.executor: Union[Executor, SinkExecutor, SourceExecutor] \
         = serialization_utils.deserialize(executor.python_object)
     # Fail fast on misconfiguration: reject any other deserialized type.
     if not isinstance(self.executor, Executor) \
             and not isinstance(self.executor, SourceExecutor) \
             and not isinstance(self.executor, SinkExecutor):
         raise Exception(
             "python ai flow only support Executor, SourceExecutor, SinkExecutor class "
             "but config executor is {}".format(type(self.executor)))
def batch_run_func(context: JobContext, graph: RunGraph):
    """Run every node of *graph* in batch mode.

    Unlike stream mode, each node's executor is deserialized, ``setup``,
    ``execute``d and ``close``d one node at a time; outputs are stored in
    ``value_map`` keyed by instance id so downstream nodes can consume them.
    The executor is always closed, even when ``execute`` raises.

    :param context: job-level context shared by every node's FunctionContext.
    :param graph: run graph holding nodes, serialized executors and the
        inter-node dependency map.
    """
    value_map = {}
    for node, executor_bytes in zip(graph.nodes, graph.executor_bytes):
        function_context: FunctionContext = FunctionContext(node_spec=node, job_context=context)
        c: Executor = deserialize(executor_bytes)
        c.setup(function_context)
        try:
            # Collect upstream outputs (if any) as positional inputs:
            # each dependency names the producing node and the output port.
            params = []
            if node.instance_id in graph.dependencies:
                for dependency in graph.dependencies[node.instance_id]:
                    params.append(value_map[dependency.target_node_id][dependency.port])
            value_map[node.instance_id] = c.execute(function_context, params)
        finally:
            # Original code skipped close() when execute() raised; always close.
            c.close(function_context)
 def get_table_env_create_func(self):
     """Deserialize and return the stored table-environment factory callable."""
     serialized_func = self.table_env_create_func
     return serialization_utils.deserialize(serialized_func)
Example No. 6
0
def read_config_file(file_path):
    """Read *file_path* and return the config object deserialized from its bytes."""
    with open(file_path, 'rb') as config_file:
        raw_bytes = config_file.read()
    return serialization_utils.deserialize(raw_bytes)
Example No. 7
0
 def stream_executor(self) -> Executor:
     """Rebuild the example executor from its serialized bytes, attach the
     example metadata and return it."""
     example_executor: ExampleExecutor = deserialize(self.example_node.executor.python_object)
     example_executor.example_meta = self.example_meta
     return example_executor
Example No. 8
0
def read_from_serialized_file(file_path):
    """Open *file_path*, read its full contents and return the deserialized object."""
    with open(file_path, 'rb') as serialized_file:
        return deserialize(serialized_file.read())