def __init__(self,
                 platform: Optional[Text] = None,
                 engine: Optional[Text] = None,
                 job_name: Optional[Text] = None,
                 periodic_config: Optional[PeriodicConfig] = None,
                 exec_mode: Optional[ExecutionMode] = ExecutionMode.BATCH,
                 properties: Optional[Dict[Text, Jsonable]] = None) -> None:
        """
        Set platform and engine in base job config.

        :param platform: Platform of the configured job. It can be local or kubernetes.
        :param engine: Engine of the configured job. It can be python, cmd_line, flink or another available engine.
        :param job_name: Name of the configured job.
        :param periodic_config: Periodic scheduling configuration of the job, if any.
        :param exec_mode: Execution mode of the job, batch by default.
        :param properties: Properties of the configured job.
        """
        super().__init__()
        self.platform: Optional[Text] = platform
        self.engine: Optional[Text] = engine
        self.project_desc: ProjectDesc = ProjectDesc()
        self.project_path: Optional[Text] = None
        self.project_local_path: Optional[Text] = None
        if properties is None:
            self.properties: Dict[Text, Jsonable] = {}
        else:
            self.properties: Dict[Text, Jsonable] = properties
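        # Always clean up job resources by default (stored as the string "True").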
        self.properties['clean_job_resource'] = "True"
        self.periodic_config: Optional[PeriodicConfig] = periodic_config
        self.exec_mode: Optional[ExecutionMode] = exec_mode
        self.job_name: Optional[Text] = job_name
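A minimal usage sketch of the constructor above; the enclosing class name BaseJobConfig and the 'parallelism' property key are assumptions made for illustration, since only the __init__ body is shown here.

# Hedged sketch: BaseJobConfig is an assumed name for the class whose
# __init__ is shown above; 'parallelism' is a hypothetical property key.
job_config = BaseJobConfig(platform='local',
                           engine='python',
                           job_name='example_job',
                           properties={'parallelism': '1'})
# __init__ unconditionally sets this flag, even on a caller-supplied dict:
assert job_config.properties['clean_job_resource'] == "True"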
    @staticmethod
    def create_bash_workflow() -> Workflow:
        workflow = Workflow()
        workflow.project_desc = ProjectDesc()
        workflow.project_desc.project_name = "workflow_1"
        for i in range(3):
            job = TestDAGGenerator.create_bash_job(i)
            workflow.add_job(job)
        deps_1 = [
            JobControlEdge(target_node_id='',
                           source_node_id='1_job',
                           met_config=MetConfig(
                               event_key='key_1',
                               event_value='value_1',
                               event_type=UNDEFINED_EVENT_TYPE))
        ]
        deps_2 = [
            JobControlEdge(target_node_id='',
                           source_node_id='2_job',
                           met_config=MetConfig(
                               event_key='key_2',
                               event_value='value_2',
                               event_type=UNDEFINED_EVENT_TYPE))
        ]

        workflow.add_edges("1_job", deps_1)
        workflow.add_edges("2_job", deps_2)
        workflow.workflow_id = 1
        return workflow
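A hedged sketch of calling the factory above; the counts follow directly from the loop and the workflow_id assignment.

# Hedged sketch: assumes create_bash_workflow is reachable on TestDAGGenerator.
workflow = TestDAGGenerator.create_bash_workflow()
assert len(workflow.jobs) == 3      # three bash jobs added in the loop
assert workflow.workflow_id == 1    # set explicitly before returning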
Example 3
def deploy_to_airflow(
        project_desc: ProjectDesc = ProjectDesc(), dag_id: Optional[Text] = None):
    """
    Deploy the workflow of the current project to Airflow.

    :param project_desc: The description of the project.
    :param dag_id: The Airflow dag id. Defaults to the project name.
    :return: The path of the generated Airflow DAG file and the generated DAG code.
    """
    if dag_id is None:
        dag_id = project_desc.project_name
    deploy_path = project_desc.project_config.get_airflow_deploy_path()
    if deploy_path is None:
        raise Exception("airflow_deploy_path config not set!")
    airflow_file_path = os.path.join(deploy_path, dag_id + '.py')
    if os.path.exists(airflow_file_path):
        os.remove(airflow_file_path)

    generated_code = _generate_airflow_file_text(ai_graph=default_graph(),
                                                 project_desc=project_desc,
                                                 dag_id=dag_id)
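    # Write the DAG code to a temporary file first, then rename it into the
    # deploy path so the scheduler never picks up a partially written file.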
    with NamedTemporaryFile(mode='w+t',
                            prefix=dag_id,
                            suffix='.py',
                            dir='/tmp',
                            delete=False) as f:
        f.write(generated_code)
    os.rename(f.name, airflow_file_path)
    return airflow_file_path, generated_code
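A hedged usage sketch of deploy_to_airflow; my_project_desc is a hypothetical ProjectDesc whose project_config has airflow_deploy_path set, since the function raises otherwise.

# Hedged sketch: my_project_desc is a hypothetical, fully configured ProjectDesc.
dag_file, dag_code = deploy_to_airflow(project_desc=my_project_desc,
                                       dag_id='example_dag')
print('DAG file written to', dag_file)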
Example 4
    @staticmethod
    def create_workflow() -> Workflow:
        workflow = Workflow()
        workflow.project_desc = ProjectDesc()
        workflow.project_desc.project_name = "workflow_1"
        for i in range(6):
            job = TestDAGGenerator.create_dummy_job(i)
            if i == 2:
                job.job_config = SendEventJobConfig('localhost:50051', 'key_1',
                                                    'value_1',
                                                    UNDEFINED_EVENT_TYPE)
            elif i == 3:
                job.job_config = SendEventJobConfig('localhost:50051', 'key_2',
                                                    'value_2',
                                                    UNDEFINED_EVENT_TYPE)
            elif i == 5:
                job.job_config = SendEventJobConfig('localhost:50051', 'key_2',
                                                    'value_2',
                                                    "STOP_SCHEDULER_CMD")
            workflow.add_job(job)
        dependencies = [
            JobControlEdge(target_node_id='0_job',
                           source_node_id='2_job',
                           met_config=MetConfig(
                               event_key=generate_job_status_key('0_job'),
                               event_value=State.FINISHED.value)),
            JobControlEdge(target_node_id='1_job',
                           source_node_id='2_job',
                           met_config=MetConfig(
                               event_key=generate_job_status_key('1_job'),
                               event_value=State.FINISHED.value))
        ]
        workflow.add_edges("2_job", dependencies)

        dependencies = [
            JobControlEdge(target_node_id='2_job',
                           source_node_id='4_job',
                           met_config=MetConfig(
                               event_key='key_1',
                               event_value='value_1',
                               event_type=UNDEFINED_EVENT_TYPE)),
            JobControlEdge(target_node_id='3_job',
                           source_node_id='4_job',
                           met_config=MetConfig(
                               event_key='key_2',
                               event_value='value_2',
                               event_type=UNDEFINED_EVENT_TYPE))
        ]
        workflow.add_edges("4_job", dependencies)

        dependencies = [
            JobControlEdge(target_node_id='4_job',
                           source_node_id='5_job',
                           met_config=MetConfig(
                               event_key=generate_job_status_key('4_job'),
                               event_value=State.FINISHED.value))
        ]
        workflow.add_edges("5_job", dependencies)
        workflow.workflow_id = 1
        return workflow
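The workflow above mixes two trigger styles: status-based edges (a job starts when its upstream jobs reach State.FINISHED) and event-based edges (a job starts when a user-defined key/value event arrives). A hedged sketch of exercising it:

# Hedged sketch: assumes create_workflow is reachable on TestDAGGenerator.
workflow = TestDAGGenerator.create_workflow()
assert len(workflow.jobs) == 6
# 2_job waits for 0_job and 1_job to report State.FINISHED;
# 4_job waits for the key_1/key_2 events that 2_job and 3_job send;
# 5_job's SendEventJobConfig emits STOP_SCHEDULER_CMD to end the run.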
Example 5
def submit_ai_flow(ai_graph: AIGraph = default_graph(),
                   project_desc: ProjectDesc = ProjectDesc()) -> Optional[int]:
    """
    Submit ai flow to schedule.

    :param ai_graph: The ai graph constructed from project.
    :param project_desc: The project description.
    :return: Workflow id.
    """
    ex_workflow = generate_workflow(ai_graph, project_desc)
    _default_project.upload_project_package(ex_workflow)
    return _default_project.submit_workflow(ex_workflow=ex_workflow)
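A hedged call sketch; with no arguments, submit_ai_flow falls back to the defaults in its signature, default_graph() and a fresh ProjectDesc().

# Hedged sketch: submit the default graph under a default project description.
workflow_id = submit_ai_flow()
print('submitted workflow', workflow_id)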
Example 6
def _generate_airflow_file_text(ai_graph: AIGraph = default_graph(),
                                project_desc: ProjectDesc = ProjectDesc(),
                                dag_id: Optional[Text] = None) -> Optional[str]:
    """
    Submit ai flow to schedule.

    :param ai_graph: The ai graph constructed from project.
    :param project_desc: The project description.
    :return: Workflow id.
    """
    ex_workflow = generate_workflow(ai_graph, project_desc)
    for job in ex_workflow.jobs.values():
        register_job_meta(workflow_id=ex_workflow.workflow_id, job=job)
    _default_project.upload_project_package(ex_workflow)
    return DAGGenerator().generator(ex_workflow, dag_id)
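A hedged sketch; unlike deploy_to_airflow, this helper only returns the generated DAG text and writes no file.

# Hedged sketch: 'example_dag' is a hypothetical dag id.
dag_text = _generate_airflow_file_text(dag_id='example_dag')
print(dag_text)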
Example 7
def _submit_to_airflow(
        project_desc: ProjectDesc = ProjectDesc(), dag_id: Optional[Text] = None):
    """
    Trigger an execution of the deployed Airflow dag.

    :param project_desc: The description of the project.
    :param dag_id: The Airflow dag id to trigger.
    """
    notification_uri = project_desc.project_config.get_notification_service_uri()
    airflow_operation = AirflowOperation(
        notification_server_uri=notification_uri)
    return airflow_operation.trigger_workflow_execution(dag_id)
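Combining the helpers above, a hedged end-to-end sketch: deploy the generated DAG file first, then trigger an execution through the notification service.

# Hedged sketch: my_project_desc is a hypothetical ProjectDesc with both
# airflow_deploy_path and the notification service URI configured.
deploy_to_airflow(project_desc=my_project_desc, dag_id='example_dag')
_submit_to_airflow(project_desc=my_project_desc, dag_id='example_dag')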