def _print_graph(root_dir: str):
    # Discover and parse all pipeline configs under root_dir
    cf = ConfigFinder(root_dir)
    cp = ConfigProcessor(cf)
    pipelines = cp.process_pipeline_configs()

    # Build the task graph from the parsed pipelines and print it
    g = TaskGraph()
    for pipeline in pipelines:
        g.add_pipeline(pipeline)
    g.print_graph()

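# Hedged usage sketch (assumption: this module doubles as a CLI entry point;
# the argparse wiring below is illustrative, not part of the original code).
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(
        description="Print the task graph built from the pipeline configs"
    )
    parser.add_argument("root_dir", help="Directory containing the pipeline config files")
    args = parser.parse_args()
    _print_graph(args.root_dir)
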
def _fetch_task_graph():
    # Parse every pipeline config under the configured DAGs directory
    cf = ConfigFinder(conf.DAGS_DIR)
    cp = ConfigProcessor(cf)
    pipelines = cp.process_pipeline_configs()

    # Assemble the parsed pipelines into a single task graph
    task_graph = TaskGraph()
    for pipeline in pipelines:
        task_graph.add_pipeline(pipeline)
    return task_graph

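# Hedged usage sketch: _fetch_task_graph composes the same steps as
# _print_graph above, so the returned graph can be inspected the same way.
# Both calls exist in the original code; only this pairing is illustrative.
task_graph = _fetch_task_graph()
task_graph.print_graph()
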
def test_override(self):
    cf = ConfigFinder(conf.DAGS_DIR)
    cp = ConfigProcessor(cf)
    pipelines = cp.process_pipeline_configs()

    # Locate the "spark" task across all parsed pipelines
    spark_task = None
    for pipe in pipelines:
        for task in pipe.tasks:
            if task.name == "spark":
                spark_task = task

    # The task-level overrides should take precedence over the defaults
    self.assertEqual(spark_task.template_parameters["frequency"], "24")
    self.assertEqual(spark_task.airflow_parameters["retries"], 4)

def test_read_env(self):
    cf = ConfigFinder(conf.DAGS_DIR)
    cp = ConfigProcessor(cf)
    pipelines = cp.process_pipeline_configs()

    # Locate the "batch" task across all parsed pipelines
    batch_task = None
    for pipe in pipelines:
        for task in pipe.tasks:
            if task.name == "batch":
                batch_task = task

    # The third output's bucket should be resolved from the environment config
    self.assertEqual(batch_task.outputs[2].bucket, "cholocal-test")

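# Hypothetical refactor (not in the original tests): both lookups above repeat
# the same nested loop, which could be shared via a helper like this one.
def _find_task(pipelines, name):
    """Return the first task with the given name across all pipelines, or None."""
    for pipe in pipelines:
        for task in pipe.tasks:
            if task.name == name:
                return task
    return None
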
def collect_dags():
    # Parse every pipeline config under the configured DAGs directory
    cf = ConfigFinder(conf.DAGS_DIR)
    cp = ConfigProcessor(cf)
    pipelines = cp.process_pipeline_configs()

    # Build the task graph from the parsed pipelines
    g = TaskGraph()
    for pipeline in pipelines:
        g.add_pipeline(pipeline)

    # Traverse the graph to create the DAG objects
    dc = DagCreator(g._graph)
    dags = dc.traverse_graph()
    return dags

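# Hedged usage sketch: in an Airflow DAG file, collected DAGs are commonly
# exposed at module level so the scheduler can discover them. This assumes
# traverse_graph() returns a mapping of dag_id to DAG object, which the
# original code does not confirm.
for dag_id, dag in collect_dags().items():
    globals()[dag_id] = dag
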
from dagger import conf
from dagger.config_finder.config_finder import ConfigFinder
from dagger.config_finder.config_processor import ConfigProcessor
from dagger.dag_creator.elastic_search.dag_creator import DagCreator
from dagger.graph.task_graph import TaskGraph

# Parse every pipeline config under the configured DAGs directory
cf = ConfigFinder(conf.DAGS_DIR)
cp = ConfigProcessor(cf)
pipelines = cp.process_pipeline_configs()

# Build the task graph and traverse it with the Elasticsearch DAG creator
g = TaskGraph()
for pipeline in pipelines:
    g.add_pipeline(pipeline)

dc = DagCreator(g._graph)
dags = dc.traverse_graph()