def get_dag_runs_batch(session):
    """
    Get list of DAG Runs matching the filter form posted in the request body.

    :param session: SQLAlchemy ORM session (injected by the caller/decorator).
    :return: serialized DAGRunCollection (dag_runs + total_entries).
    :raises BadRequest: if the posted JSON fails form-schema validation.
    """
    body = request.get_json()
    try:
        data = dagruns_batch_form_schema.load(body)
    except ValidationError as err:
        raise BadRequest(detail=str(err.messages))

    query = session.query(DagRun)
    # FIX: "dag_ids" is optional in the batch form, so indexing the loaded
    # dict directly (data["dag_ids"]) raises KeyError for requests that omit
    # it. Use .get(), matching the other batch-endpoint variants in this file.
    dag_ids = data.get("dag_ids")
    if dag_ids:
        query = query.filter(DagRun.dag_id.in_(dag_ids))

    # Delegate date-range filtering and pagination to the shared helper.
    dag_runs, total_entries = _fetch_dag_runs(
        query,
        session,
        data["end_date_gte"],
        data["end_date_lte"],
        data["execution_date_gte"],
        data["execution_date_lte"],
        data["start_date_gte"],
        data["start_date_lte"],
        data["page_limit"],
        data["page_offset"],
    )
    return dagrun_collection_schema.dump(DAGRunCollection(dag_runs=dag_runs, total_entries=total_entries))
def get_dag_runs_batch(session):
    """Get list of DAG Runs"""
    try:
        data = dagruns_batch_form_schema.load(request.get_json())
    except ValidationError as err:
        raise BadRequest(detail=str(err.messages))

    # Restrict the result set to DAGs the requesting user may read.
    readable = current_app.appbuilder.sm.get_readable_dag_ids(g.user)
    requested = data.get("dag_ids")
    if requested:
        visible_ids = set(requested) & set(readable)
    else:
        visible_ids = readable
    query = session.query(DagRun).filter(DagRun.dag_id.in_(visible_ids))

    dag_runs, total_entries = _fetch_dag_runs(
        query,
        data["end_date_gte"],
        data["end_date_lte"],
        data["execution_date_gte"],
        data["execution_date_lte"],
        data["start_date_gte"],
        data["start_date_lte"],
        data["page_limit"],
        data["page_offset"],
        order_by=data.get('order_by', "id"),
    )
    collection = DAGRunCollection(dag_runs=dag_runs, total_entries=total_entries)
    return dagrun_collection_schema.dump(collection)
def get_dag_runs(
    session,
    dag_id,
    start_date_gte=None,
    start_date_lte=None,
    execution_date_gte=None,
    execution_date_lte=None,
    end_date_gte=None,
    end_date_lte=None,
    offset=None,
    limit=None,
):
    """Get all DAG Runs."""
    query = session.query(DagRun)
    # The sentinel dag_id "~" means "every DAG the current user may read".
    if dag_id != "~":
        query = query.filter(DagRun.dag_id == dag_id)
    else:
        readable = current_app.appbuilder.sm.get_readable_dag_ids(g.user)
        query = query.filter(DagRun.dag_id.in_(readable))

    runs, total = _fetch_dag_runs(
        query,
        end_date_gte,
        end_date_lte,
        execution_date_gte,
        execution_date_lte,
        start_date_gte,
        start_date_lte,
        limit,
        offset,
    )
    return dagrun_collection_schema.dump(DAGRunCollection(dag_runs=runs, total_entries=total))
def get_dag_runs(
    session,
    dag_id,
    start_date_gte=None,
    start_date_lte=None,
    execution_date_gte=None,
    execution_date_lte=None,
    end_date_gte=None,
    end_date_lte=None,
    offset=None,
    limit=None,
):
    """
    Get all DAG Runs.
    """
    query = session.query(DagRun)
    # "~" is the wildcard dag_id: skip the per-DAG filter entirely.
    if dag_id != "~":
        query = query.filter(DagRun.dag_id == dag_id)
    runs, total = _fetch_dag_runs(
        query,
        session,
        end_date_gte,
        end_date_lte,
        execution_date_gte,
        execution_date_lte,
        start_date_gte,
        start_date_lte,
        limit,
        offset,
    )
    collection = DAGRunCollection(dag_runs=runs, total_entries=total)
    return dagrun_collection_schema.dump(collection)
def test_serialize(self, session):
    # Two manual runs at the same timestamp; only the first carries a conf.
    when = timezone.parse(self.default_time)
    first = DagRun(
        run_id="my-dag-run",
        execution_date=when,
        run_type=DagRunType.MANUAL.value,
        start_date=when,
        conf='{"start": "stop"}',
    )
    second = DagRun(
        run_id="my-dag-run-2",
        execution_date=when,
        start_date=when,
        run_type=DagRunType.MANUAL.value,
    )
    runs = [first, second]
    session.add_all(runs)
    session.commit()

    dumped = dagrun_collection_schema.dump(
        DAGRunCollection(dag_runs=runs, total_entries=2)
    )

    expected_first = {
        "dag_id": None,
        "dag_run_id": "my-dag-run",
        "end_date": None,
        "execution_date": self.default_time,
        "external_trigger": True,
        "state": "running",
        "start_date": self.default_time,
        "conf": {"start": "stop"},
    }
    expected_second = {
        "dag_id": None,
        "dag_run_id": "my-dag-run-2",
        "end_date": None,
        "state": "running",
        "execution_date": self.default_time,
        "external_trigger": True,
        "start_date": self.default_time,
        "conf": {},
    }
    self.assertEqual(
        dumped,
        {"dag_runs": [expected_first, expected_second], "total_entries": 2},
    )
def get_dag_runs(
    session,
    dag_id,
    start_date_gte=None,
    start_date_lte=None,
    execution_date_gte=None,
    execution_date_lte=None,
    end_date_gte=None,
    end_date_lte=None,
    offset=None,
    limit=None,
):
    """
    Get all DAG Runs.

    :param session: SQLAlchemy ORM session.
    :param dag_id: DAG to filter on, or "~" to return runs for every DAG.
    :param start_date_gte/start_date_lte: inclusive bounds on DagRun.start_date.
    :param execution_date_gte/execution_date_lte: inclusive bounds on execution_date.
    :param end_date_gte/end_date_lte: inclusive bounds on DagRun.end_date.
    :param offset: pagination offset.
    :param limit: pagination page size.
    :return: serialized DAGRunCollection (dag_runs + total_entries).
    """
    query = session.query(DagRun)

    # This endpoint allows specifying ~ as the dag_id to retrieve DAG Runs for all DAGs.
    if dag_id != "~":
        query = query.filter(DagRun.dag_id == dag_id)

    # filter start date
    if start_date_gte:
        query = query.filter(DagRun.start_date >= start_date_gte)
    if start_date_lte:
        query = query.filter(DagRun.start_date <= start_date_lte)

    # filter execution date
    if execution_date_gte:
        query = query.filter(DagRun.execution_date >= execution_date_gte)
    if execution_date_lte:
        query = query.filter(DagRun.execution_date <= execution_date_lte)

    # filter end date
    if end_date_gte:
        query = query.filter(DagRun.end_date >= end_date_gte)
    if end_date_lte:
        query = query.filter(DagRun.end_date <= end_date_lte)

    # BUG FIX: total_entries previously counted every DagRun in the table
    # (session.query(func.count(DagRun.id)).scalar()), ignoring the dag_id
    # and date filters above, so pagination totals were wrong. Count the
    # filtered query instead, before offset/limit are applied.
    total_entries = query.count()

    # apply offset and limit
    dag_run = query.order_by(DagRun.id).offset(offset).limit(limit).all()

    return dagrun_collection_schema.dump(
        DAGRunCollection(dag_runs=dag_run, total_entries=total_entries)
    )
def test_serialize(self, session):
    # Persist two manual runs sharing one timestamp, then round-trip them
    # through the collection schema and compare against the expected dump.
    ts = timezone.parse(self.default_time)
    run_with_conf = DagRun(
        run_id='my-dag-run',
        execution_date=ts,
        run_type=DagRunType.MANUAL.value,
        start_date=ts,
        conf='{"start": "stop"}'
    )
    run_without_conf = DagRun(
        run_id='my-dag-run-2',
        execution_date=ts,
        start_date=ts,
        run_type=DagRunType.MANUAL.value,
    )
    all_runs = [run_with_conf, run_without_conf]
    session.add_all(all_runs)
    session.commit()

    dumped = dagrun_collection_schema.dump(
        DAGRunCollection(dag_runs=all_runs, total_entries=2)
    )

    # Fields shared by both serialized runs.
    common = {
        'dag_id': None,
        'end_date': None,
        'execution_date': self.default_time,
        'external_trigger': True,
        'state': 'running',
        'start_date': self.default_time,
    }
    expected = {
        'dag_runs': [
            dict(common, dag_run_id='my-dag-run', conf={"start": "stop"}),
            dict(common, dag_run_id='my-dag-run-2', conf={}),
        ],
        'total_entries': 2,
    }
    self.assertEqual(dumped, expected)
def get_dag_runs(
    *,
    dag_id: str,
    start_date_gte: Optional[str] = None,
    start_date_lte: Optional[str] = None,
    execution_date_gte: Optional[str] = None,
    execution_date_lte: Optional[str] = None,
    end_date_gte: Optional[str] = None,
    end_date_lte: Optional[str] = None,
    state: Optional[List[str]] = None,
    offset: Optional[int] = None,
    limit: Optional[int] = None,
    order_by: str = "id",
    session: Session = NEW_SESSION,
):
    """Get all DAG Runs."""
    query = session.query(DagRun)

    # "~" is a wildcard dag_id: include every DAG readable by the user.
    if dag_id != "~":
        query = query.filter(DagRun.dag_id == dag_id)
    else:
        readable = get_airflow_app().appbuilder.sm.get_readable_dag_ids(g.user)
        query = query.filter(DagRun.dag_id.in_(readable))

    if state:
        query = query.filter(DagRun.state.in_(state))

    runs, total = _fetch_dag_runs(
        query,
        end_date_gte=end_date_gte,
        end_date_lte=end_date_lte,
        execution_date_gte=execution_date_gte,
        execution_date_lte=execution_date_lte,
        start_date_gte=start_date_gte,
        start_date_lte=start_date_lte,
        limit=limit,
        offset=offset,
        order_by=order_by,
    )
    collection = DAGRunCollection(dag_runs=runs, total_entries=total)
    return dagrun_collection_schema.dump(collection)
def get_dag_runs_batch(*, session: Session = NEW_SESSION) -> APIResponse:
    """Get list of DAG Runs"""
    try:
        data = dagruns_batch_form_schema.load(get_json_request_dict())
    except ValidationError as err:
        raise BadRequest(detail=str(err.messages))

    # Only DAGs the current user can read are ever returned.
    readable = get_airflow_app().appbuilder.sm.get_readable_dag_ids(g.user)
    requested = data.get("dag_ids")
    if requested:
        visible = set(requested) & set(readable)
    else:
        visible = readable
    query = session.query(DagRun).filter(DagRun.dag_id.in_(visible))

    wanted_states = data.get("states")
    if wanted_states:
        query = query.filter(DagRun.state.in_(wanted_states))

    runs, total = _fetch_dag_runs(
        query,
        end_date_gte=data["end_date_gte"],
        end_date_lte=data["end_date_lte"],
        execution_date_gte=data["execution_date_gte"],
        execution_date_lte=data["execution_date_lte"],
        start_date_gte=data["start_date_gte"],
        start_date_lte=data["start_date_lte"],
        limit=data["page_limit"],
        offset=data["page_offset"],
        order_by=data.get("order_by", "id"),
    )
    collection = DAGRunCollection(dag_runs=runs, total_entries=total)
    return dagrun_collection_schema.dump(collection)