def test_deserialize_2(self):
    """Loading a payload that sets dump-only fields raises ValidationError."""
    # ``dag_id``, ``end_date``, ``external_trigger`` and ``start_date`` are
    # dump-only on the schema; ``conf`` here is already a dict.
    payload = {
        'dag_id': None,
        'dag_run_id': 'my-dag-run',
        'end_date': None,
        'state': 'failed',
        'execution_date': self.default_time,
        'external_trigger': True,
        'start_date': self.default_time,
        'conf': {"start": "stop"},
    }
    with self.assertRaises(ValidationError):
        dagrun_schema.load(payload)
def test_autofill_fields(self):
    """Dag_run_id and execution_date fields are autogenerated if missing"""
    loaded = dagrun_schema.load({})
    # The generated values themselves are unpredictable, so echo them back
    # into the expected dict — this asserts the result has exactly the
    # ``execution_date`` and ``run_id`` keys and nothing else.
    self.assertDictEqual(
        loaded,
        {"execution_date": loaded["execution_date"], "run_id": loaded["run_id"]},
    )
def post_dag_run(dag_id, session):
    """Trigger a DAG.

    :param dag_id: ID of the DAG to trigger; 404s if no such DAG is registered.
    :param session: SQLAlchemy session (injected by the session decorator).
    :raises NotFound: if the DAG does not exist.
    :raises BadRequest: if the request body fails schema validation.
    :raises AlreadyExists: if a DagRun with the same run_id or execution_date exists.
    """
    if not session.query(DagModel).filter(DagModel.dag_id == dag_id).first():
        raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found")

    try:
        post_body = dagrun_schema.load(request.json, session=session)
    except ValidationError as err:
        # Chain the marshmallow error so the original cause survives in tracebacks.
        raise BadRequest(detail=str(err)) from err

    dagrun_instance = (
        session.query(DagRun)
        .filter(
            DagRun.dag_id == dag_id,
            or_(
                DagRun.run_id == post_body["run_id"],
                DagRun.execution_date == post_body["execution_date"],
            ),
        )
        .first()
    )
    if not dagrun_instance:
        dag_run = DagRun(dag_id=dag_id, run_type=DagRunType.MANUAL, **post_body)
        session.add(dag_run)
        session.commit()
        return dagrun_schema.dump(dag_run)

    # A run already exists; report which of the two unique fields collided.
    if dagrun_instance.execution_date == post_body["execution_date"]:
        raise AlreadyExists(
            detail=f"DAGRun with DAG ID: '{dag_id}' and "
            f"DAGRun ExecutionDate: '{post_body['execution_date']}' already exists"
        )
    raise AlreadyExists(
        detail=f"DAGRun with DAG ID: '{dag_id}' and DAGRun ID: '{post_body['run_id']}' already exists"
    )
def test_deserialize(self):
    """Only dag_run_id, execution_date, state, and conf are loaded;
    dag_run_id should be loaded as run_id."""
    payload = {
        'dag_id': None,
        'dag_run_id': 'my-dag-run',
        'end_date': None,
        'state': 'failed',
        'execution_date': self.default_time,
        'external_trigger': True,
        'start_date': self.default_time,
        'conf': '{"start": "stop"}',
    }
    loaded = dagrun_schema.load(payload)
    expected = {
        'run_id': 'my-dag-run',
        'execution_date': parse(self.default_time),
        'state': 'failed',
        'conf': {"start": "stop"},
    }
    self.assertEqual(loaded.data, expected)
def post_dag_run(*, dag_id: str, session: Session = NEW_SESSION) -> APIResponse:
    """Trigger a DAG.

    :param dag_id: ID of the DAG to trigger; 404s if unknown, 400s on import errors.
    :param session: SQLAlchemy session (injected via NEW_SESSION).
    :raises NotFound: if the DAG does not exist.
    :raises BadRequest: on import errors, invalid payload, or a bad data interval.
    :raises AlreadyExists: if a DagRun with the same run_id or logical date exists.
    """
    dm = session.query(DagModel).filter(DagModel.dag_id == dag_id).first()
    if not dm:
        raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found")
    if dm.has_import_errors:
        raise BadRequest(
            title="DAG cannot be triggered",
            detail=f"DAG with dag_id: '{dag_id}' has import errors",
        )

    try:
        post_body = dagrun_schema.load(get_json_request_dict(), session=session)
    except ValidationError as err:
        # Chain the marshmallow error so the original cause survives in tracebacks.
        raise BadRequest(detail=str(err)) from err

    logical_date = pendulum.instance(post_body["execution_date"])
    run_id = post_body["run_id"]
    dagrun_instance = (
        session.query(DagRun)
        .filter(
            DagRun.dag_id == dag_id,
            or_(DagRun.run_id == run_id, DagRun.execution_date == logical_date),
        )
        .first()
    )
    if not dagrun_instance:
        try:
            dag = get_airflow_app().dag_bag.get_dag(dag_id)
            dag_run = dag.create_dagrun(
                run_type=DagRunType.MANUAL,
                run_id=run_id,
                execution_date=logical_date,
                data_interval=dag.timetable.infer_manual_data_interval(run_after=logical_date),
                state=DagRunState.QUEUED,
                conf=post_body.get("conf"),
                external_trigger=True,
                dag_hash=get_airflow_app().dag_bag.dags_hash.get(dag_id),
            )
            return dagrun_schema.dump(dag_run)
        except ValueError as ve:
            # e.g. the timetable rejects the requested logical date.
            raise BadRequest(detail=str(ve)) from ve

    # A run already exists; report which of the two unique fields collided.
    if dagrun_instance.execution_date == logical_date:
        raise AlreadyExists(
            detail=(
                f"DAGRun with DAG ID: '{dag_id}' and "
                f"DAGRun logical date: '{logical_date.isoformat(sep=' ')}' already exists"
            ),
        )
    raise AlreadyExists(
        detail=f"DAGRun with DAG ID: '{dag_id}' and DAGRun ID: '{run_id}' already exists"
    )
def post_dag_run(dag_id, session):
    """Trigger a DAG.

    :param dag_id: ID of the DAG to trigger; 404s if no such DAG is registered.
    :param session: SQLAlchemy session (injected by the session decorator).
    :raises NotFound: if the DAG does not exist.
    :raises BadRequest: if the request body fails schema validation.
    :raises AlreadyExists: if a DagRun with the same run_id already exists.
    """
    if not session.query(DagModel).filter(DagModel.dag_id == dag_id).first():
        raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found")

    # Convert schema validation failures into a 400 instead of letting the
    # raw ValidationError escape as a 500 (matches the other endpoint versions).
    try:
        post_body = dagrun_schema.load(request.json, session=session)
    except ValidationError as err:
        raise BadRequest(detail=str(err)) from err

    dagrun_instance = (
        session.query(DagRun)
        .filter(DagRun.dag_id == dag_id, DagRun.run_id == post_body["run_id"])
        .first()
    )
    if not dagrun_instance:
        dag_run = DagRun(dag_id=dag_id, run_type=DagRunType.MANUAL, **post_body)
        session.add(dag_run)
        session.commit()
        return dagrun_schema.dump(dag_run)

    raise AlreadyExists(
        detail=f"DAGRun with DAG ID: '{dag_id}' and DAGRun ID: '{post_body['run_id']}' already exists"
    )
def post_dag_run(dag_id, session):
    """Trigger a DAG.

    :param dag_id: ID of the DAG to trigger; 404s if no such DAG is registered.
    :param session: SQLAlchemy session (injected by the session decorator).
    :raises NotFound: if the DAG does not exist.
    :raises BadRequest: on an invalid payload or a bad data interval.
    :raises AlreadyExists: if a DagRun with the same run_id or logical date exists.
    """
    if not session.query(DagModel).filter(DagModel.dag_id == dag_id).first():
        raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found")

    try:
        post_body = dagrun_schema.load(request.json, session=session)
    except ValidationError as err:
        # Chain the marshmallow error so the original cause survives in tracebacks.
        raise BadRequest(detail=str(err)) from err

    logical_date = pendulum.instance(post_body["execution_date"])
    run_id = post_body["run_id"]
    dagrun_instance = (
        session.query(DagRun)
        .filter(
            DagRun.dag_id == dag_id,
            or_(DagRun.run_id == run_id, DagRun.execution_date == logical_date),
        )
        .first()
    )
    if not dagrun_instance:
        try:
            dag = current_app.dag_bag.get_dag(dag_id)
            dag_run = dag.create_dagrun(
                run_type=DagRunType.MANUAL,
                run_id=run_id,
                execution_date=logical_date,
                data_interval=dag.timetable.infer_manual_data_interval(run_after=logical_date),
                state=State.QUEUED,
                conf=post_body.get("conf"),
                external_trigger=True,
                dag_hash=current_app.dag_bag.dags_hash.get(dag_id),
            )
            return dagrun_schema.dump(dag_run)
        except ValueError as ve:
            # e.g. the timetable rejects the requested logical date.
            raise BadRequest(detail=str(ve)) from ve

    # A run already exists; report which of the two unique fields collided.
    if dagrun_instance.execution_date == logical_date:
        raise AlreadyExists(
            detail=(
                f"DAGRun with DAG ID: '{dag_id}' and "
                f"DAGRun logical date: '{logical_date.isoformat(sep=' ')}' already exists"
            ),
        )
    raise AlreadyExists(detail=f"DAGRun with DAG ID: '{dag_id}' and DAGRun ID: '{run_id}' already exists")
def post_dag_run(dag_id, session):
    """Trigger a DAG.

    :param dag_id: ID of the DAG to trigger; 404s if no such DAG is registered.
    :param session: SQLAlchemy session (injected by the session decorator).
    :raises NotFound: if the DAG does not exist.
    :raises BadRequest: if the request body fails schema validation.
    :raises AlreadyExists: if a DagRun with the same run_id or execution_date exists.
    """
    if not session.query(DagModel).filter(DagModel.dag_id == dag_id).first():
        raise NotFound(title="DAG not found", detail=f"DAG with dag_id: '{dag_id}' not found")

    try:
        post_body = dagrun_schema.load(request.json, session=session)
    except ValidationError as err:
        # Chain the marshmallow error so the original cause survives in tracebacks.
        raise BadRequest(detail=str(err)) from err

    execution_date = post_body["execution_date"]
    run_id = post_body["run_id"]
    dagrun_instance = (
        session.query(DagRun)
        .filter(
            DagRun.dag_id == dag_id,
            or_(DagRun.run_id == run_id, DagRun.execution_date == execution_date),
        )
        .first()
    )
    if not dagrun_instance:
        dag_run = current_app.dag_bag.get_dag(dag_id).create_dagrun(
            run_type=DagRunType.MANUAL,
            run_id=run_id,
            execution_date=execution_date,
            state=State.QUEUED,
            conf=post_body.get("conf"),
            external_trigger=True,
            dag_hash=current_app.dag_bag.dags_hash.get(dag_id),
        )
        return dagrun_schema.dump(dag_run)

    # A run already exists; report which of the two unique fields collided.
    if dagrun_instance.execution_date == execution_date:
        raise AlreadyExists(
            detail=f"DAGRun with DAG ID: '{dag_id}' and "
            f"DAGRun ExecutionDate: '{post_body['execution_date']}' already exists"
        )
    raise AlreadyExists(
        detail=f"DAGRun with DAG ID: '{dag_id}' and DAGRun ID: '{post_body['run_id']}' already exists"
    )
def test_deserialize_2(self):
    """An invalid state value is dropped (returned as None) on load."""
    # 'faileds' is not a valid DagRun state, so 'state' must be absent
    # from the loaded payload.
    payload = {
        'dag_id': None,
        'dag_run_id': 'my-dag-run',
        'end_date': None,
        'state': 'faileds',
        'execution_date': self.default_time,
        'external_trigger': True,
        'start_date': self.default_time,
        'conf': {"start": "stop"},
    }
    loaded = dagrun_schema.load(payload)
    expected = {
        'run_id': 'my-dag-run',
        'execution_date': parse(self.default_time),
        'conf': {"start": "stop"},
    }
    self.assertEqual(loaded.data, expected)
def test_deserialize(self, serialized_dagrun, expected_result):
    """Parametrized: each serialized payload loads to the expected dict."""
    loaded = dagrun_schema.load(serialized_dagrun)
    self.assertDictEqual(loaded, expected_result)
def test_invalid_execution_date_raises(self):
    """An unparsable execution_date surfaces as BadRequest with a fixed message."""
    with self.assertRaises(BadRequest) as ctx:
        dagrun_schema.load({"execution_date": "mydate"})
    self.assertEqual(str(ctx.exception), "Incorrect datetime argument")
def test_invalid_execution_date_raises(self):
    """An unparsable execution_date surfaces as BadRequest with a fixed message."""
    payload = {"execution_date": "mydate"}
    with pytest.raises(BadRequest) as excinfo:
        dagrun_schema.load(payload)
    assert str(excinfo.value) == "Incorrect datetime argument"
def test_deserialize(self, serialized_dagrun, expected_result):
    """Parametrized: each serialized payload loads to the expected dict."""
    assert dagrun_schema.load(serialized_dagrun) == expected_result