Example #1
 def test_serialize(self, session):
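     # Serialize two persisted Log rows through event_log_collection_schema
     # and compare the dump against the expected collection payload.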
     event_log_model_1 = Log(event="TEST_EVENT_1", task_instance=self._create_task_instance())
     event_log_model_2 = Log(event="TEST_EVENT_2", task_instance=self._create_task_instance())
     event_logs = [event_log_model_1, event_log_model_2]
     session.add_all(event_logs)
     session.commit()
     event_log_model_1.dttm = timezone.parse(self.default_time)
     event_log_model_2.dttm = timezone.parse(self.default_time2)
     instance = EventLogCollection(event_logs=event_logs, total_entries=2)
     deserialized_event_logs = event_log_collection_schema.dump(instance)
     assert deserialized_event_logs == {
         "event_logs": [
             {
                 "event_log_id": event_log_model_1.id,
                 "event": "TEST_EVENT_1",
                 "dag_id": "TEST_DAG_ID",
                 "task_id": "TEST_TASK_ID",
                 "execution_date": self.default_time,
                 "owner": 'airflow',
                 "when": self.default_time,
                 "extra": None,
             },
             {
                 "event_log_id": event_log_model_2.id,
                 "event": "TEST_EVENT_2",
                 "dag_id": "TEST_DAG_ID",
                 "task_id": "TEST_TASK_ID",
                 "execution_date": self.default_time,
                 "owner": 'airflow',
                 "when": self.default_time2,
                 "extra": None,
             },
         ],
         "total_entries": 2,
     }
Example #2
 def test_should_respond_200(self, session):
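     # GET /api/v1/eventLogs should list all three Log rows, including the
     # scheduler event that has no associated task instance.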
     log_model_1 = Log(
         event='TEST_EVENT_1',
         task_instance=self._create_task_instance(),
     )
     log_model_2 = Log(
         event='TEST_EVENT_2',
         task_instance=self._create_task_instance(),
     )
     log_model_3 = Log(
         event="cli_scheduler",
         owner='root',
         extra='{"host_name": "e24b454f002a"}',
     )
     log_model_1.dttm = timezone.parse(self.default_time)
     log_model_2.dttm = timezone.parse(self.default_time_2)
     log_model_3.dttm = timezone.parse(self.default_time_2)
     session.add_all([log_model_1, log_model_2, log_model_3])
     session.commit()
     response = self.client.get("/api/v1/eventLogs",
                                environ_overrides={'REMOTE_USER': "******"})
     assert response.status_code == 200
     self.assertEqual(
         response.json,
         {
             "event_logs": [
                 {
                     "event_log_id": log_model_1.id,
                     "event": "TEST_EVENT_1",
                     "dag_id": "TEST_DAG_ID",
                     "task_id": "TEST_TASK_ID",
                     "execution_date": self.default_time,
                     "owner": 'airflow',
                     "when": self.default_time,
                     "extra": None,
                 },
                 {
                     "event_log_id": log_model_2.id,
                     "event": "TEST_EVENT_2",
                     "dag_id": "TEST_DAG_ID",
                     "task_id": "TEST_TASK_ID",
                     "execution_date": self.default_time,
                     "owner": 'airflow',
                     "when": self.default_time_2,
                     "extra": None,
                 },
                 {
                     "event_log_id": log_model_3.id,
                     "event": "cli_scheduler",
                     "dag_id": None,
                     "task_id": None,
                     "execution_date": None,
                     "owner": 'root',
                     "when": self.default_time_2,
                     "extra": '{"host_name": "e24b454f002a"}',
                 },
             ],
             "total_entries":
             3,
         },
     )
Example #3
 def test_should_response_200(self, session):
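     # GET /api/v1/eventLogs/{event_log_id} should return the single serialized log entry.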
     log_model = Log(
         event='TEST_EVENT',
         task_instance=self._create_task_instance(),
     )
     log_model.dttm = timezone.parse(self.default_time)
     session.add(log_model)
     session.commit()
     event_log_id = log_model.id
     response = self.client.get(f"/api/v1/eventLogs/{event_log_id}",
                                environ_overrides={'REMOTE_USER': "******"})
     assert response.status_code == 200
     self.assertEqual(
         response.json,
         {
             "event_log_id": event_log_id,
             "event": "TEST_EVENT",
             "dag_id": "TEST_DAG_ID",
             "task_id": "TEST_TASK_ID",
             "execution_date": self.default_time,
             "owner": 'airflow',
             "when": self.default_time,
             "extra": None,
         },
     )
Example #4
    def test_should_raises_401_unauthenticated(self, session):
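        # Listing event logs without credentials must be rejected with 401.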
        log_model_1 = Log(
            event='TEST_EVENT_1',
            task_instance=self._create_task_instance(),
        )
        log_model_2 = Log(
            event='TEST_EVENT_2',
            task_instance=self._create_task_instance(),
        )
        log_model_1.dttm = timezone.parse(self.default_time)
        log_model_2.dttm = timezone.parse(self.default_time_2)
        session.add_all([log_model_1, log_model_2])
        session.commit()

        response = self.client.get("/api/v1/eventLogs")

        assert_401(response)
Example #5
    def test_should_raises_401_unauthenticated(self, session):
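        # Fetching a single event log without credentials must be rejected with 401.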
        log_model = Log(
            event='TEST_EVENT',
            task_instance=self._create_task_instance(),
        )
        log_model.dttm = timezone.parse(self.default_time)
        session.add(log_model)
        session.commit()
        event_log_id = log_model.id

        response = self.client.get(f"/api/v1/eventLogs/{event_log_id}")

        assert_401(response)
Example #6
 def test_should_response_200(self, session):
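     # GET /api/v1/eventLogs should return both persisted logs in the collection payload.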
     log_model_1 = Log(
         event='TEST_EVENT_1',
         task_instance=self._create_task_instance(),
     )
     log_model_2 = Log(
         event='TEST_EVENT_2',
         task_instance=self._create_task_instance(),
     )
     log_model_1.dttm = timezone.parse(self.default_time)
     log_model_2.dttm = timezone.parse(self.default_time_2)
     session.add_all([log_model_1, log_model_2])
     session.commit()
     response = self.client.get("/api/v1/eventLogs")
     assert response.status_code == 200
     self.assertEqual(
         response.json,
         {
             "event_logs": [
                 {
                     "event_log_id": log_model_1.id,
                     "event": "TEST_EVENT_1",
                     "dag_id": "TEST_DAG_ID",
                     "task_id": "TEST_TASK_ID",
                     "execution_date": self.default_time,
                     "owner": 'airflow',
                     "when": self.default_time,
                     "extra": None,
                 },
                 {
                     "event_log_id": log_model_2.id,
                     "event": "TEST_EVENT_2",
                     "dag_id": "TEST_DAG_ID",
                     "task_id": "TEST_TASK_ID",
                     "execution_date": self.default_time,
                     "owner": 'airflow',
                     "when": self.default_time_2,
                     "extra": None,
                 },
             ],
             "total_entries": 2,
         },
     )
Example #7
# Imports assumed for this snippet (Airflow 1.10-era module paths; newer
# releases moved or renamed some of these).
from airflow.models import DAG, Log, TaskInstance
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.state import State


def add_log(execdate, session, timezone_override=None):
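    """Create and commit a Log row for a dummy task run at ``execdate``.

    Builds a throwaway DAG with a single DummyOperator, merges a successful
    TaskInstance for ``execdate``, and commits a ``State.RUNNING`` log entry;
    ``timezone_override`` optionally converts the entry's ``dttm``.
    """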
    dag = DAG(dag_id='logging', default_args={'start_date': execdate})
    task = DummyOperator(task_id='dummy', dag=dag, owner='airflow')
    task_instance = TaskInstance(task=task,
                                 execution_date=execdate,
                                 state='success')
    session.merge(task_instance)
    log = Log(State.RUNNING, task_instance)
    if timezone_override:
        log.dttm = log.dttm.astimezone(timezone_override)
    session.add(log)
    session.commit()
    return log
Example #8
 def test_serialize(self, session):
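     # Dump a single persisted Log through event_log_schema and compare the field mapping.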
     event_log_model = Log(event="TEST_EVENT", task_instance=self._create_task_instance())
     session.add(event_log_model)
     session.commit()
     event_log_model.dttm = timezone.parse(self.default_time)
     log_model = session.query(Log).first()
     deserialized_log = event_log_schema.dump(log_model)
     assert deserialized_log == {
         "event_log_id": event_log_model.id,
         "event": "TEST_EVENT",
         "dag_id": "TEST_DAG_ID",
         "task_id": "TEST_TASK_ID",
         "execution_date": self.default_time,
         "owner": 'airflow',
         "when": self.default_time,
         "extra": None,
     }