Example No. 1
    def store_retry(self,
                    dag_name: str,
                    location: str,
                    timestamp: Optional[str] = None) -> None:
        """Stores a retry log-item into monitoring DB.

    Args:
      dag_name: Airflow DAG ID that is associated with the current monitoring.
      location: The run input resource location URL.
      timestamp: The log timestamp. If None, current timestamp will be used.
    """
        if timestamp is None:
            timestamp = _generate_zone_aware_timestamp()

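        # A retry entry carries no position/info payload, so those generic
        # row columns are stored empty.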
        row = self._values_to_row(dag_name=dag_name,
                                  timestamp=timestamp,
                                  type_id=MonitoringEntityMap.RETRY.value,
                                  location=location,
                                  position='',
                                  info='')
        try:
            self._store_monitoring_items_with_retries([row])
        except exceptions.AirflowException as error:
            raise errors.MonitoringAppendLogError(
                error=error, msg='Failed to insert retry row')
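
The helper `_generate_zone_aware_timestamp` is referenced but not shown in these examples. Judging by its name and the docstring ("If None, the current timestamp will be used"), a minimal sketch of what it is assumed to return could look like this:

    import datetime

    def _generate_zone_aware_timestamp() -> str:
        # Assumed behavior: the current time as an ISO-8601 string with an
        # explicit timezone offset (UTC here), e.g. '2024-01-01T00:00:00+00:00'.
        return datetime.datetime.now(datetime.timezone.utc).isoformat()
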
Example No. 2
    def store_blob(self,
                   dag_name: str,
                   location: str,
                   position: int,
                   num_rows: int,
                   timestamp: Optional[str] = None) -> None:
        """Stores all blobs log-item into monitoring DB.

    Args:
      dag_name: Airflow DAG ID that is associated with the current monitoring.
      location: The run input resource location URL.
      position: The events' starting position within the BigQuery table or
        Google Cloud Storage blob file.
      num_rows: Number of rows read in blob starting from start_id.
      timestamp: The log timestamp. If None, current timestamp will be used.
    """
        if timestamp is None:
            timestamp = _generate_zone_aware_timestamp()

        row = self._values_to_row(dag_name=dag_name,
                                  timestamp=timestamp,
                                  type_id=MonitoringEntityMap.BLOB.value,
                                  location=location,
                                  position=str(position),
                                  info=str(num_rows))
        try:
            self._store_monitoring_items_with_retries([row])
        except exceptions.AirflowException as error:
            raise errors.MonitoringAppendLogError(error=error,
                                                  msg='Failed to insert rows')
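
As a hedged illustration of how `position` and `num_rows` relate (the values below are invented), the pair describes a read window over the input resource, so a caller tracking progress could compute where the next read would start:

    # Illustrative values only; store_blob itself just logs the pair.
    position = 1000  # first row/line read from the BigQuery table or GCS blob
    num_rows = 500   # number of rows read starting at `position`
    next_position = position + num_rows  # plausible resume point: 1500
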
Example No. 3
    def store_run(self,
                  dag_name: str,
                  location: str,
                  timestamp: Optional[str] = None,
                  json_report_1: str = '',
                  json_report_2: str = '') -> None:
        """Stores a run log-item into monitoring DB.

    Args:
      dag_name: Airflow DAG ID that is associated with the current monitoring.
      location: The run input resource location URL.
      timestamp: The log timestamp. If None, current timestamp will be used.
      json_report_1: Any run related report data in JSON format.
      json_report_2: Any run related report data in JSON format.
    """
        if timestamp is None:
            timestamp = _generate_zone_aware_timestamp()

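        # The generic row schema has no dedicated report columns, so the two
        # JSON reports ride in the position and info fields.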
        row = self._values_to_row(dag_name=dag_name,
                                  timestamp=timestamp,
                                  type_id=MonitoringEntityMap.RUN.value,
                                  location=location,
                                  position=json_report_1,
                                  info=json_report_2)
        try:
            self._store_monitoring_items_with_retries([row])
        except exceptions.AirflowException as error:
            raise errors.MonitoringAppendLogError(error=error,
                                                  msg='Failed to insert rows')
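
Both report arguments are plain JSON strings, so a caller would typically serialize a dict first. A minimal sketch, with invented report keys:

    import json

    # Hypothetical payloads; only the JSON-string format is implied above.
    json_report_1 = json.dumps({'processed_rows': 1000})
    json_report_2 = json.dumps({'failed_rows': 7})
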
Example No. 4
    def store_events(
        self,
        dag_name: str,
        location: str,
        timestamp: Optional[str] = None,
        id_event_error_tuple_list: Optional[List[Tuple[int, Dict[str, Any],
                                                       int]]] = None
    ) -> None:
        """Stores all event log-items into monitoring DB.

    Args:
      dag_name: Airflow DAG ID that is associated with the current monitoring.
      location: The run input resource location URL.
      timestamp: The log timestamp. If None, current timestamp will be used.
      id_event_error_tuple_list: all (id, event, error_num) trupls to store.
        The tuples are a set of 3 fields:
         - id: Row IDs of events in BigQuery input table, or line numbers
           in a google cloud storage blob file.
         - event: the JSON event.
         - error: The errors.MonitoringIDsMap error ID.
    """
        if timestamp is None:
            timestamp = _generate_zone_aware_timestamp()

        rows = []
        # Unpack each (id, event, error_num) tuple; `or []` guards against the
        # None default so a call without tuples inserts nothing.
        for row_id, event, error_num in id_event_error_tuple_list or []:
            rows.append(
                self._values_to_row(dag_name=dag_name,
                                    timestamp=timestamp,
                                    type_id=error_num,
                                    location=location,
                                    position=str(row_id),
                                    info=json.dumps(event)))

        try:
            self._store_monitoring_items_with_retries(rows)
        except exceptions.AirflowException as error:
            raise errors.MonitoringAppendLogError(error=error,
                                                  msg='Failed to insert rows')
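
A sketch of the input this method expects; the event contents and error numbers below are invented placeholders, since the real IDs are defined in errors.MonitoringIDsMap:

    # Hypothetical (id, event, error_num) tuples: row IDs from the input,
    # JSON event dicts, and error IDs from errors.MonitoringIDsMap.
    id_event_error_tuple_list = [
        (0, {'field': 'value'}, 50),  # 50: placeholder error number
        (1, {'field': 'other'}, 51),  # 51: placeholder error number
    ]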