Example no. 1
def get_dag_warnings(
    *,
    limit: int,
    dag_id: Optional[str] = None,
    warning_type: Optional[str] = None,
    offset: Optional[int] = None,
    order_by: str = "timestamp",
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get DAG warnings.

    :param dag_id: the dag_id to optionally filter by
    :param warning_type: the warning type to optionally filter by
    """
    allowed_filter_attrs = ["dag_id", "warning_type", "message", "timestamp"]
    query = session.query(DagWarningModel)
    if dag_id:
        query = query.filter(DagWarningModel.dag_id == dag_id)
    if warning_type:
        query = query.filter(DagWarningModel.warning_type == warning_type)
    total_entries = query.count()
    query = apply_sorting(query=query, order_by=order_by, allowed_attrs=allowed_filter_attrs)
    dag_warnings = query.offset(offset).limit(limit).all()
    return dag_warning_collection_schema.dump(
        DagWarningCollection(dag_warnings=dag_warnings, total_entries=total_entries)
    )
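Every example in this collection delegates ordering to an apply_sorting helper whose implementation is not shown. The sketch below is a minimal, assumed reconstruction inferred only from the call sites in these examples (positional and keyword calls passing query, order_by, an optional to_replace mapping, and an allowed_attrs whitelist); it is not the actual library code.

from typing import Dict, List, Optional

from sqlalchemy import text
from sqlalchemy.orm import Query


def apply_sorting(
    query: Query,
    order_by: str,
    to_replace: Optional[Dict[str, str]] = None,
    allowed_attrs: Optional[List[str]] = None,
) -> Query:
    """Sketch of a sorting helper compatible with the calls above (assumed, not the real implementation)."""
    # A leading "-" requests descending order, e.g. "-timestamp".
    stripped = order_by.lstrip("-")
    # Reject attributes that are not explicitly whitelisted.
    if allowed_attrs and stripped not in allowed_attrs:
        raise ValueError(f"Ordering with '{stripped}' is not allowed")
    # Map API-level field names to model column names, e.g. "dag_run_id" -> "run_id".
    if to_replace:
        stripped = to_replace.get(stripped, stripped)
    direction = "desc" if order_by.startswith("-") else "asc"
    return query.order_by(text(f"{stripped} {direction}"))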
Example no. 2
def get_event_logs(
    *,
    limit: int,
    offset: Optional[int] = None,
    order_by: str = "event_log_id",
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get all log entries from event log"""
    to_replace = {"event_log_id": "id", "when": "dttm"}
    allowed_filter_attrs = [
        "event_log_id",
        "when",
        "dag_id",
        "task_id",
        "event",
        "execution_date",
        "owner",
        "extra",
    ]
    total_entries = session.query(func.count(Log.id)).scalar()
    query = session.query(Log)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    event_logs = query.offset(offset).limit(limit).all()
    return event_log_collection_schema.dump(
        EventLogCollection(event_logs=event_logs, total_entries=total_entries))
Example no. 3
def get_pools(session, limit, order_by='id', offset=None):
    """Get all pools"""
    to_replace = {"name": "pool"}
    allowed_filter_attrs = ["name", "slots", "id"]
    total_entries = session.query(func.count(Pool.id)).scalar()
    query = session.query(Pool)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    pools = query.offset(offset).limit(limit).all()
    return pool_collection_schema.dump(
        PoolCollection(pools=pools, total_entries=total_entries))
Example no. 4
def get_import_errors(session, limit, offset=None, order_by='import_error_id'):
    """Get all import errors"""
    to_replace = {"import_error_id": "id"}
    allowed_filter_attrs = ["import_error_id", "timestamp", "filename"]
    total_entries = session.query(func.count(ImportErrorModel.id)).scalar()
    query = session.query(ImportErrorModel)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    import_errors = query.offset(offset).limit(limit).all()
    return import_error_collection_schema.dump(
        ImportErrorCollection(import_errors=import_errors, total_entries=total_entries)
    )
Example no. 5
def get_roles(limit, order_by='name', offset=None):
    """Get roles"""
    appbuilder = current_app.appbuilder
    session = appbuilder.get_session
    total_entries = session.query(func.count(Role.id)).scalar()
    to_replace = {"role_id": "id"}
    allowed_filter_attrs = ['role_id', 'name']
    query = session.query(Role)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    roles = query.offset(offset).limit(limit).all()

    return role_collection_schema.dump(RoleCollection(roles=roles, total_entries=total_entries))
Example no. 6
def get_roles(*, order_by: str = "name", limit: int, offset: Optional[int] = None) -> APIResponse:
    """Get roles"""
    appbuilder = current_app.appbuilder
    session = appbuilder.get_session
    total_entries = session.query(func.count(Role.id)).scalar()
    to_replace = {"role_id": "id"}
    allowed_filter_attrs = ["role_id", "name"]
    query = session.query(Role)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    roles = query.offset(offset).limit(limit).all()

    return role_collection_schema.dump(RoleCollection(roles=roles, total_entries=total_entries))
Example no. 7
def get_connections(session, limit, offset=0, order_by="id"):
    """Get all connection entries"""
    to_replace = {"connection_id": "conn_id"}
    allowed_filter_attrs = [
        'connection_id', 'conn_type', 'description', 'host', 'port', 'id'
    ]

    total_entries = session.query(func.count(Connection.id)).scalar()
    query = session.query(Connection)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    connections = query.offset(offset).limit(limit).all()
    return connection_collection_schema.dump(
        ConnectionCollection(connections=connections,
                             total_entries=total_entries))
Example no. 8
def get_variables(session,
                  limit: Optional[int],
                  order_by: str = "id",
                  offset: Optional[int] = None) -> Response:
    """Get all variable values"""
    total_entries = session.query(func.count(Variable.id)).scalar()
    to_replace = {"value": "val"}
    allowed_filter_attrs = ['value', 'key', 'id']
    query = session.query(Variable)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    variables = query.offset(offset).limit(limit).all()
    return variable_collection_schema.dump({
        "variables": variables,
        "total_entries": total_entries,
    })
Example no. 9
def get_pools(
    *,
    limit: int,
    order_by: str = "id",
    offset: Optional[int] = None,
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get all pools"""
    to_replace = {"name": "pool"}
    allowed_filter_attrs = ["name", "slots", "id"]
    total_entries = session.query(func.count(Pool.id)).scalar()
    query = session.query(Pool)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    pools = query.offset(offset).limit(limit).all()
    return pool_collection_schema.dump(
        PoolCollection(pools=pools, total_entries=total_entries))
Example no. 10
def get_import_errors(
    *,
    limit: int,
    offset: Optional[int] = None,
    order_by: str = "import_error_id",
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get all import errors"""
    to_replace = {"import_error_id": "id"}
    allowed_filter_attrs = ["import_error_id", "timestamp", "filename"]
    total_entries = session.query(func.count(ImportErrorModel.id)).scalar()
    query = session.query(ImportErrorModel)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    import_errors = query.offset(offset).limit(limit).all()
    return import_error_collection_schema.dump(
        ImportErrorCollection(import_errors=import_errors,
                              total_entries=total_entries))
Example no. 11
def _fetch_dag_runs(
    query: Query,
    *,
    end_date_gte: Optional[str],
    end_date_lte: Optional[str],
    execution_date_gte: Optional[str],
    execution_date_lte: Optional[str],
    start_date_gte: Optional[str],
    start_date_lte: Optional[str],
    limit: Optional[int],
    offset: Optional[int],
    order_by: str,
) -> Tuple[List[DagRun], int]:
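    # filter start date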
    if start_date_gte:
        query = query.filter(DagRun.start_date >= start_date_gte)
    if start_date_lte:
        query = query.filter(DagRun.start_date <= start_date_lte)
    # filter execution date
    if execution_date_gte:
        query = query.filter(DagRun.execution_date >= execution_date_gte)
    if execution_date_lte:
        query = query.filter(DagRun.execution_date <= execution_date_lte)
    # filter end date
    if end_date_gte:
        query = query.filter(DagRun.end_date >= end_date_gte)
    if end_date_lte:
        query = query.filter(DagRun.end_date <= end_date_lte)

    total_entries = query.count()
    to_replace = {"dag_run_id": "run_id"}
    allowed_filter_attrs = [
        "id",
        "state",
        "dag_id",
        "execution_date",
        "dag_run_id",
        "start_date",
        "end_date",
        "external_trigger",
        "conf",
    ]
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    return query.offset(offset).limit(limit).all(), total_entries
Example no. 12
def get_users(*, limit: int, order_by: str = "id", offset: Optional[int] = None) -> APIResponse:
    """Get users"""
    appbuilder = current_app.appbuilder
    session = appbuilder.get_session
    total_entries = session.query(func.count(User.id)).scalar()
    to_replace = {"user_id": "id"}
    allowed_filter_attrs = [
        "user_id",
        "id",
        "first_name",
        "last_name",
        "user_name",
        "email",
        "is_active",
        "role",
    ]
    query = session.query(User)
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    users = query.offset(offset).limit(limit).all()

    return user_collection_schema.dump(UserCollection(users=users, total_entries=total_entries))
Example no. 13
def get_datasets(
    *,
    limit: int,
    offset: int = 0,
    uri_pattern: Optional[str] = None,
    order_by: str = "id",
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get datasets"""
    allowed_attrs = ['id', 'uri', 'created_at', 'updated_at']

    total_entries = session.query(func.count(DatasetModel.id)).scalar()
    query = session.query(DatasetModel)
    if uri_pattern:
        query = query.filter(DatasetModel.uri.ilike(f"%{uri_pattern}%"))
    query = apply_sorting(query, order_by, {}, allowed_attrs)
    datasets = (
        query.options(subqueryload(DatasetModel.consuming_dags), subqueryload(DatasetModel.producing_tasks))
        .offset(offset)
        .limit(limit)
        .all()
    )
    return dataset_collection_schema.dump(
        DatasetCollection(datasets=datasets, total_entries=total_entries))
Example no. 14
def _fetch_dag_runs(
    query,
    end_date_gte,
    end_date_lte,
    execution_date_gte,
    execution_date_lte,
    start_date_gte,
    start_date_lte,
    limit,
    offset,
    order_by,
):
    query = _apply_date_filters_to_query(
        query,
        end_date_gte,
        end_date_lte,
        execution_date_gte,
        execution_date_lte,
        start_date_gte,
        start_date_lte,
    )
    # Count items
    total_entries = query.count()
    # sort
    to_replace = {"dag_run_id": "run_id"}
    allowed_filter_attrs = [
        "id",
        "state",
        "dag_id",
        "execution_date",
        "dag_run_id",
        "start_date",
        "end_date",
        "external_trigger",
        "conf",
    ]
    query = apply_sorting(query, order_by, to_replace, allowed_filter_attrs)
    # apply offset and limit
    dag_run = query.offset(offset).limit(limit).all()
    return dag_run, total_entries
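Example no. 14 factors the date filtering into an _apply_date_filters_to_query helper that is not shown here. Based on the filters inlined in Example no. 11, a plausible sketch of that helper (argument order assumed from the positional call above, using the same DagRun model referenced in both examples) would be:

def _apply_date_filters_to_query(
    query,
    end_date_gte,
    end_date_lte,
    execution_date_gte,
    execution_date_lte,
    start_date_gte,
    start_date_lte,
):
    """Sketch reconstructed from the inlined filters in Example no. 11; not the original helper."""
    # filter start date
    if start_date_gte:
        query = query.filter(DagRun.start_date >= start_date_gte)
    if start_date_lte:
        query = query.filter(DagRun.start_date <= start_date_lte)
    # filter execution date
    if execution_date_gte:
        query = query.filter(DagRun.execution_date >= execution_date_gte)
    if execution_date_lte:
        query = query.filter(DagRun.execution_date <= execution_date_lte)
    # filter end date
    if end_date_gte:
        query = query.filter(DagRun.end_date >= end_date_gte)
    if end_date_lte:
        query = query.filter(DagRun.end_date <= end_date_lte)
    return query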
Example no. 15
def get_dataset_events(
    *,
    limit: int,
    offset: int = 0,
    order_by: str = "timestamp",
    dataset_id: Optional[int] = None,
    source_dag_id: Optional[str] = None,
    source_task_id: Optional[str] = None,
    source_run_id: Optional[str] = None,
    source_map_index: Optional[int] = None,
    session: Session = NEW_SESSION,
) -> APIResponse:
    """Get dataset events"""
    allowed_attrs = [
        'source_dag_id', 'source_task_id', 'source_run_id', 'source_map_index',
        'timestamp'
    ]

    query = session.query(DatasetEvent)

    if dataset_id:
        query = query.filter(DatasetEvent.dataset_id == dataset_id)
    if source_dag_id:
        query = query.filter(DatasetEvent.source_dag_id == source_dag_id)
    if source_task_id:
        query = query.filter(DatasetEvent.source_task_id == source_task_id)
    if source_run_id:
        query = query.filter(DatasetEvent.source_run_id == source_run_id)
    if source_map_index:
        query = query.filter(DatasetEvent.source_map_index == source_map_index)

    total_entries = query.count()
    query = apply_sorting(query, order_by, {}, allowed_attrs)
    events = query.offset(offset).limit(limit).all()
    return dataset_event_collection_schema.dump(
        DatasetEventCollection(dataset_events=events,
                               total_entries=total_entries))