def _on_config_inited(app, config):
    del app
    jinja_context = getattr(config, 'jinja_contexts', None) or {}

    jinja_context['providers_ctx'] = {'providers': load_package_data()}

    config.jinja_contexts = jinja_context
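
This handler only takes effect once it is registered for Sphinx's config-inited event, typically from the extension's setup() function; the jinja_contexts value it populates is what the sphinx-jinja extension reads. A minimal sketch of that wiring, assuming nothing else needs to happen in setup():

def setup(app):
    # Populate the jinja context once the configuration has been read,
    # before any page that uses the jinja directive is rendered.
    app.connect('config-inited', _on_config_inited)
    return {'parallel_read_safe': True}
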
Example #2
def _prepare_transfer_data(tags: Optional[Set[str]]):
    package_data = load_package_data()
    all_operators_by_integration = _prepare_resource_index(
        package_data, "integrations")
    # Add edge case
    for name in ["SQL", "Local"]:
        all_operators_by_integration[name] = {"integration-name": name}
    all_transfers = [
        {
            **transfer,
            'package-name': provider['package-name'],
            'source-integration': all_operators_by_integration[transfer['source-integration-name']],
            'target-integration': all_operators_by_integration[transfer['target-integration-name']],
        }
        for provider in package_data
        for transfer in provider.get("transfers", [])
    ]
    if tags is None:
        to_display_transfers = all_transfers
    else:
        to_display_transfers = [
            transfer
            for transfer in all_transfers
            if tags.intersection(transfer['source-integration'].get('tags', set()))
            or tags.intersection(transfer['target-integration'].get('tags', set()))
        ]

    for transfer in to_display_transfers:
        if 'how-to-guide' not in transfer:
            continue
        transfer['how-to-guide'] = _docs_path(transfer['how-to-guide'])
    return to_display_transfers
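
This example and the next both rely on a _prepare_resource_index helper that is not shown in this listing. A hypothetical sketch of what it plausibly does, assuming every resource entry in provider.yaml carries an 'integration-name' key:

def _prepare_resource_index(package_data, resource_type):
    # Hypothetical reconstruction: index every resource of the given type
    # ("integrations", "operators", "hooks", ...) by its integration name.
    return {
        resource["integration-name"]: resource
        for provider in package_data
        for resource in provider.get(resource_type, [])
    }
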
Example #3
def _prepare_operators_data(tags: Optional[Set[str]]):
    package_data = load_package_data()
    all_integrations = _prepare_resource_index(package_data, "integrations")
    if tags is None:
        to_display_integration = list(all_integrations.values())
    else:
        to_display_integration = [
            integration for integration in all_integrations.values() if tags.intersection(integration["tags"])
        ]

    all_operators_by_integration = _prepare_resource_index(package_data, "operators")
    all_hooks_by_integration = _prepare_resource_index(package_data, "hooks")
    all_sensors_by_integration = _prepare_resource_index(package_data, "sensors")
    results = []

    for integration in to_display_integration:
        item = {
            "integration": integration,
        }
        operators = all_operators_by_integration.get(integration['integration-name'])
        sensors = all_sensors_by_integration.get(integration['integration-name'])
        hooks = all_hooks_by_integration.get(integration['integration-name'])

        if 'how-to-guide' in item['integration']:
            item['integration']['how-to-guide'] = [_docs_path(d) for d in item['integration']['how-to-guide']]
        if operators:
            item['operators'] = operators
        if sensors:
            item['sensors'] = sensors
        if hooks:
            item['hooks'] = hooks
        if operators or sensors or hooks:
            results.append(item)

    return sorted(results, key=lambda d: d["integration"]["integration-name"].lower())
Example #4
def _prepare_extra_links_data():
    package_data = load_package_data()
    all_extra_links = {}
    for provider in package_data:
        extra_link_list = provider.get("extra-links")
        if extra_link_list:
            package_name = provider['package-name']
            all_extra_links[package_name] = {
                'name': provider['name'],
                'extra_links': extra_link_list,
            }
    return all_extra_links
Example #5
def _prepare_connections_data():
    package_data = load_package_data()
    all_connections = {}
    for provider in package_data:
        connections_list = provider.get("connection-types")
        if connections_list:
            package_name = provider['package-name']
            all_connections[package_name] = {
                'name': provider['name'],
                'connection_types': connections_list,
            }
    return all_connections
Example #6
def _prepare_secrets_backend_data():
    package_data = load_package_data()
    all_secret_backends = {}
    for provider in package_data:
        secret_backends_list = provider.get("secrets-backends")
        if secret_backends_list:
            package_name = provider['package-name']
            all_secret_backends[package_name] = {
                'name': provider['name'],
                'secrets_backends': secret_backends_list,
            }
    return all_secret_backends
Example #7
def _prepare_auth_backend_data():
    package_data = load_package_data()
    all_auth_backends = {}
    for provider in package_data:
        auth_backends_list = provider.get("auth-backends")
        if auth_backends_list:
            package_name = provider['package-name']
            all_auth_backends[package_name] = {
                'name': provider['name'],
                'auth_backends': auth_backends_list
            }
    return all_auth_backends
Example #8
def _prepare_logging_data():
    package_data = load_package_data()
    all_logging = {}
    for provider in package_data:
        logging_handlers = provider.get("logging")
        if logging_handlers:
            package_name = provider['package-name']
            all_logging[package_name] = {
                'name': provider['name'],
                'handlers': logging_handlers
            }
    return all_logging
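
Examples #4 through #8 repeat the same collect-by-package-name pattern with a different provider.yaml field each time. A hedged sketch of a generic helper that could express the pattern once; the helper is illustrative and not part of the Airflow docs tooling:

def _prepare_provider_field_data(field: str, result_key: str):
    # Collect a list-valued provider.yaml field into a
    # {package-name: {'name': ..., result_key: [...]}} mapping.
    package_data = load_package_data()
    collected = {}
    for provider in package_data:
        values = provider.get(field)
        if values:
            collected[provider['package-name']] = {
                'name': provider['name'],
                result_key: values,
            }
    return collected

With it, _prepare_connections_data() reduces to _prepare_provider_field_data('connection-types', 'connection_types'), and similarly for the other four helpers.
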
Example #9
def _generate_provider_intersphinx_mapping():
    airflow_mapping = {}
    for_production = os.environ.get('AIRFLOW_FOR_PRODUCTION', 'false') == 'true'
    current_version = 'stable' if for_production else 'latest'

    for provider in load_package_data():
        package_name = provider['package-name']
        if os.environ.get('AIRFLOW_PACKAGE_NAME') == package_name:
            continue

        provider_base_url = f'/docs/{package_name}/{current_version}/'
        doc_inventory = f'{DOCS_DIR}/_build/docs/{package_name}/{current_version}/objects.inv'
        cache_inventory = f'{DOCS_DIR}/_inventory_cache/{package_name}/objects.inv'

        # Skip adding the mapping if the path does not exist
        if not os.path.exists(doc_inventory) and not os.path.exists(cache_inventory):
            continue

        airflow_mapping[package_name] = (
            # base URI
            provider_base_url,
            (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
        )
    for pkg_name in ["apache-airflow", 'helm-chart']:
        if os.environ.get('AIRFLOW_PACKAGE_NAME') == pkg_name:
            continue
        doc_inventory = f'{DOCS_DIR}/_build/docs/{pkg_name}/{current_version}/objects.inv'
        cache_inventory = f'{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv'

        airflow_mapping[pkg_name] = (
            # base URI
            f'/docs/{pkg_name}/latest/',
            (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
        )
    for pkg_name in ['apache-airflow-providers', 'docker-stack']:
        if os.environ.get('AIRFLOW_PACKAGE_NAME') == pkg_name:
            continue
        doc_inventory = f'{DOCS_DIR}/_build/docs/{pkg_name}/objects.inv'
        cache_inventory = f'{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv'

        airflow_mapping[pkg_name] = (
            # base URI
            f'/docs/{pkg_name}/',
            (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
        )

    return airflow_mapping
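
The mapping built here is shaped for sphinx.ext.intersphinx, whose intersphinx_mapping entries are (base_url, (inventory, ...)) tuples. A minimal sketch of how such a mapping might be handed to the extension from a config-inited handler; the function name below is illustrative:

def _update_intersphinx_mapping(app, config):
    del app
    # Merge the provider inventories into whatever mapping conf.py already defines.
    config.intersphinx_mapping.update(_generate_provider_intersphinx_mapping())


def setup(app):
    app.setup_extension('sphinx.ext.intersphinx')
    app.connect('config-inited', _update_intersphinx_mapping)
    return {'parallel_read_safe': True, 'parallel_write_safe': True}
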
Example #10
def _generate_provider_intersphinx_mapping():
    airflow_mapping = {}
    for provider in load_package_data():
        package_name = provider['package-name']
        if os.environ.get('AIRFLOW_PACKAGE_NAME') == package_name:
            continue

        # For local builds and S3, use relative URLs.
        # For RTD, use absolute URLs.
        if IS_RTD:
            provider_base_url = f"{S3_DOC_URL}/docs/{package_name}/latest/"
        else:
            provider_base_url = f'/docs/{package_name}/latest/'

        airflow_mapping[package_name] = (
            # base URI
            provider_base_url,
            # Index locations list
            # If passed None, this will try to fetch the index from `[base_url]/objects.inv`
            # If we pass a path containing `://` then we will try to index from the given address.
            # Otherwise, it will try to read the local file
            #
            # In this case, the local index will be read. If unsuccessful, the remote index
            # will be fetched.
            (
                f'{DOCS_DIR}/_build/docs/{package_name}/latest/objects.inv',
                f'{S3_DOC_URL}/docs/{package_name}/latest/objects.inv',
            ),
        )
    if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow':
        airflow_mapping['apache-airflow'] = (
            # base URI
            '/docs/apache-airflow/latest/',
            # Index locations list
            # If passed None, this will try to fetch the index from `[base_url]/objects.inv`
            # If we pass a path containing `://` then we will try to index from the given address.
            # Otherwise, it will try to read the local file
            #
            # In this case, the local index will be read. If unsuccessful, the remote index
            # will be fetched.
            (
                f'{DOCS_DIR}/_build/docs/apache-airflow/latest/objects.inv',
                'https://airflow.readthedocs.io/en/latest/objects.inv',
            ),
        )

    return airflow_mapping
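
This variant assumes three module-level names defined elsewhere in the docs configuration: DOCS_DIR, IS_RTD and S3_DOC_URL. A hedged sketch of how they might be derived; the published-docs URL is a placeholder, not the real address:

import os

DOCS_DIR = os.path.dirname(os.path.abspath(__file__))  # assumption: the plugin lives in the docs tree
IS_RTD = os.environ.get('READTHEDOCS', '') == 'True'    # Read the Docs sets this variable during builds
S3_DOC_URL = 'https://<published-docs-site>'            # placeholder for the published documentation URL
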
Example #11
def _generate_provider_intersphinx_mapping():
    airflow_mapping = {}
    for_production = os.environ.get('AIRFLOW_FOR_PRODUCTION', 'false') == 'true'
    current_version = 'stable' if for_production else 'latest'

    for provider in load_package_data():
        package_name = provider['package-name']
        if os.environ.get('AIRFLOW_PACKAGE_NAME') == package_name:
            continue

        provider_base_url = f'/docs/{package_name}/{current_version}/'
        doc_inventory = f'{DOCS_DIR}/_build/docs/{package_name}/{current_version}/objects.inv'
        cache_inventory = f'{DOCS_DIR}/_inventory_cache/{package_name}/objects.inv'

        airflow_mapping[package_name] = (
            # base URI
            provider_base_url,
            (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
        )
    if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow':
        doc_inventory = f'{DOCS_DIR}/_build/docs/apache-airflow/{current_version}/objects.inv'
        cache_inventory = f'{DOCS_DIR}/_inventory_cache/apache-airflow/objects.inv'

        airflow_mapping['apache-airflow'] = (
            # base URI
            f'/docs/apache-airflow/{current_version}/',
            (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
        )

    if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow-providers':
        doc_inventory = f'{DOCS_DIR}/_build/docs/apache-airflow-providers/objects.inv'
        cache_inventory = f'{DOCS_DIR}/_inventory_cache/apache-airflow-providers/objects.inv'

        airflow_mapping['apache-airflow-providers'] = (
            # base URI
            '/docs/apache-airflow-providers/',
            (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,),
        )

    return airflow_mapping
Example #12
CONF_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
INVENTORY_CACHE_DIR = os.path.join(CONF_DIR, '_inventory_cache')
ROOT_DIR = os.path.abspath(os.path.join(CONF_DIR, os.pardir))
FOR_PRODUCTION = os.environ.get('AIRFLOW_FOR_PRODUCTION', 'false') == 'true'

# By default (e.g. on RTD), build docs for `airflow` package
PACKAGE_NAME = os.environ.get('AIRFLOW_PACKAGE_NAME', 'apache-airflow')
PACKAGE_DIR: Optional[str]
if PACKAGE_NAME == 'apache-airflow':
    PACKAGE_DIR = os.path.join(ROOT_DIR, 'airflow')
    PACKAGE_VERSION = airflow.__version__
elif PACKAGE_NAME.startswith('apache-airflow-providers-'):
    from provider_yaml_utils import load_package_data  # pylint: disable=no-name-in-module

    ALL_PROVIDER_YAMLS = load_package_data()
    try:
        CURRENT_PROVIDER = next(
            provider_yaml for provider_yaml in ALL_PROVIDER_YAMLS
            if provider_yaml['package-name'] == PACKAGE_NAME)
    except StopIteration:
        raise Exception(
            f"Could not find provider.yaml file for package: {PACKAGE_NAME}")
    PACKAGE_DIR = CURRENT_PROVIDER['package-dir']
    PACKAGE_VERSION = 'master'
else:
    PACKAGE_DIR = None
    PACKAGE_VERSION = 'master'
# Add to the environment variables for easy access from other plugins like airflow_intersphinx.
os.environ['AIRFLOW_PACKAGE_NAME'] = PACKAGE_NAME
if PACKAGE_DIR:
    os.environ['AIRFLOW_PACKAGE_DIR'] = PACKAGE_DIR
Example #13
def _generate_provider_intersphinx_mapping():
    airflow_mapping = {}
    for_production = os.environ.get('AIRFLOW_FOR_PRODUCTION', 'false') == 'true'
    current_version = 'stable' if for_production else 'latest'

    for provider in load_package_data():
        package_name = provider['package-name']
        if os.environ.get('AIRFLOW_PACKAGE_NAME') == package_name:
            continue

        provider_base_url = f'/docs/{package_name}/{current_version}/'

        airflow_mapping[package_name] = (
            # base URI
            provider_base_url,
            # Index locations list
            # If passed None, this will try to fetch the index from `[base_url]/objects.inv`
            # If we pass a path containing `://` then we will try to index from the given address.
            # Otherwise, it will try to read the local file
            #
            # In this case, the local index will be read. If unsuccessful, the remote index
            # will be fetched.
            (
                f'{DOCS_DIR}/_build/docs/{package_name}/{current_version}/objects.inv',
                f'{S3_DOC_URL}/docs/{package_name}/latest/objects.inv',
            ),
        )
    if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow':
        airflow_mapping['apache-airflow'] = (
            # base URI
            f'/docs/apache-airflow/{current_version}/',
            # Index locations list
            # If passed None, this will try to fetch the index from `[base_url]/objects.inv`
            # If we pass a path containing `://` then we will try to index from the given address.
            # Otherwise, it will try to read the local file
            #
            # In this case, the local index will be read. If unsuccessful, the remote index
            # will be fetched.
            (
                f'{DOCS_DIR}/_build/docs/apache-airflow/{current_version}/objects.inv',
                f'{S3_DOC_URL}/docs/apache-airflow/latest/objects.inv',
            ),
        )

    if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow-providers':
        airflow_mapping['apache-airflow-providers'] = (
            # base URI
            '/docs/apache-airflow-providers/',
            # Index locations list
            # If passed None, this will try to fetch the index from `[base_url]/objects.inv`
            # If we pass a path containing `://` then we will try to index from the given address.
            # Otherwise, it will try to read the local file
            #
            # In this case, the local index will be read. If unsuccessful, the remote index
            # will be fetched.
            (
                f'{DOCS_DIR}/_build/docs/apache-airflow-providers/objects.inv',
                f'{S3_DOC_URL}/docs/apache-airflow-providers/objects.inv',
            ),
        )

    return airflow_mapping