Code Example #1
# pylint: disable=unused-variable

import datetime

from dagster_airflow.factory import make_airflow_dag

DEFAULT_ARGS = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime.datetime(2019, 11, 7),
    "email": ["*****@*****.**"],
    "email_on_failure": False,
    "email_on_retry": False,
}

dag, tasks = make_airflow_dag(
    module_name="docs_snippets.integrations.airflow.hello_cereal",
    job_name="hello_cereal_job",
    dag_kwargs={
        "default_args": DEFAULT_ARGS,
        "max_active_runs": 1
    },
)
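
Example #1 uses the job-based Dagster API: make_airflow_dag imports hello_cereal_job from the named module and emits one Airflow task per op. For orientation, a minimal sketch of what such a module might contain (the op body and CSV URL are illustrative assumptions, not taken from this source):

import csv

import requests

from dagster import job, op


@op
def hello_cereal(context):
    # Illustrative: fetch a small CSV and log how many rows it has.
    response = requests.get("https://docs.dagster.io/assets/cereal.csv")
    cereals = list(csv.DictReader(response.text.split("\n")))
    context.log.info(f"Found {len(cereals)} cereals")


@job
def hello_cereal_job():
    hello_cereal()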
Code Example #2
import datetime

import yaml

from dagster_airflow.factory import make_airflow_dag

ENVIRONMENT = '''
storage:
  filesystem:
    config:
      base_dir: /tmp/dagster-airflow/hello_cereal_pipeline
'''

################################################################################
#
# NOTE: these arguments should be edited for your environment
#
################################################################################
DEFAULT_ARGS = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime.datetime(2019, 11, 7),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
}

dag, tasks = make_airflow_dag(
    # NOTE: you must ensure that docs_snippets.intro_tutorial.airflow is
    # installed or available on sys.path, otherwise, this import will fail.
    module_name='docs_snippets.intro_tutorial.airflow',
    pipeline_name='hello_cereal_pipeline',
    run_config=yaml.safe_load(ENVIRONMENT),
    dag_kwargs={
        'default_args': DEFAULT_ARGS,
        'max_active_runs': 1
    },
)
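
The ENVIRONMENT YAML is just one spelling of the run config; it parses to the same nested-dict shape that Example #7 below passes inline. A quick sketch of that equivalence (continuing the example above, so ENVIRONMENT and the yaml import are already in scope):

assert yaml.safe_load(ENVIRONMENT) == {
    "storage": {
        "filesystem": {
            "config": {"base_dir": "/tmp/dagster-airflow/hello_cereal_pipeline"}
        }
    }
}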
Code Example #3
File: backfill.py Project: openclimatefix/Satip
    }
  }
}
"""
import datetime
from airflow.models import Variable
from dagster_airflow.factory import make_airflow_dag

backfill_config = Variable.get("backfill_config", deserialize_json=True)

DEFAULT_ARGS = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime.datetime(2020, 12, 14),
    "email": ["*****@*****.**"],
    "email_on_failure": False,
    "email_on_retry": False,
}

dag, tasks = make_airflow_dag(
    module_name="satip.backfill",
    pipeline_name="download_missing_data_pipeline",
    run_config=backfill_config,
    dag_kwargs={
        "default_args": DEFAULT_ARGS,
        "max_active_runs": 1,
        "catchup": "False",
        "schedule_interval": None
    },
)
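
Reading run_config from an Airflow Variable lets the backfill window change without editing the DAG file. A sketch of seeding that Variable (the payload keys are hypothetical; the real sample config is truncated in the docstring above):

from airflow.models import Variable

# Hypothetical payload: substitute the keys shown in the module docstring.
Variable.set(
    "backfill_config",
    {"resources": {"config": {}}},
    serialize_json=True,  # store as JSON so deserialize_json=True round-trips
)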
Code Example #4
            password: baz
            database: USER_DATA
            schema: BMESICK
            warehouse: ADHOC
'''

################################################################################
#
# NOTE: these arguments should be edited for your environment
#
################################################################################
DEFAULT_ARGS = {
    'owner': 'airflow',
    'depends_on_past': False,
    'start_date': datetime.datetime(2019, 10, 21),
    'email': ['*****@*****.**'],
    'email_on_failure': False,
    'email_on_retry': False,
}

dag, tasks = make_airflow_dag(
    # NOTE: you must ensure that dags.test2-dag is
    # installed or available on sys.path, otherwise, this import will fail.
    module_name='dags.test2-dag',
    pipeline_name='test2_pipeline',
    # safe_load avoids PyYAML's deprecated (and unsafe) default loader
    environment_dict=yaml.safe_load(ENVIRONMENT),
    dag_kwargs={
        'default_args': DEFAULT_ARGS,
        'max_active_runs': 1
    })
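
Note the older API surface here: this example passes the parsed YAML as environment_dict, which later dagster-airflow releases renamed to run_config (as Examples #2, #3, and #6 show).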
Code Example #5
import datetime

import yaml

from dagster_airflow.factory import make_airflow_dag

ENVIRONMENT = """
storage:
  filesystem:
    config:
      base_dir: /tmp/dagster-airflow/hello_cereal_pipeline
"""

################################################################################
#
# NOTE: these arguments should be edited for your environment
#
################################################################################
DEFAULT_ARGS = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime.datetime(2019, 11, 7),
    "email": ["*****@*****.**"],
    "email_on_failure": False,
    "email_on_retry": False,
}

dag, tasks = make_airflow_dag(
    # NOTE: you must ensure that dagster_examples.airflow is
    # installed or available on sys.path, otherwise, this import will fail.
    module_name="dagster_examples.airflow",
    pipeline_name="hello_cereal_pipeline",
    environment_dict=yaml.safe_load(ENVIRONMENT),
    dag_kwargs={
        "default_args": DEFAULT_ARGS,
        "max_active_runs": 1
    },
)
Code Example #6
File: zarr_pipeline.py Project: openclimatefix/Satip
    config:
      base_dir: /srv/airflow/data

"""

################################################################################
#
# NOTE: these arguments should be edited for your environment
#
################################################################################
DEFAULT_ARGS = {
    "owner": "airflow",
    "depends_on_past": False,
    "start_date": datetime.datetime(2020, 12, 14),
    "email": ["*****@*****.**"],
    "email_on_failure": False,
    "email_on_retry": False,
}

dag, tasks = make_airflow_dag(
    module_name="satip.mario",
    pipeline_name="download_latest_data_pipeline",
    run_config=yaml.safe_load(ENVIRONMENT),
    dag_kwargs={
        "default_args": DEFAULT_ARGS,
        "max_active_runs": 1,
        "schedule_interval": "*/30 * * * *",
        "catchup": "False"
    },
)
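
Everything in dag_kwargs is forwarded to Airflow's DAG constructor, so "*/30 * * * *" is an ordinary cron expression (run every 30 minutes) and catchup=False keeps the scheduler from backfilling the intervals missed since the 2020-12-14 start_date.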
Code Example #7
File: mounted.py Project: trevenrawr/dagster
from dagster_airflow.factory import make_airflow_dag

dag, steps = make_airflow_dag(
    module_name="docs_snippets.intro_tutorial.airflow",
    pipeline_name="hello_cereal_pipeline",
    run_config={"storage": {"filesystem": {"config": {"base_dir": "/container_tmp"}}}},
    dag_id=None,
    dag_description=None,
    dag_kwargs=None,
    op_kwargs={"host_tmp_dir": "/host_tmp", "tmp_dir": "/container_tmp"},
)
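
op_kwargs, by contrast, is passed through to each generated Airflow operator. The host_tmp_dir / tmp_dir pair follows Docker's temp-volume mounting convention, which is presumably why the run_config base_dir points at the container-side path /container_tmp rather than the host's /host_tmp.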