}

dag_name = 'bq_events_to_amplitude'

with models.DAG(
        dag_name,
        default_args=default_args,
        schedule_interval='0 2 * * *') as dag:

    fenix_task_id = 'fenix_amplitude_export'
    SubDagOperator(
        subdag=export_to_amplitude(
            dag_name=fenix_task_id,
            parent_dag_name=dag_name,
            default_args=default_args,
            project='moz-fx-data-shared-prod',
            dataset='telemetry',
            table_or_view='fenix_events_v1',
            s3_prefix='fenix',
        ),
        task_id=fenix_task_id
    )

    shredder_fenix = gke_command(
        task_id="shredder_amplitude_fenix",
        name="shredder-amplitude-fenix",
        command=[
            "script/shredder_amplitude",
            "--date={{ ds }}",
            "--api-key={{ var.value.fenix_amplitude_api_key }}",
            "--secret-key={{ var.value.fenix_amplitude_secret_key }}",
        catchup=True,
        default_args=default_args) as dag:

    fxa_export_table_create = bigquery_etl_query(
        task_id='fxa_amplitude_export_v1',
        project_id='moz-fx-data-shared-prod',
        destination_table='fxa_amplitude_export_v1',
        dataset_id='firefox_accounts_derived',
        depends_on_past=True)

    task_id = 'fxa_amplitude_export_task'
    fxa_amplitude_export = SubDagOperator(
        subdag=export_to_amplitude(
            dag_name=task_id,
            parent_dag_name=dag_name,
            default_args=default_args,
            project='moz-fx-data-shared-prod',
            dataset='firefox_accounts',
            table_or_view='fxa_amplitude_export',
            s3_prefix='fxa-active',
        ),
        task_id=task_id)

    fxa_export_table_create >> fxa_amplitude_export

    fxa_amplitude_user_ids = bigquery_etl_query(
        task_id='fxa_amplitude_user_ids',
        project_id='moz-fx-data-shared-prod',
        destination_table='fxa_amplitude_user_ids_v1',
        dataset_id='firefox_accounts_derived',
        depends_on_past=True,
        start_date=datetime.datetime(2020, 5, 10),
import datetime

from airflow import models
from airflow.operators.subdag_operator import SubDagOperator

from utils.amplitude import export_to_amplitude

default_args = {
    'owner': '*****@*****.**',
    'start_date': datetime.datetime(2019, 6, 27),
    'email': ['*****@*****.**', '*****@*****.**'],
    'email_on_failure': True,
    'email_on_retry': True,
    'retries': 2,
    'retry_delay': datetime.timedelta(minutes=10),
    'schedule_interval': '0 1 * * *',
}

dag_name = 'bq_events_to_amplitude'

with models.DAG(dag_name, default_args=default_args) as dag:
    task_id = 'fenix_amplitude_export'
    SubDagOperator(
        subdag=export_to_amplitude(
            dag_name=task_id,
            parent_dag_name=dag_name,
            default_args=default_args,
            dataset='telemetry',
            table_or_view='fenix_events_v1',
            s3_prefix='fenix',
        ),
        task_id=task_id)
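# Hedged sketch, not part of the snippets above or below: export_to_amplitude()
# is imported from utils.amplitude, but its definition is not shown in this
# section. The placeholder below is an assumption inferred from the visible
# call sites; it only illustrates the Airflow SubDAG convention that the child
# DAG's dag_id must be "<parent_dag_id>.<task_id>", which is why each call
# passes both parent_dag_name and a dag_name equal to the SubDagOperator's
# task_id. The default project value is likewise assumed, since one call site
# omits it.
from airflow import models


def export_to_amplitude(parent_dag_name, dag_name, default_args,
                        dataset, table_or_view, s3_prefix,
                        project='moz-fx-data-shared-prod'):
    """Placeholder SubDAG factory: parameter names come from the call sites
    above; the body is illustrative only, not Mozilla's implementation."""
    with models.DAG(
            dag_id='{}.{}'.format(parent_dag_name, dag_name),
            default_args=default_args) as dag:
        # Tasks that export `project.dataset.table_or_view` to Amplitude via
        # an S3 drop under `s3_prefix` would be defined here.
        pass
    return dag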
    'retry_delay': datetime.timedelta(minutes=10),
}

dag_name = 'bq_events_to_amplitude'

with models.DAG(
        dag_name,
        default_args=default_args,
        schedule_interval='0 1 * * *') as dag:

    fenix_task_id = 'fenix_amplitude_export'
    SubDagOperator(
        subdag=export_to_amplitude(
            dag_name=fenix_task_id,
            parent_dag_name=dag_name,
            default_args=default_args,
            project='moz-fx-data-derived-datasets',
            dataset='telemetry',
            table_or_view='fenix_events_v1',
            s3_prefix='fenix',
        ),
        task_id=fenix_task_id)

    fennec_ios_task_id = 'fennec_ios_amplitude_export'
    fennec_ios_args = default_args.copy()
    fennec_ios_args["start_date"] = datetime.datetime(2019, 12, 2)
    SubDagOperator(
        subdag=export_to_amplitude(
            dag_name=fennec_ios_task_id,
            parent_dag_name=dag_name,
            default_args=fennec_ios_args,
            project='moz-fx-data-shared-prod',
            dataset='telemetry',