def report_failure(context):
    send_email = email_operator.EmailOperator(
        task_id='email_summary',
        to='*****@*****.**',
        subject='Test email notify on task failure',
        html_content="Atendimento emergĂȘncial!!! \n Erro na task: "+ context.get("task_instance").task_id,
    )
    send_email.execute(context)
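
# Sketch (assumption, not shown in the snippet): report_failure is meant to be
# registered as an on_failure_callback, e.g. via DEFAULT_DAG_ARGS, so Airflow
# calls it with the failing task's context:
#
#     DEFAULT_DAG_ARGS = {
#         'start_date': datetime.datetime(2020, 1, 1),  # assumed value
#         'retries': 1,                                  # assumed value
#         'on_failure_callback': report_failure,
#     }
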
with models.DAG('mssql_gcs_dataflow_bigquery_dag_1',  # dag_id inferred from the email subjects below
                default_args=DEFAULT_DAG_ARGS) as dag:
    # Export task that will process SQL statement and save files to Cloud Storage.
    export_sales_orders = mssql_to_gcs.MsSqlToGoogleCloudStorageOperator(
        task_id='mrrecvh_mrrecvd_ppprice_lj',
        sql=query1,
        bucket=models.Variable.get('mssql_export_bucket'),
        filename=DATE + '-export.json',
        mssql_conn_id='shapiro-sql',
        dag=dag)

    # Here we create two conditional tasks, one of which will be executed
    # based on whether the export_sales_orders was a success or a failure.
    success_move_task = email_operator.EmailOperator(
        task_id='success',
        trigger_rule=TriggerRule.ALL_SUCCESS,
        to=models.Variable.get('email'),
        subject=
        'mssql_gcs_dataflow_bigquery_dag_1 Job Succeeded: start_date {{ ds }}',
        html_content="HTML CONTENT")

    failure_move_task = email_operator.EmailOperator(
        task_id='failure',
        trigger_rule=TriggerRule.ALL_FAILED,
        to=models.Variable.get('email'),
        subject=
        'mssql_gcs_dataflow_bigquery_dag_1 Job Failed: start_date {{ ds }}',
        html_content="HTML CONTENT")

    # Both success_move_task and failure_move_task are downstream of
    # export_sales_orders; their trigger rules decide which one actually runs.
    export_sales_orders >> success_move_task
    export_sales_orders >> failure_move_task
Example #3
    bq_read_most_popular = bigquery_get_data.BigQueryGetDataOperator(
        task_id='bq_read_most_popular',
        dataset_id=bq_dataset_name,
        table_id=BQ_MOST_POPULAR_TABLE_NAME)
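    # The rows fetched above are pushed to XCom as 'return_value' and are pulled
    # into the email body below via ti.xcom_pull.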

    # [START composer_email]
    # Send email confirmation
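    # The {{ ti.xcom_pull(...) }} placeholders below survive str.format() (they are
    # passed in as format arguments) and are rendered by Airflow's Jinja templating
    # at run time, since html_content is a templated field of EmailOperator.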
    email_summary = email_operator.EmailOperator(
        task_id='email_summary',
        to=models.Variable.get('email'),
        subject='Sample BigQuery notify data ready',
        html_content="""
        Analyzed Stack Overflow posts data from {min_date} 12AM to {max_date}
        12AM. The most popular question was '{question_title}' with
        {view_count} views. Top 100 questions asked are now available at:
        {export_location}.
        """.format(
            min_date=min_query_date,
            max_date=max_query_date,
            question_title=(
                '{{ ti.xcom_pull(task_ids=\'bq_read_most_popular\', '
                'key=\'return_value\')[0][0] }}'
            ),
            view_count=(
                '{{ ti.xcom_pull(task_ids=\'bq_read_most_popular\', '
                'key=\'return_value\')[0][1] }}'
            ),
            export_location=output_file))
    # [END composer_email]

    # Delete the BigQuery dataset once the email has been sent.
    delete_bq_dataset = bash_operator.BashOperator(
        task_id='delete_bq_dataset',
        bash_command='bq rm -r -f %s' % bq_dataset_name,
        trigger_rule=trigger_rule.TriggerRule.ALL_DONE)
Example #4
    query_current_sales_products = bigquery_operator.BigQueryOperator(
        task_id='query_products_on_sale',
        bql="""
        SELECT product_id, product_name
        FROM `{bq_table_id}` 
        WHERE sale = True
        """.format(bq_table_id=models.Variable.get('bq_table_id')),
        use_legacy_sql=False,
        destination_dataset_table=bq_products_on_sale_table_id,
        write_disposition='WRITE_TRUNCATE')

    export_data_to_gcs = bigquery_to_gcs.BigQueryToCloudStorageOperator(
        task_id='export_sale_data_to_gcs',
        source_project_dataset_table=bq_products_on_sale_table_id,
        destination_cloud_storage_uris=[output_file],
        export_format='CSV')

    email_updation_notification = email_operator.EmailOperator(
        task_id='email_notification',
        to=email_id,
        subject='Sale product data updated',
        html_content="""
        Updated sale products for {current_time}.
        """.format(current_time=updated_time),
        trigger_rule=trigger_rule.TriggerRule.ALL_SUCCESS)
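    # ALL_SUCCESS is Airflow's default trigger rule; it is spelled out here to make
    # explicit that the email is sent only when every upstream task succeeded.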

    delete_bq_table = bigquery_table_delete_operator.BigQueryTableDeleteOperator(
        task_id='delete_bigquery_table',
        deletion_dataset_table=bq_products_on_sale_table_id)
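
    # Run the pipeline linearly: query the products on sale, export them to GCS,
    # send the notification email, then drop the intermediate BigQuery table.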
    (query_current_sales_products >> export_data_to_gcs >>
     email_updation_notification >> delete_bq_table)
Example #5
        WHERE
          a.id IS NULL
          OR b.id IS NULL
          OR FARM_FINGERPRINT(FORMAT("%T", a)) <> FARM_FINGERPRINT(FORMAT("%T", b))
        """.format(table="Google_CarlosAugusto.table1_log"),
    use_legacy_sql=False,
    dag=dag)
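# The query above flags rows present in only one of the two tables or whose
# FARM_FINGERPRINT row hashes differ, i.e. inserted, deleted or changed rows
# for the change-data-capture log.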

bq_trunc_table1_staging = bigquery_operator.BigQueryOperator(
    task_id='bq_truncate_table1_staging',
    bql="""
        TRUNCATE TABLE 
          `{table}`
        """.format(table="Google_CarlosAugusto.table1_staging"),
    use_legacy_sql=False,
    dag=dag)

# Send email confirmation
# Needs SendGrid to be configured here
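# On Cloud Composer this typically means setting the SENDGRID_MAIL_FROM and
# SENDGRID_API_KEY environment variables so that EmailOperator can deliver mail.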
email_summary = email_operator.EmailOperator(
    task_id='email_summary',
    to=models.Variable.get('email'),
    subject='Table1 Load Job',
    html_content=""" <h3>Table1 Load Job Finished Successfully</h3> """,
    dag=dag)
# [END composer_email]

#start >> t1 >> end

start >> bq_trunc_table1_staging >> dataflow_load_table1 >> bq_log_table1_cdc >> bq_merge_table1 >> email_summary >> end
Example #6
                schedule_interval=datetime.timedelta(days=1),
                default_args=default_dag_args) as dag:
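
    # The bq_query task below materializes the query result into the bq_output
    # table, replacing any output from a previous run (WRITE_TRUNCATE).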

    bq_query = bigquery_operator.BigQueryOperator(
        task_id='bq_query',
        bql="""
        SELECT *
        FROM `david-playground-1.orderedtest.unordered`
        """,
        use_legacy_sql=False,
        write_disposition='WRITE_TRUNCATE',
        destination_dataset_table=bq_output)

    export_to_gcs = bigquery_to_gcs.BigQueryToCloudStorageOperator(
        task_id='export_to_gcs',
        source_project_dataset_table=bq_output,
        destination_cloud_storage_uris=[output_file],
        export_format='CSV')

    email_summary = email_operator.EmailOperator(
        task_id='email_summary',
        to=models.Variable.get('email'),
        subject='Sample BigQuery data ready',
        html_content="""
        Ran Job at {start_date}
        Export available at {export_location}
        """.format(start_date=start_date, export_location=output_file))

    # Define DAG dependencies.
    bq_query >> export_to_gcs >> email_summary