    def test_succeedifexists(self, mock_hook):
        # With fail_if_exists=False the operator must not raise when the topic
        # already exists; it should still ask the (mocked) hook to create the
        # topic exactly once, forwarding fail_if_exists=False.
        operator = PubSubTopicCreateOperator(task_id=TASK_ID,
                                             project=TEST_PROJECT,
                                             topic=TEST_TOPIC,
                                             fail_if_exists=False)

        operator.execute(None)
        mock_hook.return_value.create_topic.assert_called_once_with(
            TEST_PROJECT, TEST_TOPIC, fail_if_exists=False)


default_args = {
    'email_on_failure': False,
    'email_on_retry': False,
    'project': project,
    'topic': topic,
    'subscription': subscription,
}


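# Bash command template: iterate over the messages that the 'pull-messages'
# sensor task pushed to XCom and echo each ack ID and base64-encoded payload.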
echo_template = '''
{% for m in task_instance.xcom_pull(task_ids='pull-messages') %}
    echo "AckID: {{ m.get('ackId') }}, Base64-Encoded: {{ m.get('message') }}"
{% endfor %}
'''

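# Keys in default_args are applied as default constructor arguments for every
# task, so the operators below inherit 'project', 'topic' and 'subscription'
# without repeating them per task.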
with DAG('pubsub-end-to-end', default_args=default_args,
         schedule_interval=datetime.timedelta(days=1)) as dag:
    t1 = PubSubTopicCreateOperator(task_id='create-topic')
    t2 = PubSubSubscriptionCreateOperator(
        task_id='create-subscription', topic_project=project,
        subscription=subscription)
    t3 = PubSubPublishOperator(
        task_id='publish-messages', messages=messages)
    t4 = PubSubPullSensor(task_id='pull-messages', ack_messages=True)
    t5 = BashOperator(task_id='echo-pulled-messages',
                      bash_command=echo_template)
    t6 = PubSubSubscriptionDeleteOperator(task_id='delete-subscription')
    t7 = PubSubTopicDeleteOperator(task_id='delete-topic')

    t1 >> t2 >> t3
    t2 >> t4 >> t5 >> t6 >> t7


TOPIC_NAME = Variable.get("topic_name")
RICK_TABLE_NAME = Variable.get("rick_table_name")
MORTY_TABLE_NAME = Variable.get("morty_table_name")
DATASET_SOURCE_PATH = Variable.get("dataset_source_path")
GCP_PROJECT_ID = Variable.get("project_id")
BASE_JAR_PATH = Variable.get("base_jar_path")
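# Airflow connection ID holding the GCP credentials used by the operators below.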
GCP_CONNECTION_ID = "google_cloud_connection_temp"

with DAG("rickandmorty_symbolic_word",
         catchup=True,
         default_args=default_args,
         schedule_interval='@daily') as dag:
    start_pipeline = DummyOperator(task_id="StartPipeline")

    create_pubsub_topic_operator = PubSubTopicCreateOperator(
        task_id="CreatePubSubTopic",
        topic=TOPIC_NAME,
        project=GCP_PROJECT_ID,
        fail_if_exists=False,
        gcp_conn_id=GCP_CONNECTION_ID)

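    # '{{ ds }}' is rendered by Jinja at run time to the DAG run's execution
    # date (YYYY-MM-DD); this assumes the custom operator declares
    # execution_date as a templated field.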
    clean_and_push_episode_to_pubsub = CleanAndPushEpisodeOperator(
        task_id="CleanAndPushEpisodeToPubSub",
        topic_name=TOPIC_NAME,
        source_path=DATASET_SOURCE_PATH,
        execution_date='{{ds}}',
        project_id=GCP_PROJECT_ID,
        gcp_conn_id=GCP_CONNECTION_ID)

    # dialogs_symbolic_word_to_bigquery = DataFlowJavaOperator(
    #     gcp_conn_id=GCP_CONNECTION_ID,
    #     job_name='rickandmorty_symbolic_word',
    #     task_id='ProcessDialogsSymbolicWordToBigQuery',