def test_execute(self, mock_gcf_hook, mock_xcom):
    """Invoke the operator and verify hook construction, the function call, and the XCom push.

    The hook and the XCom helper are both mocked: we only assert on how the
    operator wires its parameters through, not on any real GCP interaction.
    """
    execution_id = 'exec_id'
    mock_gcf_hook.return_value.call_function.return_value = {'executionId': execution_id}

    function_id = "test_function"
    input_payload = {'key': 'value'}
    api_version = 'test'
    gcp_conn_id = 'test_conn'

    operator = CloudFunctionInvokeFunctionOperator(
        task_id='test',
        function_id=function_id,
        input_data=input_payload,
        location=GCP_LOCATION,
        project_id=GCP_PROJECT_ID,
        api_version=api_version,
        gcp_conn_id=gcp_conn_id,
    )
    operator.execute(None)

    # The hook must be created with the operator's connection settings...
    mock_gcf_hook.assert_called_once_with(api_version=api_version, gcp_conn_id=gcp_conn_id)
    # ...and invoked exactly once with the operator's call parameters.
    mock_gcf_hook.return_value.call_function.assert_called_once_with(
        function_id=function_id,
        input_data=input_payload,
        location=GCP_LOCATION,
        project_id=GCP_PROJECT_ID,
    )
    # The execution id returned by the hook is pushed to XCom for downstream tasks.
    mock_xcom.assert_called_once_with(context=None, key='execution_id', value=execution_id)
catchup=False,
    tags=['example'],
) as dag:
    # Example pipeline: deploy a Cloud Function (twice, with and without an
    # explicit project id), invoke it, then delete it.
    # [START howto_operator_gcf_deploy]
    deploy_task = CloudFunctionDeployFunctionOperator(
        task_id="gcf_deploy_task",
        project_id=GCP_PROJECT_ID,
        location=GCP_LOCATION,
        body=body,
        validate_body=GCP_VALIDATE_BODY,
    )
    # [END howto_operator_gcf_deploy]
    # [START howto_operator_gcf_deploy_no_project_id]
    # Same deployment without project_id — presumably resolved from the GCP
    # connection's default project; TODO confirm against the operator docs.
    deploy2_task = CloudFunctionDeployFunctionOperator(
        task_id="gcf_deploy2_task", location=GCP_LOCATION, body=body, validate_body=GCP_VALIDATE_BODY
    )
    # [END howto_operator_gcf_deploy_no_project_id]
    # [START howto_operator_gcf_invoke_function]
    invoke_task = CloudFunctionInvokeFunctionOperator(
        task_id="invoke_task",
        project_id=GCP_PROJECT_ID,
        location=GCP_LOCATION,
        input_data={},
        function_id=GCF_SHORT_FUNCTION_NAME,
    )
    # [END howto_operator_gcf_invoke_function]
    # [START howto_operator_gcf_delete]
    delete_task = CloudFunctionDeleteFunctionOperator(task_id="gcf_delete_task", name=FUNCTION_NAME)
    # [END howto_operator_gcf_delete]
    # Linear ordering: deploy -> redeploy -> invoke -> clean up.
    deploy_task >> deploy2_task >> invoke_task >> delete_task
access_control={"test_acc_control": {"can_read", "can_edit"}})

# Build the Cloud Function payload from the "CF_PAYLOADS" Airflow Variable:
# take the first entry's 'create_dataset' config and re-serialize it to JSON.
payload = Variable.get("CF_PAYLOADS", deserialize_json=True)
payload = json.dumps(payload[0]['create_dataset'])
project_id = env_vars["project_id"]
# Service account connection used to reach GCP.
gcp_conn_id = "sa-med-ml"
location = "us-central1"
# Cloud Function name to invoke.
function_id = "cf_create_eta_automl_dataset"
invoke_cf = CloudFunctionInvokeFunctionOperator(
    task_id="invoke_cf",
    project_id=project_id,
    location=location,
    gcp_conn_id=gcp_conn_id,
    input_data={"data": payload},
    function_id=function_id,
    dag=dag)
email_to = "*****@*****.**"
subject = "Create dataset cloud function successfully initiated"
# NOTE(review): second '<p>' was likely meant to be a closing '</p>' — confirm intent.
html_content = "<p> You have got mail! <p>"
success = EmailOperator(
    task_id="success_mail",
    to=email_to,
    subject=subject,
    html_content=html_content,
    dag=dag)
# NOTE(review): presumably consumed by a failure-notification operator defined
# below this chunk — verify; as written this is only a module-level variable.
trigger_rule = "one_failed"
# [START howto_operator_gcf_default_args] default_args = {'owner': 'airflow'} # [END howto_operator_gcf_default_args] with models.DAG( 'Main_Data_Pipeline', schedule_interval=None, # Override to match your needs start_date=dates.days_ago(1), tags=['example'], ) as dag: invoke_task0 = CloudFunctionInvokeFunctionOperator( task_id="invoke_file_validation", location=GCP_LOCATION, input_data={}, function_id="invoke_Validation", ) invoke_task1 = CloudFunctionInvokeFunctionOperator( task_id="invoke_stg_trading_trans", location=GCP_LOCATION, input_data={}, function_id="test", ) invoke_task2 = CloudFunctionInvokeFunctionOperator( task_id="invoke_stg_arrangement", location=GCP_LOCATION, input_data={}, function_id="test1", ) invoke_task3 = CloudFunctionInvokeFunctionOperator(