def test_update_queue(self, mock_hook):
    mock_hook.return_value.update_queue.return_value = {}
    operator = CloudTasksQueueUpdateOperator(task_queue=Queue(name=FULL_QUEUE_PATH), task_id="id")
    operator.execute(context=None)
    mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
    mock_hook.return_value.update_queue.assert_called_once_with(
        task_queue=Queue(name=FULL_QUEUE_PATH),
        project_id=None,
        location=None,
        queue_name=None,
        update_mask=None,
        retry=None,
        timeout=None,
        metadata=None,
    )
def update_queue(
    self,
    task_queue: Queue,
    project_id: str,
    location: Optional[str] = None,
    queue_name: Optional[str] = None,
    update_mask: Optional[FieldMask] = None,
    retry: Optional[Retry] = None,
    timeout: Optional[float] = None,
    metadata: Optional[Sequence[Tuple[str, str]]] = None,
) -> Queue:
    """
    Updates a queue in Cloud Tasks.

    :param task_queue: The task queue to update.
        This method creates the queue if it does not exist and updates the queue if it does exist.
        The queue's name must be specified.
    :type task_queue: dict or class google.cloud.tasks_v2.types.Queue
    :param project_id: (Optional) The ID of the GCP project that owns the Cloud Tasks.
        If set to None or missing, the default project_id from the GCP connection is used.
    :type project_id: str
    :param location: (Optional) The location name in which the queue will be updated.
        If provided, it will be used to construct the full queue path.
    :type location: str
    :param queue_name: (Optional) The queue's name.
        If provided, it will be used to construct the full queue path.
    :type queue_name: str
    :param update_mask: A mask used to specify which fields of the queue are being updated.
        If empty, then all fields will be updated.
        If a dict is provided, it must be of the same form as the protobuf message.
    :type update_mask: dict or class google.cloud.tasks_v2.types.FieldMask
    :param retry: (Optional) A retry object used to retry requests.
        If None is specified, requests will not be retried.
    :type retry: google.api_core.retry.Retry
    :param timeout: (Optional) The amount of time, in seconds, to wait for the request to complete.
        Note that if retry is specified, the timeout applies to each individual attempt.
    :type timeout: float
    :param metadata: (Optional) Additional metadata that is provided to the method.
    :type metadata: sequence[tuple[str, str]]
    :rtype: google.cloud.tasks_v2.types.Queue
    """
    client = self.get_conn()

    if queue_name and location:
        full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name)
        if isinstance(task_queue, Queue):
            task_queue.name = full_queue_name
        elif isinstance(task_queue, dict):
            task_queue['name'] = full_queue_name
        else:
            raise AirflowException('Unable to set queue_name.')

    return client.update_queue(
        queue=task_queue,
        update_mask=update_mask,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def execute(self, context):
    hook = CloudTasksHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
    )
    try:
        queue = hook.create_queue(
            location=self.location,
            task_queue=self.task_queue,
            project_id=self.project_id,
            queue_name=self.queue_name,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    except AlreadyExists:
        queue = hook.get_queue(
            location=self.location,
            project_id=self.project_id,
            queue_name=self.queue_name,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    return Queue.to_dict(queue)
def test_create_queue(self, mock_hook):
    mock_hook.return_value.create_queue.return_value = {}
    operator = CloudTasksQueueCreateOperator(location=LOCATION, task_queue=Queue(), task_id="id")
    operator.execute(context=None)
    mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
    mock_hook.return_value.create_queue.assert_called_once_with(
        location=LOCATION,
        task_queue=Queue(),
        project_id=None,
        queue_name=None,
        retry=None,
        timeout=None,
        metadata=None,
    )
def execute(self, context: 'Context'):
    hook = CloudTasksHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
    )
    try:
        queue = hook.create_queue(
            location=self.location,
            task_queue=self.task_queue,
            project_id=self.project_id,
            queue_name=self.queue_name,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    except AlreadyExists:
        if self.queue_name is None:
            raise RuntimeError("The queue name should be set here!")
        queue = hook.get_queue(
            location=self.location,
            project_id=self.project_id,
            queue_name=self.queue_name,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
    return Queue.to_dict(queue)
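The fallback to hook.get_queue above only works when the operator was given an explicit queue_name, which is what the RuntimeError guard enforces. A minimal sketch of wiring the operator accordingly (LOCATION and QUEUE_ID are placeholders, matching the example DAGs later in this section):

# Sketch: passing queue_name lets the AlreadyExists fallback fetch the
# existing queue instead of hitting the RuntimeError guard above.
create_queue = CloudTasksQueueCreateOperator(
    location=LOCATION,       # e.g. "europe-west1" (placeholder)
    queue_name=QUEUE_ID,     # required for the get_queue fallback
    task_queue=Queue(),      # default queue settings
    task_id="create_queue",
)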
def test_update_queue(self, get_conn):
    result = self.hook.update_queue(
        task_queue=Queue(state=3),
        location=LOCATION,
        queue_name=QUEUE_ID,
        project_id=PROJECT_ID,
    )
    self.assertIs(result, API_RESPONSE)
    get_conn.return_value.update_queue.assert_called_once_with(
        queue=Queue(name=FULL_QUEUE_PATH, state=3),
        update_mask=None,
        retry=None,
        timeout=None,
        metadata=None,
    )
def test_create_queue(self, get_conn):
    result = self.hook.create_queue(
        location=LOCATION,
        task_queue=Queue(),
        queue_name=QUEUE_ID,
        project_id=PROJECT_ID,
    )
    self.assertIs(result, API_RESPONSE)
    get_conn.return_value.create_queue.assert_called_once_with(
        parent=FULL_LOCATION_PATH,
        queue=Queue(name=FULL_QUEUE_PATH),
        retry=None,
        timeout=None,
        metadata=None,
    )
def update_queue(
    self,
    task_queue: Queue,
    project_id: str = PROVIDE_PROJECT_ID,
    location: Optional[str] = None,
    queue_name: Optional[str] = None,
    update_mask: Optional[FieldMask] = None,
    retry: Union[Retry, _MethodDefault] = DEFAULT,
    timeout: Optional[float] = None,
    metadata: Sequence[Tuple[str, str]] = (),
) -> Queue:
    """
    Updates a queue in Cloud Tasks.

    :param task_queue: The task queue to update.
        This method creates the queue if it does not exist and updates the queue if it does exist.
        The queue's name must be specified.
    :param project_id: (Optional) The ID of the Google Cloud project that owns the Cloud Tasks.
        If set to None or missing, the default project_id from the Google Cloud connection is used.
    :param location: (Optional) The location name in which the queue will be updated.
        If provided, it will be used to construct the full queue path.
    :param queue_name: (Optional) The queue's name.
        If provided, it will be used to construct the full queue path.
    :param update_mask: A mask used to specify which fields of the queue are being updated.
        If empty, then all fields will be updated.
        If a dict is provided, it must be of the same form as the protobuf message.
    :param retry: (Optional) A retry object used to retry requests.
        If None is specified, requests will not be retried.
    :param timeout: (Optional) The amount of time, in seconds, to wait for the request to complete.
        Note that if retry is specified, the timeout applies to each individual attempt.
    :param metadata: (Optional) Additional metadata that is provided to the method.
    :rtype: google.cloud.tasks_v2.types.Queue
    """
    client = self.get_conn()

    if queue_name and location:
        full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}"
        if isinstance(task_queue, Queue):
            task_queue.name = full_queue_name
        elif isinstance(task_queue, dict):
            task_queue['name'] = full_queue_name
        else:
            raise AirflowException('Unable to set queue_name.')

    return client.update_queue(
        request={'queue': task_queue, 'update_mask': update_mask},
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
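A minimal usage sketch of the hook method above, assuming a configured Google Cloud connection; the project, location and queue identifiers are placeholders. It passes the queue as a dict and limits the update to the rate limits via a FieldMask, both of which the docstring allows:

from google.protobuf.field_mask_pb2 import FieldMask

from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook

hook = CloudTasksHook(gcp_conn_id="google_cloud_default")

# Only rate_limits.max_dispatches_per_second is touched; other queue fields stay as-is.
updated = hook.update_queue(
    task_queue={"rate_limits": {"max_dispatches_per_second": 10}},  # dict form is accepted
    project_id="my-project",   # placeholder
    location="us-central1",    # placeholder
    queue_name="my-queue",     # placeholder; used to build the full queue path
    update_mask=FieldMask(paths=["rate_limits.max_dispatches_per_second"]),
)
print(updated.name)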
def test_resume_queue(self, get_conn):
    result = self.hook.resume_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
    self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
    get_conn.return_value.resume_queue.assert_called_once_with(
        request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
    )
def test_update_queue(self, mock_hook):
    mock_hook.return_value.update_queue.return_value = TEST_QUEUE
    operator = CloudTasksQueueUpdateOperator(task_queue=Queue(name=FULL_QUEUE_PATH), task_id="id")
    result = operator.execute(context=None)
    self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)
    mock_hook.assert_called_once_with(
        gcp_conn_id=GCP_CONN_ID,
        impersonation_chain=None,
    )
    mock_hook.return_value.update_queue.assert_called_once_with(
        task_queue=Queue(name=FULL_QUEUE_PATH),
        project_id=None,
        location=None,
        queue_name=None,
        update_mask=None,
        retry=None,
        timeout=None,
        metadata=None,
    )
def execute(self, context: 'Context'):
    hook = CloudTasksHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
    )
    queue = hook.resume_queue(
        location=self.location,
        queue_name=self.queue_name,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    return Queue.to_dict(queue)
def test_list_queues(self, get_conn):
    result = self.hook.list_queues(location=LOCATION, project_id=PROJECT_ID)
    self.assertEqual(result, [Queue(name=FULL_QUEUE_PATH)])
    get_conn.return_value.list_queues.assert_called_once_with(
        request=dict(parent=FULL_LOCATION_PATH, filter=None, page_size=None),
        retry=None,
        timeout=None,
        metadata=(),
    )
def execute(self, context: 'Context'):
    hook = CloudTasksHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
    )
    queues = hook.list_queues(
        location=self.location,
        project_id=self.project_id,
        results_filter=self.results_filter,
        page_size=self.page_size,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    return [Queue.to_dict(q) for q in queues]
def ensure_required_queues_exist():
    """
    Reads settings.CLOUD_TASKS_QUEUES and creates or updates the specified queues
    """
    client = get_cloud_tasks_client()
    parent_path = cloud_tasks_parent_path()

    for queue in getattr(settings, "CLOUD_TASKS_QUEUES", []):
        queue_name = queue["name"]

        # In our task settings we expect that the queue name will not
        # include the path, otherwise moving the app, changing location
        # etc. involves changing a load of settings.
        assert ("/" not in queue_name)  # Don't specify the full path

        update_mask = ["name"]
        queue_dict = {}
        queue_dict["name"] = "%s/queues/%s" % (parent_path, queue_name)
        queue_dict["rate_limits"] = {}
        queue_dict["retry_config"] = {}

        if "rate_per_second" in queue:
            update_mask.append("rate_limits.max_dispatches_per_second")
            queue_dict["rate_limits"]["max_dispatches_per_second"] = queue["rate_per_second"]

        if "rate_max_concurrent" in queue:
            update_mask.append("rate_limits.max_concurrent_dispatches")
            queue_dict["rate_limits"]["max_concurrent_dispatches"] = queue["rate_max_concurrent"]

        if "retry_max_attempts" in queue:
            update_mask.append("retry_config.max_attempts")
            queue_dict["retry_config"]["max_attempts"] = queue["retry_max_attempts"]

        logging.debug("Ensuring task queue is up-to-date: %s", queue_dict["name"])

        from google.cloud.tasks_v2.types import Queue

        queue = Queue(**queue_dict)
        client.update_queue(queue=queue, update_mask=field_mask_pb2.FieldMask(paths=update_mask))
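For reference, a CLOUD_TASKS_QUEUES entry that ensure_required_queues_exist() above would understand might look like the sketch below (values are illustrative; only the keys the function actually reads are shown):

# settings.py (illustrative values)
CLOUD_TASKS_QUEUES = [
    {
        "name": "default",          # short name only; the parent path is prepended automatically
        "rate_per_second": 5,       # -> rate_limits.max_dispatches_per_second
        "rate_max_concurrent": 10,  # -> rate_limits.max_concurrent_dispatches
        "retry_max_attempts": 3,    # -> retry_config.max_attempts
    },
]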
def execute(self, context: 'Context'):
    hook = CloudTasksHook(
        gcp_conn_id=self.gcp_conn_id,
        impersonation_chain=self.impersonation_chain,
    )
    queue = hook.pause_queue(
        location=self.location,
        queue_name=self.queue_name,
        project_id=self.project_id,
        retry=self.retry,
        timeout=self.timeout,
        metadata=self.metadata,
    )
    CloudTasksQueueLink.persist(
        operator_instance=self,
        context=context,
        queue_name=queue.name,
    )
    return Queue.to_dict(queue)
TASK = {
    "app_engine_http_request": {  # Specify the type of request.
        "http_method": "POST",
        "relative_uri": "/example_task_handler",
        "body": "Hello".encode(),
    },
    "schedule_time": timestamp,
}

with DAG("example_gcp_tasks", default_args=default_args, schedule_interval=None) as dag:
    create_queue = CloudTasksQueueCreateOperator(
        location=LOCATION,
        task_queue=Queue(),
        queue_name=QUEUE_ID,
        retry=Retry(maximum=10.0),
        timeout=5,
        task_id="create_queue",
    )

    create_task_to_run = CloudTasksTaskCreateOperator(
        location=LOCATION,
        queue_name=QUEUE_ID,
        task=TASK,
        task_name=TASK_NAME,
        retry=Retry(maximum=10.0),
        timeout=5,
        task_id="create_task_to_run",
    )
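The timestamp referenced by schedule_time above is not defined in this snippet; a sketch of one common way to build it, assuming the protobuf Timestamp type that Cloud Tasks accepts for scheduling:

import datetime

from google.protobuf import timestamp_pb2

# Schedule the task roughly two minutes from now (illustrative offset).
run_at = datetime.datetime.utcnow() + datetime.timedelta(minutes=2)

timestamp = timestamp_pb2.Timestamp()
timestamp.FromDatetime(run_at)  # convert the datetime into a protobuf Timestamp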
class TestCloudTasksHook(unittest.TestCase):
    def setUp(self):
        with mock.patch(
            "airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__",
            new=mock_base_gcp_hook_no_default_project_id,
        ):
            self.hook = CloudTasksHook(gcp_conn_id="test")

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.client_info",
        new_callable=mock.PropertyMock,
    )
    @mock.patch("airflow.providers.google.cloud.hooks.tasks.CloudTasksHook._get_credentials")
    @mock.patch("airflow.providers.google.cloud.hooks.tasks.CloudTasksClient")
    def test_cloud_tasks_client_creation(self, mock_client, mock_get_creds, mock_client_info):
        result = self.hook.get_conn()
        mock_client.assert_called_once_with(
            credentials=mock_get_creds.return_value,
            client_info=mock_client_info.return_value,
        )
        self.assertEqual(mock_client.return_value, result)
        self.assertEqual(self.hook._client, result)

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.create_queue.return_value": API_RESPONSE},  # type: ignore
    )
    def test_create_queue(self, get_conn):
        result = self.hook.create_queue(
            location=LOCATION,
            task_queue=Queue(),
            queue_name=QUEUE_ID,
            project_id=PROJECT_ID,
        )
        self.assertIs(result, API_RESPONSE)
        get_conn.return_value.create_queue.assert_called_once_with(
            request=dict(parent=FULL_LOCATION_PATH, queue=Queue(name=FULL_QUEUE_PATH)),
            retry=None,
            timeout=None,
            metadata=(),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.update_queue.return_value": API_RESPONSE},  # type: ignore
    )
    def test_update_queue(self, get_conn):
        result = self.hook.update_queue(
            task_queue=Queue(state=3),
            location=LOCATION,
            queue_name=QUEUE_ID,
            project_id=PROJECT_ID,
        )
        self.assertIs(result, API_RESPONSE)
        get_conn.return_value.update_queue.assert_called_once_with(
            request=dict(queue=Queue(name=FULL_QUEUE_PATH, state=3), update_mask=None),
            retry=None,
            timeout=None,
            metadata=(),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.get_queue.return_value": API_RESPONSE},  # type: ignore
    )
    def test_get_queue(self, get_conn):
        result = self.hook.get_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
        self.assertIs(result, API_RESPONSE)
        get_conn.return_value.get_queue.assert_called_once_with(
            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.list_queues.return_value": [Queue(name=FULL_QUEUE_PATH)]},  # type: ignore
    )
    def test_list_queues(self, get_conn):
        result = self.hook.list_queues(location=LOCATION, project_id=PROJECT_ID)
        self.assertEqual(result, [Queue(name=FULL_QUEUE_PATH)])
        get_conn.return_value.list_queues.assert_called_once_with(
            request=dict(parent=FULL_LOCATION_PATH, filter=None, page_size=None),
            retry=None,
            timeout=None,
            metadata=(),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.delete_queue.return_value": None},  # type: ignore
    )
    def test_delete_queue(self, get_conn):
        result = self.hook.delete_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
        self.assertEqual(result, None)
        get_conn.return_value.delete_queue.assert_called_once_with(
            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.purge_queue.return_value": Queue(name=FULL_QUEUE_PATH)},  # type: ignore
    )
    def test_purge_queue(self, get_conn):
        result = self.hook.purge_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
        self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
        get_conn.return_value.purge_queue.assert_called_once_with(
            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.pause_queue.return_value": Queue(name=FULL_QUEUE_PATH)},  # type: ignore
    )
    def test_pause_queue(self, get_conn):
        result = self.hook.pause_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
        self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
        get_conn.return_value.pause_queue.assert_called_once_with(
            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.resume_queue.return_value": Queue(name=FULL_QUEUE_PATH)},  # type: ignore
    )
    def test_resume_queue(self, get_conn):
        result = self.hook.resume_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
        self.assertEqual(result, Queue(name=FULL_QUEUE_PATH))
        get_conn.return_value.resume_queue.assert_called_once_with(
            request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=()
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.create_task.return_value": Task(name=FULL_TASK_PATH)},  # type: ignore
    )
    def test_create_task(self, get_conn):
        result = self.hook.create_task(
            location=LOCATION,
            queue_name=QUEUE_ID,
            task=Task(),
            project_id=PROJECT_ID,
            task_name=TASK_NAME,
        )
        self.assertEqual(result, Task(name=FULL_TASK_PATH))
        get_conn.return_value.create_task.assert_called_once_with(
            request=dict(parent=FULL_QUEUE_PATH, task=Task(name=FULL_TASK_PATH), response_view=None),
            retry=None,
            timeout=None,
            metadata=(),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.get_task.return_value": Task(name=FULL_TASK_PATH)},  # type: ignore
    )
    def test_get_task(self, get_conn):
        result = self.hook.get_task(
            location=LOCATION,
            queue_name=QUEUE_ID,
            task_name=TASK_NAME,
            project_id=PROJECT_ID,
        )
        self.assertEqual(result, Task(name=FULL_TASK_PATH))
        get_conn.return_value.get_task.assert_called_once_with(
            request=dict(name=FULL_TASK_PATH, response_view=None),
            retry=None,
            timeout=None,
            metadata=(),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.list_tasks.return_value": [Task(name=FULL_TASK_PATH)]},  # type: ignore
    )
    def test_list_tasks(self, get_conn):
        result = self.hook.list_tasks(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID)
        self.assertEqual(result, [Task(name=FULL_TASK_PATH)])
        get_conn.return_value.list_tasks.assert_called_once_with(
            request=dict(parent=FULL_QUEUE_PATH, response_view=None, page_size=None),
            retry=None,
            timeout=None,
            metadata=(),
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.delete_task.return_value": None},  # type: ignore
    )
    def test_delete_task(self, get_conn):
        result = self.hook.delete_task(
            location=LOCATION,
            queue_name=QUEUE_ID,
            task_name=TASK_NAME,
            project_id=PROJECT_ID,
        )
        self.assertEqual(result, None)
        get_conn.return_value.delete_task.assert_called_once_with(
            request=dict(name=FULL_TASK_PATH), retry=None, timeout=None, metadata=()
        )

    @mock.patch(
        "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn",
        **{"return_value.run_task.return_value": Task(name=FULL_TASK_PATH)},  # type: ignore
    )
    def test_run_task(self, get_conn):
        result = self.hook.run_task(
            location=LOCATION,
            queue_name=QUEUE_ID,
            task_name=TASK_NAME,
            project_id=PROJECT_ID,
        )
        self.assertEqual(result, Task(name=FULL_TASK_PATH))
        get_conn.return_value.run_task.assert_called_once_with(
            request=dict(name=FULL_TASK_PATH, response_view=None),
            retry=None,
            timeout=None,
            metadata=(),
        )
"body": b"Hello", }, "schedule_time": timestamp, } with models.DAG( "example_gcp_tasks", schedule_interval=None, # Override to match your needs start_date=days_ago(1), tags=['example'], ) as dag: # Queue operations create_queue = CloudTasksQueueCreateOperator( location=LOCATION, task_queue=Queue(stackdriver_logging_config=dict(sampling_ratio=0.5)), queue_name=QUEUE_ID, retry=Retry(maximum=10.0), timeout=5, task_id="create_queue", ) delete_queue = CloudTasksQueueDeleteOperator( location=LOCATION, queue_name=QUEUE_ID, task_id="delete_queue", ) resume_queue = CloudTasksQueueResumeOperator( location=LOCATION, queue_name=QUEUE_ID,
    CloudTasksTaskCreateOperator,
    CloudTasksTaskDeleteOperator,
    CloudTasksTaskGetOperator,
    CloudTasksTaskRunOperator,
    CloudTasksTasksListOperator,
)

GCP_CONN_ID = "google_cloud_default"
PROJECT_ID = "test-project"
LOCATION = "asia-east2"
FULL_LOCATION_PATH = "projects/test-project/locations/asia-east2"
QUEUE_ID = "test-queue"
FULL_QUEUE_PATH = "projects/test-project/locations/asia-east2/queues/test-queue"
TASK_NAME = "test-task"
FULL_TASK_PATH = "projects/test-project/locations/asia-east2/queues/test-queue/tasks/test-task"
TEST_QUEUE = Queue(name=FULL_QUEUE_PATH)
TEST_TASK = Task(app_engine_http_request={})


class TestCloudTasksQueueCreate(unittest.TestCase):
    @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook")
    def test_create_queue(self, mock_hook):
        mock_hook.return_value.create_queue.return_value = TEST_QUEUE
        operator = CloudTasksQueueCreateOperator(location=LOCATION, task_queue=TEST_QUEUE, task_id="id")
        result = operator.execute(context=None)
        self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result)