def ParseQueue(queue, location=None):
  """Parses an id or uri for a queue.

  Args:
    queue: An id, self-link, or relative path of a queue resource.
    location: The location of the app associated with the active project.

  Returns:
    A queue resource reference, or None if passed-in queue is Falsy.
  """
  if not queue:
    return None

  queue_ref = None
  try:
    queue_ref = resources.REGISTRY.Parse(
        queue, collection=constants.QUEUES_COLLECTION)
  except resources.RequiredFieldOmittedException:
    app_location = location or app.ResolveAppLocation()
    location_ref = ParseLocation(app_location)
    queue_ref = resources.REGISTRY.Parse(
        queue,
        params={
            'projectsId': location_ref.projectsId,
            'locationsId': location_ref.locationsId
        },
        collection=constants.QUEUES_COLLECTION)
  return queue_ref
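# Usage sketch for ParseQueue (not from the original source): it assumes the
# call happens inside a gcloud command module where the surrounding imports
# (resources, constants, app) are available, and that 'my-queue',
# 'my-project', and 'us-central1' are hypothetical values. A bare queue id
# falls back to the resolved app location, while a fully qualified relative
# path parses directly.
short_ref = ParseQueue('my-queue', location='us-central1')
full_ref = ParseQueue(
    'projects/my-project/locations/us-central1/queues/my-queue')
assert full_ref.RelativeName().endswith('queues/my-queue')
assert ParseQueue(None) is None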
def DeployCronYamlFile(scheduler_api, config, existing_jobs):
  """Perform a deployment based on the parsed 'cron.yaml' file.

  For every job defined in the cron.yaml file, we create a new cron job if it
  does not already exist in our backend. We also delete those jobs which are
  not present in the YAML file but exist in our backend.

  Note: We do not update any jobs. The only operations are Create and Delete.
  So if we modify any attribute of an existing job in the YAML file, the old
  job gets deleted and a new job is created based on the new attributes.

  Args:
    scheduler_api: api_lib.scheduler.<Alpha|Beta|GA>ApiAdapter, Cloud Scheduler
      API needed for doing job-based operations.
    config: A yaml_parsing.ConfigYamlInfo object for the parsed YAML file we
      are going to process.
    existing_jobs: A list of jobs that already exist in the backend. Each job
      maps to an apis.cloudscheduler.<ver>.cloudscheduler<ver>_messages.Job
      instance.

  Returns:
    A list of responses received from the Cloud Scheduler APIs representing
    job states for every call made to create a job.
  """
  cron_yaml = config.parsed
  jobs_client = scheduler_api.jobs
  app_location = app.ResolveAppLocation(
      parsers.ParseProject(), locations_client=scheduler_api.locations)
  region_ref = parsers.ParseLocation(app_location).RelativeName()
  project = os.path.basename(str(parsers.ParseProject()))
  existing_jobs_dict = _BuildJobsMappingDict(existing_jobs, project)

  # Create a new job for any job that does not map exactly to jobs that
  # already exist in the backend.
  responses = []
  if cron_yaml.cron:
    for yaml_job in cron_yaml.cron:
      _ReplaceDefaultRetryParamsForYamlJob(yaml_job)
      job_key = _CreateUniqueJobKeyForYamlJob(yaml_job)
      if job_key in existing_jobs_dict and existing_jobs_dict[job_key]:
        # If the job already exists then we do not need to do anything.
        # TODO(b/169069379): Enhance to pop based on oldest/newest
        existing_jobs_dict[job_key].pop()
        continue
      job = CreateJobInstance(scheduler_api, yaml_job)
      responses.append(jobs_client.Create(region_ref, job))

  # TODO(b/169069379): Preserve next job execution for jobs whose only change
  # is description

  # Delete the jobs which are no longer in the YAML file
  for jobs_list in existing_jobs_dict.values():
    for yaml_job in jobs_list:
      jobs_client.Delete(yaml_job.name)
  return responses
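# Minimal standalone sketch (not part of the deploy code) of the create/delete
# reconciliation above: jobs are matched by a key derived from their
# attributes, so any attribute change produces a new key, which means the old
# job is deleted and a new one is created rather than updated. The key_fn
# argument is a stand-in for _CreateUniqueJobKeyForYamlJob.
def _Reconcile(yaml_jobs, existing_jobs, key_fn):
  existing_by_key = {}
  for job in existing_jobs:
    existing_by_key.setdefault(key_fn(job), []).append(job)
  to_create = []
  for yaml_job in yaml_jobs:
    key = key_fn(yaml_job)
    if existing_by_key.get(key):
      existing_by_key[key].pop()  # Already deployed; leave it untouched.
    else:
      to_create.append(yaml_job)
  # Whatever is left keyed in existing_by_key is no longer in the YAML file.
  to_delete = [job for jobs in existing_by_key.values() for job in jobs]
  return to_create, to_delete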
def testResolveMultipleLocation(self):
  self.locations_service.List.Expect(
      self.messages.CloudtasksProjectsLocationsListRequest(
          name=self.project_ref.RelativeName(), pageSize=2),
      response=self.messages.ListLocationsResponse(locations=[
          self._LocationMessage(self.messages, 'us-central1'),
          self._LocationMessage(self.messages, 'us-east1'),
      ]))
  with self.assertRaises(tasks_app_command_lib.RegionResolvingError):
    tasks_app_command_lib.ResolveAppLocation(self.project_ref)
def testResolveLocation(self):
  self.locations_service.List.Expect(
      self.messages.CloudtasksProjectsLocationsListRequest(
          name=self.project_ref.RelativeName(), pageSize=2),
      response=self.messages.ListLocationsResponse(locations=[
          self._LocationMessage(self.messages, 'us-central1')
      ]))
  actual_location = tasks_app_command_lib.ResolveAppLocation(
      self.project_ref)
  expected_location = 'us-central1'
  self.assertEqual(actual_location, expected_location)
def testResolveLocation_CreateApp_RaceCollision(self):
  self._ExpectAppEngineListLocationsRequest()
  self.WriteInput('y')  # Would you like to create one (Y/n)?
  self.WriteInput('1')  # [1] us-central (supports standard and flexible)
  self.app_engine_apitools_mock_client.apps.Create.Expect(
      self.app_engine_messages.Application(id=self.Project(),
                                           locationId='us-central'),
      exception=http_error.MakeHttpError(code=409))
  with self.assertRaises(create_util.AppAlreadyExistsError):
    tasks_app_command_lib.ResolveAppLocation(self.project_ref)
def ParseQueue(queue):
  """Parses an id or uri for a queue."""
  queue_ref = None
  try:
    queue_ref = resources.REGISTRY.Parse(
        queue, collection=constants.QUEUES_COLLECTION)
  except resources.RequiredFieldOmittedException:
    location_ref = ParseLocation(app.ResolveAppLocation())
    queue_ref = resources.REGISTRY.Parse(
        queue,
        params={'projectsId': location_ref.projectsId,
                'locationsId': location_ref.locationsId},
        collection=constants.QUEUES_COLLECTION)
  return queue_ref
def FetchCurrentJobsData(scheduler_api):
  """Fetches the current jobs data stored in the database.

  Args:
    scheduler_api: api_lib.scheduler.<Alpha|Beta|GA>ApiAdapter, Cloud Scheduler
      API needed for doing job-based operations.

  Returns:
    A list of currently existing jobs in the backend.
  """
  jobs_client = scheduler_api.jobs
  app_location = app.ResolveAppLocation(
      parsers.ParseProject(), locations_client=scheduler_api.locations)
  region_ref = parsers.ParseLocation(app_location).RelativeName()
  return list(x for x in jobs_client.List(region_ref))
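# Usage sketch (not from the original source): assumes 'scheduler_api' is the
# adapter obtained elsewhere in the command, and that the returned Job
# messages expose a 'name' field holding the full resource path.
existing_job_names = [job.name for job in FetchCurrentJobsData(scheduler_api)]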
def ParseTask(task, queue):
  """Parses an id or uri for a task."""
  task_ref = None
  try:
    task_ref = resources.REGISTRY.Parse(task,
                                        collection=constants.TASKS_COLLECTION,
                                        params={'queuesId': queue})
  except resources.RequiredFieldOmittedException:
    location_ref = ParseLocation(app.ResolveAppLocation())
    task_ref = resources.REGISTRY.Parse(
        task,
        params={'projectsId': location_ref.projectsId,
                'locationsId': location_ref.locationsId,
                'queuesId': queue},
        collection=constants.TASKS_COLLECTION)
  return task_ref
def testResolveLocation_CreateApp(self):
  self._ExpectAppEngineListLocationsRequest()
  self.WriteInput('y')  # Would you like to create one (Y/n)?
  self.WriteInput('1')  # [1] us-central (supports standard and flexible)
  self._ExpectCreateAppRequest()
  self.locations_service.List.Expect(
      self.messages.CloudtasksProjectsLocationsListRequest(
          name=self.project_ref.RelativeName(), pageSize=2),
      response=self.messages.ListLocationsResponse(locations=[
          self._LocationMessage(self.messages, 'us-central1')
      ]))
  actual_location = tasks_app_command_lib.ResolveAppLocation(
      self.project_ref)
  expected_location = 'us-central1'
  self.assertEqual(actual_location, expected_location)
def FetchCurrentQueuesData(tasks_api):
  """Fetches the current queues data stored in the database.

  Args:
    tasks_api: api_lib.tasks.<Alpha|Beta|GA>ApiAdapter, Cloud Tasks API needed
      for doing queue-based operations.

  Returns:
    A dictionary with queue names as keys and the corresponding protobuf Queue
    objects (apis.cloudtasks.<ver>.cloudtasks_<ver>_messages.Queue) as values.
  """
  queues_client = tasks_api.queues
  app_location = app.ResolveAppLocation(parsers.ParseProject())
  region_ref = parsers.ParseLocation(app_location)
  all_queues_in_db_dict = {
      os.path.basename(x.name): x for x in queues_client.List(region_ref)
  }
  return all_queues_in_db_dict
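# Standalone note (not from the original source) on why the dictionary is
# keyed by os.path.basename: queue names come back as full resource paths,
# and basename reduces them to the short queue id. The path below is a
# hypothetical example.
import os.path
assert os.path.basename(
    'projects/my-project/locations/us-central1/queues/my-queue') == 'my-queue'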
def __call__(self):
  if self.location is None:
    self.location = app.ResolveAppLocation(
        valid_regions=VALID_REGIONS, product='Cloud Scheduler')
  return self.location
def Run(self, args):
  queues_client = GetApiAdapter(self.ReleaseTrack()).queues
  app_location = args.location or app.ResolveAppLocation()
  region_ref = parsers.ParseLocation(app_location)
  return queues_client.List(region_ref, args.limit, args.page_size)
def Run(self, args):
  queues_client = queues.Queues()
  app_location = args.location or app.ResolveAppLocation()
  region_ref = parsers.ParseLocation(app_location)
  return queues_client.List(region_ref, args.limit, args.page_size)
def testResolveLocation_CreateApp_Cancel(self):
  self.WriteInput('n')  # Would you like to create one (Y/n)?
  with self.assertRaises(tasks_app_command_lib.RegionResolvingError):
    tasks_app_command_lib.ResolveAppLocation(self.project_ref)