def get_job(id):
  """Get a job's metadata by API Job ID.

  Args:
    id (str): Job ID to get

  Returns:
    JobMetadataResponse: Response containing relevant metadata
  """
  proj_id, job_id, task_id, attempt = job_ids.api_to_dsub(
      id, _provider_type())
  provider = providers.get_provider(_provider_type(), proj_id, _auth_token())

  jobs = []
  try:
    jobs = execute_redirect_stdout(lambda: dstat.dstat_job_producer(
        provider=provider,
        statuses={'*'},
        job_ids={job_id},
        task_ids={task_id} if task_id else None,
        task_attempts={attempt} if attempt else None,
        full_output=True).next())
  except apiclient.errors.HttpError as error:
    _handle_http_error(error, proj_id)

  # A job_id and task_id define a unique job (should only be one)
  if len(jobs) > 1:
    raise BadRequest('Found more than one job with ID {}'.format(id))
  elif len(jobs) == 0:
    raise NotFound('Could not find any jobs with ID {}'.format(id))

  return _metadata_response(id, jobs[0])
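# A minimal usage sketch, not part of the original module: it assumes get_job
# is called directly with an API-formatted job ID and that NotFound is the
# web-framework exception imported by this module. The helper name and the
# return-None convention are hypothetical, for illustration only.
def example_lookup(api_job_id):
  try:
    return get_job(api_job_id)
  except NotFound:
    # No dsub job matched the decoded (project, job, task, attempt) tuple.
    return None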
def dstat_get_jobs(statuses=None,
                   job_ids=None,
                   task_ids=None,
                   labels=None,
                   create_time_min=None,
                   create_time_max=None):
  """Run dstat to fetch jobs matching the given filters, scoped to this
  test run via the 'test-token' label."""
  statuses = statuses or {'*'}
  labels = labels or {}
  labels['test-token'] = test_setup.TEST_TOKEN
  labels_set = {param_util.LabelParam(k, v) for (k, v) in labels.items()}
  return dstat.dstat_job_producer(
      provider=get_dsub_provider(),
      statuses=statuses,
      job_ids=job_ids,
      task_ids=task_ids,
      labels=labels_set,
      create_time_min=create_time_min,
      create_time_max=create_time_max,
      full_output=True).next()
def dstat_get_jobs(statuses=None,
                   job_ids=None,
                   task_ids=None,
                   labels=None,
                   create_time_min=None,
                   create_time_max=None):
  """Run dstat to fetch jobs matching the given filters, scoped to this
  test run via the 'test-token' and 'test-name' labels. Uses
  six.advance_iterator instead of .next() for Python 2/3 compatibility."""
  statuses = statuses or {'*'}
  labels = labels or {}
  labels['test-token'] = test_setup.TEST_TOKEN
  labels['test-name'] = test_setup.TEST_NAME
  labels_set = {job_model.LabelParam(k, v) for (k, v) in labels.items()}
  return six.advance_iterator(
      dstat.dstat_job_producer(
          provider=get_dsub_provider(),
          statuses=statuses,
          job_ids=job_ids,
          task_ids=task_ids,
          labels=labels_set,
          create_time_min=create_time_min,
          create_time_max=create_time_max,
          full_output=True))
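# A minimal sketch of how the helper above might be used in a test, assuming
# a job was already submitted through the usual test setup; the function name
# and the single-result assertion are hypothetical, for illustration only.
def example_get_job(job_id):
  jobs = dstat_get_jobs(job_ids={job_id})
  # full_output=True yields one dict of dstat fields per matching job/task.
  assert len(jobs) == 1
  return jobs[0]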