def __populate_output_by_job_ids(self, job_ids, details, include_parents=False):
    """
    Add Job items, or the children of array/MNP jobs, to the output.

    :param job_ids: job ids or ARNs
    :param details: ask for job details
    :param include_parents: also emit a row for array/MNP parent jobs
    """
    try:
        if job_ids:
            self.log.info("Describing jobs (%s), details (%s)" % (job_ids, details))
            parents = []
            children_specs = []
            for described_job in self.__chunked_describe_jobs(job_ids):
                # a plain job is always shown; array/MNP parents only on request
                if include_parents or get_job_type(described_job) == "SIMPLE":
                    parents.append(described_job)
                if is_job_array(described_job):
                    children_specs.append(
                        (described_job["jobId"], ":", described_job["arrayProperties"]["size"]))
                elif is_mnp_job(described_job):
                    children_specs.append(
                        (described_job["jobId"], "#", described_job["nodeProperties"]["numNodes"]))
            # emit parent rows, then one row per array/MNP child
            self.__add_jobs(parents)
            self.__populate_output_by_parent_ids(children_specs)
    except Exception as e:
        fail("Error describing jobs from AWS Batch. Failed with exception: %s" % e)
def __populate_output_by_job_ids(self, job_status, job_ids, details):
    """
    Add Job items or job-array children to the output.

    :param job_status: list of job status to ask
    :param job_ids: job ids or ARNs
    :param details: ask for job details
    """
    try:
        if job_ids:
            self.log.info("Describing jobs (%s), details (%s)" % (job_ids, details))
            single_jobs = []
            job_array_ids = []
            jobs = []
            # DescribeJobs accepts at most 100 job ids per request: chunk the
            # calls (same scheme used by __add_jobs) instead of passing the
            # whole list, which fails for more than 100 ids.
            for index in range(0, len(job_ids), 100):
                jobs.extend(
                    self.batch_client.describe_jobs(jobs=job_ids[index:index + 100])['jobs'])
            for job in jobs:
                if is_job_array(job):
                    job_array_ids.append(job['jobId'])
                else:
                    single_jobs.append(job)
            # create output items for job array children
            self.__populate_output_by_array_ids(job_status, job_array_ids, details)
            # add single jobs to the output
            self.__add_jobs(single_jobs, details)
    except Exception as e:
        fail("Error describing jobs from AWS Batch. Failed with exception: %s" % e)
def __get_log_stream(self, job_id):
    """
    Get log stream for the given job.

    :param job_id: job id (ARN)
    :return: the log_stream if there, or None
    """
    log_stream = None
    try:
        batch_client = self.boto3_factory.get_client('batch')
        jobs = batch_client.describe_jobs(jobs=[job_id])['jobs']
        if len(jobs) != 1:
            fail("Error asking job output for job (%s). Job not found." % job_id)
        else:
            job = jobs[0]
            self.log.debug(job)
            if 'nodeProperties' in job:
                # MNP job: take the container of the first node range
                container = job['nodeProperties']['nodeRangeProperties'][0]['container']
            else:
                container = job.get('container', {})
            if is_job_array(job):
                fail("No output available for the Job Array (%s). Please ask for array children." % job['jobId'])
            elif 'logStreamName' in container:
                log_stream = container.get('logStreamName')
            else:
                print("No log stream found for job (%s) in the status (%s)" % (job_id, job['status']))
    except Exception as e:
        fail("Error listing jobs from AWS Batch. Failed with exception: %s" % e)
    return log_stream
def __populate_output_by_queue(self, job_queue, job_status, expand_arrays, details):
    """
    Add Job items to the output asking for given queue and status.

    :param job_queue: job queue name or ARN
    :param job_status: list of job status to ask
    :param expand_arrays: if True, the job array will be expanded by creating a row for each child
    :param details: ask for job details
    """
    try:
        for status in job_status:
            # '' asks for the first page; a missing 'nextToken' ends the loop
            pagination_token = ''
            while pagination_token is not None:
                response = self.batch_client.list_jobs(
                    jobStatus=status, jobQueue=job_queue, nextToken=pagination_token)
                array_parents = []
                plain_jobs = []
                for summary in response['jobSummaryList']:
                    if is_job_array(summary) and expand_arrays is True:
                        array_parents.append(summary['jobId'])
                    else:
                        plain_jobs.append(summary)
                # create output items for job array children
                self.__populate_output_by_job_ids(job_status, array_parents, details)
                # add single jobs to the output
                self.__add_jobs(plain_jobs, details)
                pagination_token = response.get('nextToken')
    except Exception as e:
        fail("Error listing jobs from AWS Batch. Failed with exception: %s" % e)
def __add_jobs(self, jobs, details):
    """
    Get job info from AWS Batch and add to the output.

    :param jobs: list of jobs items (output of the list_jobs function)
    :param details: ask for job details
    """
    try:
        if jobs:
            self.log.debug("Adding jobs to the output (%s)" % jobs)
            if details:
                self.log.info("Asking for jobs details")
                jobs_to_show = []
                # DescribeJobs accepts at most 100 job ids per call: chunk the request
                for index in range(0, len(jobs), 100):
                    job_ids = [job['jobId'] for job in jobs[index:index + 100]]
                    jobs_to_show.extend(
                        self.batch_client.describe_jobs(jobs=job_ids)['jobs'])
            else:
                jobs_to_show = jobs

            for job in jobs_to_show:
                nodes = 1
                if 'nodeProperties' in job:
                    # MNP job: take the container of the first node range
                    container = job['nodeProperties']['nodeRangeProperties'][0]['container']
                    nodes = job['nodeProperties']['numNodes']
                elif 'container' in job:
                    container = job['container']
                else:
                    container = {}

                if is_job_array(job):
                    # parent job array: children own the log streams
                    job_id = '{0}[{1}]'.format(job['jobId'], job['arrayProperties']['size'])
                    log_stream = '-'
                    log_stream_url = '-'
                else:
                    job_id = job['jobId']
                    if 'logStreamName' in container:
                        log_stream = container.get('logStreamName')
                        log_stream_url = _compose_log_stream_url(
                            self.boto3_factory.region, log_stream)
                    else:
                        log_stream = '-'
                        log_stream_url = '-'

                command = container.get('command', [])
                self.log.debug("Adding job to the output (%s)", job)
                job = Job(
                    job_id=job_id,
                    name=job['jobName'],
                    creation_time=convert_to_date(job['createdAt']),
                    start_time=convert_to_date(job['startedAt']) if 'startedAt' in job else '-',
                    stop_time=convert_to_date(job['stoppedAt']) if 'stoppedAt' in job else '-',
                    status=job.get('status', 'UNKNOWN'),
                    status_reason=job.get('statusReason', '-'),
                    # BUGFIX: guard on the key actually read ('jobDefinition', not
                    # 'jobQueue'); a job carrying jobQueue without jobDefinition
                    # raised KeyError instead of falling back to '-'
                    job_definition=get_job_definition_name_by_arn(job['jobDefinition'], version=True)
                    if 'jobDefinition' in job else '-',
                    queue=job['jobQueue'].split('/')[1] if 'jobQueue' in job else '-',
                    command=shell_join(command) if command else '-',
                    reason=container.get('reason', '-'),
                    exit_code=container.get('exitCode', '-'),
                    vcpus=container.get('vcpus', '-'),
                    memory=container.get('memory', '-'),
                    nodes=nodes,
                    log_stream=log_stream,
                    log_stream_url=log_stream_url)
                self.output.add(job)
    except KeyError as e:
        fail("Error building Job item. Key (%s) not found." % e)
    except Exception as e:
        fail("Error adding jobs to the output. Failed with exception: %s" % e)