Example #1
class AWSBstatCommand:
    """awsbstat command."""

    __JOB_CONVERTERS = {"SIMPLE": JobConverter(), "ARRAY": ArrayJobConverter(), "MNP": MNPJobConverter()}

    def __init__(self, log, boto3_factory):
        """
        Initialize the object.

        :param log: log
        :param boto3_factory: an initialized Boto3ClientFactory object
        """
        self.log = log
        mapping = collections.OrderedDict(
            [
                ("jobId", "id"),
                ("jobName", "name"),
                ("createdAt", "creation_time"),
                ("startedAt", "start_time"),
                ("stoppedAt", "stop_time"),
                ("status", "status"),
                ("statusReason", "status_reason"),
                ("jobDefinition", "job_definition"),
                ("jobQueue", "queue"),
                ("command", "command"),
                ("exitCode", "exit_code"),
                ("reason", "reason"),
                ("vcpus", "vcpus"),
                ("memory[MB]", "memory"),
                ("nodes", "nodes"),
                ("logStream", "log_stream"),
                ("log", "log_stream_url"),
                ("s3FolderUrl", "s3_folder_url"),
            ]
        )
        self.output = Output(mapping=mapping)
        self.boto3_factory = boto3_factory
        self.batch_client = boto3_factory.get_client("batch")

    def run(self, job_status, expand_children, job_queue=None, job_ids=None, show_details=False):
        """Print list of jobs, by filtering by queue or by ids."""
        if job_ids:
            self.__populate_output_by_job_ids(job_ids, show_details or len(job_ids) == 1, include_parents=True)
            # explicitly asking for job details,
            # or asking for a single simple job (the output is not a list of jobs)
            details_required = show_details or (len(job_ids) == 1 and self.output.length() == 1)
        elif job_queue:
            self.__populate_output_by_queue(job_queue, job_status, expand_children, show_details)
            details_required = show_details
        else:
            fail("Error listing jobs from AWS Batch. job_ids or job_queue must be defined")

        sort_keys_function = self.__sort_by_status_startedat_jobid() if not job_ids else self.__sort_by_key(job_ids)
        if details_required:
            self.output.show(sort_keys_function=sort_keys_function)
        else:
            self.output.show_table(
                keys=["jobId", "jobName", "status", "startedAt", "stoppedAt", "exitCode"],
                sort_keys_function=sort_keys_function,
            )

    @staticmethod
    def __sort_by_key(ordered_keys):  # noqa: D202
        """
        Build a function to sort the output by key.

        :param ordered_keys: list containing the sorted keys.
        :return: a function to be used as key argument of the sorted function.
        """

        def _sort_by_key(item):
            job_id = item.id
            try:
                # in case the parent id was provided as input, sort children based on parent id position
                parent_id = re.findall(r"[\w-]+", job_id)[0]
                job_position = ordered_keys.index(parent_id)

            except ValueError:
                # in case the child id was provided as input, use its position in the list
                job_position = ordered_keys.index(job_id)

            return (
                # sort by id according to the order in the keys_order list
                job_position,
                # sort by full id (needed to have parent before children)
                job_id,
            )

        return _sort_by_key

    @staticmethod
    def __sort_by_status_startedat_jobid():
        """
        Build a function to sort the output by (status, startedAt, jobId).

        :return: a function to be used as key argument of the sorted function.
        """
        return lambda item: (
            # sort by status. Status order is defined by AWS_BATCH_JOB_STATUS.
            AWS_BATCH_JOB_STATUS.index(item.status),
            # sort by startedAt column.
            item.start_time,
            # sort by jobId column.
            item.id,
        )

    def __populate_output_by_job_ids(self, job_ids, details, include_parents=False):
        """
        Add Job item or jobs array children to the output.

        :param job_ids: job ids or ARNs
        :param details: ask for job details
        """
        try:
            if job_ids:
                self.log.info("Describing jobs (%s), details (%s)" % (job_ids, details))
                parent_jobs = []
                jobs_with_children = []
                jobs = self.__chunked_describe_jobs(job_ids)
                for job in jobs:
                    # always add parent job
                    if include_parents or get_job_type(job) == "SIMPLE":
                        parent_jobs.append(job)
                    if is_job_array(job):
                        jobs_with_children.append((job["jobId"], ":", job["arrayProperties"]["size"]))
                    elif is_mnp_job(job):
                        jobs_with_children.append((job["jobId"], "#", job["nodeProperties"]["numNodes"]))

                # add parent jobs to the output
                self.__add_jobs(parent_jobs)

                # create output items for jobs' children
                self.__populate_output_by_parent_ids(jobs_with_children)
        except Exception as e:
            fail("Error describing jobs from AWS Batch. Failed with exception: %s" % e)

    def __populate_output_by_parent_ids(self, parent_jobs):
        """
        Add job children to the output.

        :param parent_jobs: list of triplets (job_id, job_id_separator, job_size)
        """
        try:
            expanded_job_ids = []
            for parent_job in parent_jobs:
                expanded_job_ids.extend(
                    [
                        "{JOB_ID}{SEPARATOR}{INDEX}".format(JOB_ID=parent_job[0], SEPARATOR=parent_job[1], INDEX=i)
                        for i in range(0, parent_job[2])
                    ]
                )

            if expanded_job_ids:
                jobs = self.__chunked_describe_jobs(expanded_job_ids)

                # details are left as False: these jobs were just fully described above.
                self.__add_jobs(jobs)
        except Exception as e:
            fail("Error listing job children. Failed with exception: %s" % e)

    def __chunked_describe_jobs(self, job_ids):
        """
        Submit calls to describe_jobs in batches of 100 elements each.

        The describe_jobs API call has a hard limit on the number of jobs that can be
        retrieved with a single call. If job_ids contains more than 100 items, this function
        spreads the describe_jobs call across multiple requests.

        :param job_ids: list of ids for the jobs to describe.
        :return: list of described jobs.
        """
        jobs = []
        for index in range(0, len(job_ids), 100):
            jobs_chunk = job_ids[index : index + 100]  # noqa: E203
            jobs.extend(self.batch_client.describe_jobs(jobs=jobs_chunk)["jobs"])
        return jobs

    def __add_jobs(self, jobs, details=False):
        """
        Get job info from AWS Batch and add to the output.

        :param jobs: list of job items (as returned by list_jobs or describe_jobs)
        :param details: ask for job details
        """
        try:
            if jobs:
                self.log.debug("Adding jobs to the output (%s)" % jobs)
                if details:
                    self.log.info("Asking for jobs details")
                    jobs_to_show = self.__chunked_describe_jobs([job["jobId"] for job in jobs])
                else:
                    jobs_to_show = jobs

                for job in jobs_to_show:
                    self.log.debug("Adding job to the output (%s)", job)

                    job_converter = self.__JOB_CONVERTERS[get_job_type(job)]

                    self.output.add(job_converter.convert(job))
        except KeyError as e:
            fail("Error building Job item. Key (%s) not found." % e)
        except Exception as e:
            fail("Error adding jobs to the output. Failed with exception: %s" % e)

    def __populate_output_by_queue(self, job_queue, job_status, expand_children, details):
        """
        Add Job items to the output by querying the given queue for the given statuses.

        :param job_queue: job queue name or ARN
        :param job_status: list of job statuses to query
        :param expand_children: if True, jobs with children are expanded by creating a row for each child
        :param details: ask for job details
        """
        try:
            single_jobs = []
            jobs_with_children = []
            for status in job_status:
                next_token = ""  # nosec
                while next_token is not None:
                    response = self.batch_client.list_jobs(jobStatus=status, jobQueue=job_queue, nextToken=next_token)

                    for job in response["jobSummaryList"]:
                        if get_job_type(job) != "SIMPLE" and expand_children is True:
                            jobs_with_children.append(job["jobId"])
                        else:
                            single_jobs.append(job)
                    next_token = response.get("nextToken")

            # create output items for job array children
            self.__populate_output_by_job_ids(jobs_with_children, details)

            # add single jobs to the output
            self.__add_jobs(single_jobs, details)

        except Exception as e:
            fail("Error listing jobs from AWS Batch. Failed with exception: %s" % e)
Example #2
class AWSBstatCommand(object):
    """
    awsbstat command
    """
    def __init__(self, log, boto3_factory):
        """
        :param log: log
        :param boto3_factory: an initialized Boto3ClientFactory object
        """
        self.log = log
        mapping = collections.OrderedDict([('jobId', 'id'),
                                           ('jobName', 'name'),
                                           ('createdAt', 'creation_time'),
                                           ('startedAt', 'start_time'),
                                           ('stoppedAt', 'stop_time'),
                                           ('status', 'status'),
                                           ('statusReason', 'status_reason'),
                                           ('jobDefinition', 'job_definition'),
                                           ('jobQueue', 'queue'),
                                           ('command', 'command'),
                                           ('exitCode', 'exit_code'),
                                           ('reason', 'reason'),
                                           ('vcpus', 'vcpus'),
                                           ('memory[MB]', 'memory'),
                                           ('nodes', 'nodes'),
                                           ('logStream', 'log_stream'),
                                           ('log', 'log_stream_url')])
        self.output = Output(mapping=mapping)
        self.boto3_factory = boto3_factory
        self.batch_client = boto3_factory.get_client('batch')

    def run(self,
            job_status,
            expand_arrays,
            job_queue=None,
            job_ids=None,
            show_details=False):
        """
        print list of jobs, by filtering by queue or by ids
        """
        if job_ids:
            self.__populate_output_by_job_ids(
                job_status, job_ids, show_details or len(job_ids) == 1)
            # explicitly asking for job details,
            # or asking for a single job that is not an array (the output is not a list of jobs)
            details_required = show_details or (len(job_ids) == 1
                                                and self.output.length() == 1)
        elif job_queue:
            self.__populate_output_by_queue(job_queue, job_status,
                                            expand_arrays, show_details)
            details_required = show_details
        else:
            fail(
                "Error listing jobs from AWS Batch. job_ids or job_queue must be defined"
            )

        if details_required:
            self.output.show()
        else:
            self.output.show_table([
                'jobId', 'jobName', 'status', 'startedAt', 'stoppedAt',
                'exitCode'
            ])

    def __populate_output_by_job_ids(self, job_status, job_ids, details):
        """
        Add Job item or jobs array children to the output
        :param job_status: list of job status to ask
        :param job_ids: job ids or ARNs
        :param details: ask for job details
        """
        try:
            if job_ids:
                self.log.info("Describing jobs (%s), details (%s)" %
                              (job_ids, details))
                single_jobs = []
                job_array_ids = []
                jobs = self.batch_client.describe_jobs(jobs=job_ids)['jobs']
                for job in jobs:
                    if is_job_array(job):
                        job_array_ids.append(job['jobId'])
                    else:
                        single_jobs.append(job)

                # create output items for job array children
                self.__populate_output_by_array_ids(job_status, job_array_ids,
                                                    details)

                # add single jobs to the output
                self.__add_jobs(single_jobs, details)
        except Exception as e:
            fail(
                "Error describing jobs from AWS Batch. Failed with exception: %s"
                % e)

    def __populate_output_by_array_ids(self, job_status, job_array_ids,
                                       details):
        """
        Add jobs array children to the output
        :param job_status: list of job status to ask
        :param job_array_ids: job array ids to ask
        :param details: ask for job details
        """
        try:
            for job_array_id in job_array_ids:
                for status in job_status:
                    self.log.info(
                        "Listing job array children for job (%s) in status (%s)"
                        % (job_array_id, status))
                    next_token = ''
                    while next_token is not None:
                        response = self.batch_client.list_jobs(
                            jobStatus=status,
                            arrayJobId=job_array_id,
                            nextToken=next_token)
                        # add single jobs to the output
                        self.__add_jobs(response['jobSummaryList'], details)
                        next_token = response.get('nextToken')
        except Exception as e:
            fail(
                "Error listing job array children for job (%s). Failed with exception: %s"
                % (job_array_id, e))

    def __add_jobs(self, jobs, details):
        """
        Get job info from AWS Batch and add to the output
        :param jobs: list of jobs items (output of the list_jobs function)
        :param details: ask for job details
        """
        try:
            if jobs:
                self.log.debug("Adding jobs to the output (%s)" % jobs)
                if details:
                    self.log.info("Asking for jobs details")
                    jobs_to_show = []
                    for index in range(0, len(jobs), 100):
                        jobs_chunk = jobs[index:index + 100]
                        job_ids = []
                        for job in jobs_chunk:
                            job_ids.append(job['jobId'])
                        jobs_to_show.extend(
                            self.batch_client.describe_jobs(
                                jobs=job_ids)['jobs'])
                else:
                    jobs_to_show = jobs

                for job in jobs_to_show:
                    nodes = 1
                    if 'nodeProperties' in job:
                        # MNP job
                        container = job['nodeProperties'][
                            'nodeRangeProperties'][0]['container']
                        nodes = job['nodeProperties']['numNodes']
                    elif 'container' in job:
                        container = job['container']
                    else:
                        container = {}

                    if is_job_array(job):
                        # parent job array
                        job_id = '{0}[{1}]'.format(
                            job['jobId'], job['arrayProperties']['size'])
                        log_stream = '-'
                        log_stream_url = '-'
                    else:
                        job_id = job['jobId']
                        if 'logStreamName' in container:
                            log_stream = container.get('logStreamName')
                            log_stream_url = _compose_log_stream_url(
                                self.boto3_factory.region, log_stream)
                        else:
                            log_stream = '-'
                            log_stream_url = '-'

                    command = container.get('command', [])
                    self.log.debug("Adding job to the output (%s)", job)
                    job = Job(job_id=job_id,
                              name=job['jobName'],
                              creation_time=convert_to_date(job['createdAt']),
                              start_time=convert_to_date(job['startedAt'])
                              if 'startedAt' in job else '-',
                              stop_time=convert_to_date(job['stoppedAt'])
                              if 'stoppedAt' in job else '-',
                              status=job.get('status', 'UNKNOWN'),
                              status_reason=job.get('statusReason', '-'),
                              job_definition=get_job_definition_name_by_arn(
                                  job['jobDefinition'], version=True)
                              if 'jobDefinition' in job else '-',
                              queue=job['jobQueue'].split('/')[1]
                              if 'jobQueue' in job else '-',
                              command=shell_join(command) if command else '-',
                              reason=container.get('reason', '-'),
                              exit_code=container.get('exitCode', '-'),
                              vcpus=container.get('vcpus', '-'),
                              memory=container.get('memory', '-'),
                              nodes=nodes,
                              log_stream=log_stream,
                              log_stream_url=log_stream_url)
                    self.output.add(job)
        except KeyError as e:
            fail("Error building Job item. Key (%s) not found." % e)
        except Exception as e:
            fail("Error adding jobs to the output. Failed with exception: %s" %
                 e)

    def __populate_output_by_queue(self, job_queue, job_status, expand_arrays,
                                   details):
        """
        Add Job items to the output asking for given queue and status
        :param job_queue: job queue name or ARN
        :param job_status: list of job status to ask
        :param expand_arrays: if True, the job array will be expanded by creating a row for each child
        :param details: ask for job details
        """
        try:
            for status in job_status:
                next_token = ''
                while next_token is not None:
                    response = self.batch_client.list_jobs(
                        jobStatus=status,
                        jobQueue=job_queue,
                        nextToken=next_token)
                    single_jobs = []
                    job_array_ids = []
                    for job in response['jobSummaryList']:
                        if is_job_array(job) and expand_arrays is True:
                            job_array_ids.append(job['jobId'])
                        else:
                            single_jobs.append(job)

                    # create output items for job array children
                    self.__populate_output_by_job_ids(job_status,
                                                      job_array_ids, details)

                    # add single jobs to the output
                    self.__add_jobs(single_jobs, details)

                    next_token = response.get('nextToken')
        except Exception as e:
            fail("Error listing jobs from AWS Batch. Failed with exception: %s"
                 % e)
Example #3
class AWSBqueuesCommand(object):
    """awsbqueues command."""
    def __init__(self, log, boto3_factory):
        """
        Constructor.

        :param log: log
        :param boto3_factory: an initialized Boto3ClientFactory object
        """
        self.log = log
        mapping = collections.OrderedDict([
            ("jobQueueArn", "arn"),
            ("jobQueueName", "name"),
            ("priority", "priority"),
            ("status", "status"),
            ("statusReason", "status_reason"),
        ])
        self.output = Output(mapping=mapping)
        self.boto3_factory = boto3_factory

    def run(self, job_queues, show_details=False):
        """Print list of queues."""
        self.__init_output(job_queues)
        if show_details:
            self.output.show()
        else:
            self.output.show_table(["jobQueueName", "status"])

    def __init_output(self, job_queues):
        """
        Initialize queues output by asking for given queues.

        :param job_queues: a list of job queues
        """
        try:
            # connect to batch and get queues
            batch_client = self.boto3_factory.get_client("batch")
            queues = batch_client.describe_job_queues(
                jobQueues=job_queues)["jobQueues"]
            self.log.info("Job Queues: %s" % job_queues)
            self.log.debug(queues)

            for queue in queues:
                self.output.add(self.__new_queue(queue=queue))

        except Exception as e:
            fail(
                "Error listing queues from AWS Batch. Failed with exception: %s"
                % e)

    @staticmethod
    def __new_queue(queue):
        """
        Parse jobQueue and return a Queue object.

        :param queue: the jobQueue object to parse
        :return: a Queue object
        """
        try:
            return Queue(
                arn=queue["jobQueueArn"],
                name=queue["jobQueueName"],
                priority=queue["priority"],
                status=queue["status"],
                status_reason=queue["statusReason"],
            )
        except KeyError as e:
            fail("Error building Queue item. Key (%s) not found." % e)
Example #4
class AWSBqueuesCommand(object):
    """awsbqueues command."""

    def __init__(self, log, boto3_factory):
        """
        Constructor.

        :param log: log
        :param boto3_factory: an initialized Boto3ClientFactory object
        """
        self.log = log
        mapping = collections.OrderedDict(
            [
                ("jobQueueArn", "arn"),
                ("jobQueueName", "name"),
                ("priority", "priority"),
                ("status", "status"),
                ("statusReason", "status_reason"),
            ]
        )
        self.output = Output(mapping=mapping)
        self.boto3_factory = boto3_factory

    def run(self, job_queues, show_details=False):
        """Print list of queues."""
        self.__init_output(job_queues)
        if show_details:
            self.output.show()
        else:
            self.output.show_table(["jobQueueName", "status"])

    def __init_output(self, job_queues):
        """
        Initialize queues output by asking for given queues.

        :param job_queues: a list of job queues
        """
        try:
            # connect to batch and get queues
            batch_client = self.boto3_factory.get_client("batch")
            queues = batch_client.describe_job_queues(jobQueues=job_queues)["jobQueues"]
            self.log.info("Job Queues: %s" % job_queues)
            self.log.debug(queues)

            for queue in queues:
                self.output.add(self.__new_queue(queue=queue))

        except Exception as e:
            fail("Error listing queues from AWS Batch. Failed with exception: %s" % e)

    @staticmethod
    def __new_queue(queue):
        """
        Parse jobQueue and return a Queue object.

        :param queue: the jobQueue object to parse
        :return: a Queue object
        """
        try:
            return Queue(
                arn=queue["jobQueueArn"],
                name=queue["jobQueueName"],
                priority=queue["priority"],
                status=queue["status"],
                status_reason=queue["statusReason"],
            )
        except KeyError as e:
            fail("Error building Queue item. Key (%s) not found." % e)
Example #5
class AWSBhostsCommand(object):
    """awsbhosts command."""
    def __init__(self, log, boto3_factory):
        """
        Initialize the object.

        :param log: log
        :param boto3_factory: an initialized Boto3ClientFactory object
        """
        self.log = log
        mapping = collections.OrderedDict([
            ("ec2InstanceId", "ec2_instance"),
            ("containerInstanceArn", "container_instance_arn"),
            ("status", "status"),
            ("instanceType", "instance_type"),
            ("privateIpAddress", "private_ip_address"),
            ("publicIpAddress", "public_ip_address"),
            ("privateDnsName", "private_dns_name"),
            ("publicDnsName", "public_dns_name"),
            ("runningJobs", "running_jobs"),
            ("pendingJobs", "pending_jobs"),
            ("registeredCPUs", "cpu_registered"),
            ("registeredMemory[MB]", "mem_registered"),
            ("availableCPUs", "cpu_avail"),
            ("availableMemory[MB]", "mem_avail"),
        ])
        self.output = Output(mapping=mapping)
        self.boto3_factory = boto3_factory
        self.ecs_client = boto3_factory.get_client("ecs")

    def run(self, compute_environments, show_details=False, instance_ids=None):
        """
        Print the list of hosts associated with the compute environments.

        :param compute_environments: a list of compute environments
        :param show_details: show compute environment details
        :param instance_ids: instances to query
        """
        self.__init_output(compute_environments, instance_ids)
        if show_details or instance_ids:
            self.output.show()
        else:
            self.output.show_table([
                "ec2InstanceId", "instanceType", "privateIpAddress",
                "publicIpAddress", "runningJobs"
            ])

    def __init_output(self, compute_environments, instance_ids=None):
        """
        Initialize the host output by asking for the hosts associated with the given compute environments.

        :param compute_environments: a list of compute environments
        :param instance_ids: requested hosts
        """
        ecs_clusters = self.__get_ecs_clusters(compute_environments)
        try:
            for ecs_cluster in ecs_clusters:
                self.log.info("Cluster ARN = %s" % ecs_cluster)
                paginator = self.ecs_client.get_paginator(
                    "list_container_instances")
                for page in paginator.paginate(cluster=ecs_cluster):
                    self._add_host_items(ecs_cluster,
                                         page["containerInstanceArns"],
                                         instance_ids)
        except Exception as e:
            fail(
                "Error listing container instances from AWS ECS. Failed with exception: %s"
                % e)

    @staticmethod
    def __create_host_item(container_instance, ec2_instance):
        """
        Merge container instance and ec2 instance information and create a Host item.

        :param container_instance: the containerInstance object to parse
        :param ec2_instance: the ec2Instance object to parse
        :return: the Host item
        """
        try:
            instance_type = AWSBhostsCommand.__get_instance_attribute(
                container_instance["attributes"], "ecs.instance-type")
            cpu_registered, mem_registered = AWSBhostsCommand.__get_cpu_and_memory(
                container_instance["registeredResources"])
            cpu_avail, mem_avail = AWSBhostsCommand.__get_cpu_and_memory(
                container_instance["remainingResources"])
            return Host(
                container_instance_arn=container_instance[
                    "containerInstanceArn"],
                status=container_instance["status"],
                ec2_instance=container_instance["ec2InstanceId"],
                instance_type=instance_type,
                private_ip_address=ec2_instance["PrivateIpAddress"],
                public_ip_address=ec2_instance["PublicIpAddress"]
                if ec2_instance.get("PublicIpAddress") else "-",
                private_dns_name=ec2_instance["PrivateDnsName"],
                public_dns_name=ec2_instance["PublicDnsName"]
                if ec2_instance.get("PublicDnsName") else "-",
                running_jobs=container_instance["runningTasksCount"],
                pending_jobs=container_instance["pendingTasksCount"],
                cpu_registered=cpu_registered,
                mem_registered=mem_registered,
                cpu_avail=cpu_avail,
                mem_avail=mem_avail,
            )
        except KeyError as e:
            fail("Error building Host item. Key (%s) not found." % e)

    @staticmethod
    def __get_instance_attribute(attributes, attribute_name):
        """
        Get container instance attribute by name.

        :param attributes: list of attributes
        :param attribute_name: name of the attribute
        :return: the attribute value
        """
        attr_value = "-"
        for attr in attributes:
            if attr["name"] == attribute_name:
                attr_value = attr["value"]
                break
        return attr_value

    @staticmethod
    def __get_cpu_and_memory(resources):
        """
        Get CPU and MEMORY information from given resources object.

        :param resources: resources json object
        :return: cpu and memory
        """
        cpu = "-"
        memory = "-"
        for resource in resources:
            if resource["name"] == "CPU":
                cpu = resource["integerValue"] / 1024
            elif resource["name"] == "MEMORY":
                memory = resource["integerValue"]
        return cpu, memory

    def _add_host_items(self,
                        ecs_cluster_arn,
                        container_instances_arns,
                        instance_ids=None):
        """
        Add a list of Hosts to the output.

        :param ecs_cluster_arn: ECS Cluster arn
        :param container_instances_arns: container ids
        :param instance_ids: hosts requested
        """
        self.log.info("Container ARNs = %s" % container_instances_arns)
        if container_instances_arns:
            response = self.ecs_client.describe_container_instances(
                cluster=ecs_cluster_arn,
                containerInstances=container_instances_arns)
            container_instances = response["containerInstances"]
            self.log.debug("Container Instances = %s" % container_instances)
            # get ec2_instance_ids
            ec2_instances_ids = []
            for container_instance in container_instances:
                ec2_instances_ids.append(container_instance["ec2InstanceId"])

            # get ec2 instances information
            ec2_instances = {}
            try:
                ec2_client = self.boto3_factory.get_client("ec2")
                paginator = ec2_client.get_paginator("describe_instances")
                for page in paginator.paginate(InstanceIds=ec2_instances_ids):
                    for reservation in page["Reservations"]:
                        for instance in reservation["Instances"]:
                            ec2_instances[instance["InstanceId"]] = instance
            except Exception as e:
                fail(
                    "Error listing EC2 instances from AWS EC2. Failed with exception: %s"
                    % e)

            # merge ec2 and container information
            for container_instance in container_instances:
                ec2_instance_id = container_instance["ec2InstanceId"]
                # filter by instance_id if provided
                if not instance_ids or ec2_instance_id in instance_ids:
                    self.log.debug("Container Instance = %s" %
                                   container_instance)
                    self.log.debug("EC2 Instance = %s" %
                                   ec2_instances[ec2_instance_id])
                    self.output.add(
                        self.__create_host_item(
                            container_instance,
                            ec2_instances[ec2_instance_id]))

    @staticmethod
    def __get_clusters(compute_environments):
        """
        Parse computeEnvironments object and return a list of ecsClusterArn.

        :param compute_environments: a list of Compute Environments
        :return: a list of ECS clusters
        """
        ecs_clusters = []
        for compute_env in compute_environments:
            ecs_clusters.append(compute_env["ecsClusterArn"])
        return ecs_clusters

    def __get_ecs_clusters(self, compute_environments):
        """
        Get Compute Environments from AWS Batch and create a list of ECS Cluster ARNs.

        :param compute_environments: compute environments to query
        :return: a list of ECS clusters
        """
        ecs_clusters = []
        try:
            # connect to batch and ask for compute environments
            batch_client = self.boto3_factory.get_client("batch")
            next_token = ""
            while next_token is not None:
                response = batch_client.describe_compute_environments(
                    computeEnvironments=compute_environments,
                    nextToken=next_token)
                ecs_clusters.extend(
                    self.__get_clusters(response["computeEnvironments"]))
                next_token = response.get("nextToken")
        except Exception as e:
            fail(
                "Error listing compute environments from AWS Batch. Failed with exception: %s"
                % e)

        return ecs_clusters
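
A minimal usage sketch with the same hypothetical factory stand-in and a hypothetical compute environment name; this command asks the factory for "ecs", "batch" and "ec2" clients.

import logging
from types import SimpleNamespace

import boto3

session = boto3.session.Session(region_name="us-east-1")
# Hypothetical stand-in for Boto3ClientFactory, exposing get_client(service) and region.
factory = SimpleNamespace(region=session.region_name, get_client=session.client)

command = AWSBhostsCommand(logging.getLogger("awsbhosts"), factory)

# Table of the EC2 hosts backing one compute environment; instance_ids can narrow the result.
command.run(compute_environments=["my-compute-env"], show_details=False)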
Example #6
class AWSBhostsCommand(object):
    """
    awsbhosts command
    """
    def __init__(self, log, boto3_factory):
        """
        :param log: log
        :param boto3_factory: an initialized Boto3ClientFactory object
        """
        self.log = log
        mapping = collections.OrderedDict([
            ('containerInstanceArn', 'container_instance_arn'),
            ('status', 'status'), ('ec2InstanceId', 'ec2_instance'),
            ('instanceType', 'instance_type'),
            ('privateIpAddress', 'private_ip_address'),
            ('publicIpAddress', 'public_ip_address'),
            ('privateDnsName', 'private_dns_name'),
            ('publicDnsName', 'public_dns_name'),
            ('runningJobs', 'running_jobs'), ('pendingJobs', 'pending_jobs')
        ])
        self.output = Output(mapping=mapping)
        self.boto3_factory = boto3_factory
        self.ecs_client = boto3_factory.get_client('ecs')

    def run(self, compute_environments, show_details=False, instance_ids=None):
        """
        print list of hosts associated to the compute environments
        :param compute_environments: a list of compute environments
        :param show_details: show compute environment details
        :param instance_ids: instances to query
        """
        self.__init_output(compute_environments, instance_ids)
        if show_details or instance_ids:
            self.output.show()
        else:
            self.output.show_table([
                'ec2InstanceId', 'instanceType', 'privateIpAddress',
                'publicIpAddress', 'runningJobs'
            ])

    def __init_output(self, compute_environments, instance_ids=None):
        """
        Initialize host output by asking hosts associated to the given compute environments
        :param compute_environments: a list of compute environments
        :param instance_ids: requested hosts
        """
        ecs_clusters = self.__get_ecs_clusters(compute_environments)
        try:
            for ecs_cluster in ecs_clusters:
                self.log.info('Cluster ARN = %s' % ecs_cluster)
                paginator = self.ecs_client.get_paginator(
                    'list_container_instances')
                for page in paginator.paginate(cluster=ecs_cluster):
                    self._add_host_items(ecs_cluster,
                                         page['containerInstanceArns'],
                                         instance_ids)
        except Exception as e:
            fail(
                "Error listing container instances from AWS ECS. Failed with exception: %s"
                % e)

    @staticmethod
    def __create_host_item(container_instance, ec2_instance):
        """
        Merge container instance and ec2 instance information and create a Host item.
        :param container_instance: the containerInstance object to parse
        :param ec2_instance: the ec2Instance object to parse
        :return: the Host item
        """
        try:
            instance_type = '-'
            for attr in container_instance['attributes']:
                if attr['name'] == 'ecs.instance-type':
                    instance_type = attr['value']
                    break

            return Host(container_instance_arn=container_instance[
                'containerInstanceArn'],
                        status=container_instance['status'],
                        ec2_instance=container_instance['ec2InstanceId'],
                        instance_type=instance_type,
                        private_ip_address=ec2_instance['PrivateIpAddress'],
                        public_ip_address=ec2_instance['PublicIpAddress']
                        if ec2_instance['PublicIpAddress'] != '' else '-',
                        private_dns_name=ec2_instance['PrivateDnsName'],
                        public_dns_name=ec2_instance['PublicDnsName']
                        if ec2_instance['PublicDnsName'] != '' else '-',
                        running_jobs=container_instance['runningTasksCount'],
                        pending_jobs=container_instance['pendingTasksCount'])
        except KeyError as e:
            fail("Error building Host item. Key (%s) not found." % e)

    def _add_host_items(self,
                        ecs_cluster_arn,
                        container_instances_arns,
                        instance_ids=None):
        """
        Add a list of Hosts to the output
        :param ecs_cluster_arn: ECS Cluster arn
        :param container_instances_arns: container ids
        :param instance_ids: hosts requested
        """
        self.log.info('Container ARNs = %s' % container_instances_arns)
        if container_instances_arns:
            response = self.ecs_client.describe_container_instances(
                cluster=ecs_cluster_arn,
                containerInstances=container_instances_arns)
            container_instances = response['containerInstances']
            self.log.debug('Container Instances = %s' % container_instances)
            # get ec2_instance_ids
            ec2_instances_ids = []
            for container_instance in container_instances:
                ec2_instances_ids.append(container_instance['ec2InstanceId'])

            # get ec2 instances information
            ec2_instances = {}
            try:
                ec2_client = self.boto3_factory.get_client('ec2')
                paginator = ec2_client.get_paginator('describe_instances')
                for page in paginator.paginate(InstanceIds=ec2_instances_ids):
                    for reservation in page['Reservations']:
                        for instance in reservation['Instances']:
                            ec2_instances[instance['InstanceId']] = instance
            except Exception as e:
                fail(
                    "Error listing EC2 instances from AWS EC2. Failed with exception: %s"
                    % e)

            # merge ec2 and container information
            for container_instance in container_instances:
                ec2_instance_id = container_instance['ec2InstanceId']
                # filter by instance_id if provided
                if not instance_ids or ec2_instance_id in instance_ids:
                    self.log.debug('Container Instance = %s' %
                                   container_instance)
                    self.log.debug('EC2 Instance = %s' %
                                   ec2_instances[ec2_instance_id])
                    self.output.add(
                        self.__create_host_item(
                            container_instance,
                            ec2_instances[ec2_instance_id]))

    @staticmethod
    def __get_clusters(compute_environments):
        """
        Parse computeEnvironments object and return a list of ecsClusterArn.
        :param compute_environments: a list of Compute Environments
        :return: a list of ECS clusters
        """
        ecs_clusters = []
        for compute_env in compute_environments:
            ecs_clusters.append(compute_env['ecsClusterArn'])
        return ecs_clusters

    def __get_ecs_clusters(self, compute_environments):
        """
        Get Compute Environments from AWS Batch and create a list of ECS Cluster ARNs.
        :param compute_environments: compute environments to query
        :return: a list of ECS clusters
        """
        ecs_clusters = []
        try:
            # connect to batch and ask for compute environments
            batch_client = self.boto3_factory.get_client('batch')
            next_token = ''
            while next_token is not None:
                response = batch_client.describe_compute_environments(
                    computeEnvironments=compute_environments,
                    nextToken=next_token)
                ecs_clusters.extend(
                    self.__get_clusters(response['computeEnvironments']))
                next_token = response.get('nextToken')
        except Exception as e:
            fail(
                "Error listing compute environments from AWS Batch. Failed with exception: %s"
                % e)

        return ecs_clusters