def crawl(self, container_id=None, **kwargs):

        c = dockercontainer.DockerContainer(container_id)

        # check image name
        if c.image_name.find("httpd") == -1:

            logger.error("%s is not %s container",
                         c.image_name,
                         self.feature_key)
            raise CrawlError("%s does not have expected name for %s (name=%s)",
                             container_id,
                             self.feature_key,
                             c.image_name)

        # extract IP and Port information
        ip = c.get_container_ip()
        ports = c.get_container_ports()

        # crawl all candidate ports
        for port in ports:
            try:
                metrics = apache_crawler.retrieve_metrics(ip, port)
            except CrawlError:
                logger.error("can't find metrics endpoint at http://%s:%s",
                             ip,
                             port)
                continue
            return [(self.feature_key, metrics, self.feature_type)]

        raise CrawlError("%s has no accessible endpoint for %s",
                         container_id,
                         self.feature_key)
    def crawl(self, container_id=None, **kwargs):
        """Collect metrics from a redis container via the redis client.

        Returns [(feature_key, feature_attributes, feature_type)] for the
        first candidate port the server answers on.

        Raises:
            NameError: the container image name does not match feature_key.
            ConnectionError: no candidate port accepted a connection.
        """
        # Fix: import the client lazily and install it only when it is
        # actually missing, instead of shelling out to pip on every crawl
        # (matches the pattern used by the other redis crawler in this file).
        try:
            import redis
        except ImportError:
            import pip
            pip.main(['install', 'redis'])
            import redis

        # only crawl redis container. Otherwise, quit.
        c = dockercontainer.DockerContainer(container_id)
        if c.image_name.find(self.feature_key) == -1:
            logger.debug("%s is not %s container" %
                         (c.image_name, self.feature_key))
            raise NameError("this is not target crawl container")

        # extract IP and Port information
        ip = c.get_container_ip()
        ports = c.get_container_ports()

        # set default port number when the container publishes none
        if not ports:
            ports.append(self.default_port)

        # query all available ports until one responds
        for port in ports:
            client = redis.Redis(host=ip, port=port)
            try:
                metrics = client.info()
            # NOTE(review): assumes ConnectionError here covers the redis
            # client's connection failure — verify it maps to
            # redis.exceptions.ConnectionError in this codebase.
            except ConnectionError:
                logger.info("redis does not listen on port:%d", port)
                continue
            feature_attributes = feature.create_feature(metrics)
            return [(self.feature_key, feature_attributes, self.feature_type)]

        # no port was reachable
        raise ConnectionError("no listen ports")
Ejemplo n.º 3
0
    def crawl(self, container_id=None, **kwargs):
        """Collect metrics from a redis container via its namespace IP.

        Returns [(feature_key, feature_attributes, feature_type)] on
        success, or None when get_port() finds no usable port.

        Raises:
            ConnectionError: redis could not be queried on the port.
        """
        # Import the client lazily; install it only when missing.
        try:
            import redis
        except ImportError:
            import pip
            pip.main(['install', 'redis'])
            import redis

        # only crawl redis container. Otherwise, quit.
        c = dockercontainer.DockerContainer(container_id)
        port = self.get_port(c)

        if not port:
            return

        # Enumerate IPv4 addresses from inside the container's network
        # namespace.
        state = c.inspect['State']
        pid = str(state['Pid'])
        ips = run_as_another_namespace(pid, ['net'],
                                       utils.misc.get_host_ip4_addresses)

        ip = None
        for each_ip in ips:
            if each_ip != "127.0.0.1":
                ip = each_ip
                break

        # Fix: the original left 'ip' unbound (NameError) when only the
        # loopback address was present.
        if ip is None:
            raise ConnectionError("no listen at %d" % port)

        client = redis.Redis(host=ip, port=port)

        try:
            metrics = client.info()
            feature_attributes = feature.create_feature(metrics)
            return [(self.feature_key, feature_attributes, self.feature_type)]
        except Exception:
            # Fix: narrowed from a bare 'except:' so SystemExit and
            # KeyboardInterrupt still propagate; also format the error
            # message (the original passed 'port' as a stray argument).
            logger.info("redis does not listen on port:%d", port)
            raise ConnectionError("no listen at %d" % port)
Ejemplo n.º 4
0
    def crawl(self, container_id=None, **kwargs):
        """Crawl a liberty container and return its metrics features.

        Optional kwargs 'user' and 'password' override the default
        credentials. Raises CrawlError when the image name does not match
        feature_key or when the container publishes no ports.
        """
        # Credentials: kwargs win over the placeholder defaults.
        password = kwargs.get("password", "******")
        user = kwargs.get("user", "******")

        c = dockercontainer.DockerContainer(container_id)

        # Reject containers whose image name does not mention the feature.
        if self.feature_key not in c.image_name:
            logger.error("%s is not %s container", c.image_name,
                         self.feature_key)
            raise CrawlError("%s does not have expected name for %s (name=%s)",
                             container_id, self.feature_key, c.image_name)

        ip = c.get_container_ip()
        ports = c.get_container_ports()

        # Only the first published port is ever queried; its metrics are
        # returned directly.
        for each_port in ports:
            return liberty_crawler.retrieve_metrics(
                host=ip,
                port=each_port,
                user=user,
                password=password,
                feature_type=self.feature_type)

        raise CrawlError("%s has no accessible endpoint for %s", container_id,
                         self.feature_key)
Ejemplo n.º 5
0
    def crawl(self, container_id=None, **kwargs):
        """Crawl a db2 container (kubernetes deployment) for metrics.

        Returns [(feature_key, metrics, feature_type)] on success, or
        None when the feature's default port is not published.

        Raises:
            ConnectionError: the db2 endpoint could not be queried.
        """
        password, user, db = self.get_opt(kwargs)
        c = dockercontainer.DockerContainer(container_id)

        port = None

        # Kubernetes records published ports as JSON in this label; fall
        # back to the docker-reported port list when it is absent.
        if "annotation.io.kubernetes.container.ports" in\
                c.inspect['Config']['Labels']:

            ports = c.inspect['Config']['Labels'][
                'annotation.io.kubernetes.container.ports']

            ports = json.loads(ports)

        else:
            ports = c.get_container_ports()

        # Accept a port only when it matches this feature's default port.
        for each_port in ports:
            if "containerPort" in each_port:
                # kubernetes label entries are dicts
                tmp_port = int(each_port['containerPort'])
            else:
                tmp_port = int(each_port)

            if tmp_port == self.default_port:
                port = tmp_port

        if not port:
            return

        # Resolve an IPv4 address from inside the container's network
        # namespace.
        state = c.inspect['State']
        pid = str(state['Pid'])
        ips = run_as_another_namespace(pid, ['net'],
                                       utils.misc.get_host_ip4_addresses)

        ip = None
        for each_ip in ips:
            if each_ip != "127.0.0.1":
                ip = each_ip
                break

        # Fix: the original left 'ip' unbound (NameError) when only the
        # loopback address was present.
        if ip is None:
            raise ConnectionError("db2 does not listen on port:%d" % port)

        try:
            metrics = db2_crawler.retrieve_metrics(
                host=ip,
                user=user,
                password=password,
                db=db,
            )
            return [(self.feature_key, metrics, self.feature_type)]
        except Exception:
            # Fix: narrowed from a bare 'except:' so SystemExit and
            # KeyboardInterrupt still propagate; also format the message
            # (the original passed 'port' as a stray argument).
            logger.info("db2 does not listen on port:%d", port)
            raise ConnectionError("db2 does not listen on port:%d" % port)
Ejemplo n.º 6
0
    def crawl(self, container_id=None, **kwargs):
        """Crawl a tomcat container (kubernetes deployment) for metrics.

        Returns the result of tomcat_crawler.retrieve_metrics on success,
        or None when the feature's default port is not published.

        Raises:
            ConnectionError: the tomcat endpoint could not be queried.
        """
        password, user = self.get_opt(kwargs)
        c = dockercontainer.DockerContainer(container_id)

        port = None

        # Kubernetes records published ports as JSON in this label; fall
        # back to the docker-reported port list when it is absent.
        if "annotation.io.kubernetes.container.ports" in\
                c.inspect['Config']['Labels']:

            ports = c.inspect['Config']['Labels'][
                'annotation.io.kubernetes.container.ports']

            ports = json.loads(ports)

        else:
            ports = c.get_container_ports()

        # Accept a port only when it matches this feature's default port.
        for each_port in ports:
            if "containerPort" in each_port:
                # kubernetes label entries are dicts
                tmp_port = int(each_port['containerPort'])
            else:
                tmp_port = int(each_port)

            if tmp_port == self.default_port:
                port = tmp_port

        if not port:
            return

        # Resolve an IPv4 address from inside the container's network
        # namespace.
        state = c.inspect['State']
        pid = str(state['Pid'])
        ips = run_as_another_namespace(pid, ['net'],
                                       utils.misc.get_host_ip4_addresses)

        ip = None
        for each_ip in ips:
            if each_ip != "127.0.0.1":
                ip = each_ip
                break

        # Fix: the original left 'ip' unbound (NameError) when only the
        # loopback address was present.
        if ip is None:
            raise ConnectionError("%s has no accessible endpoint for %s"
                                  % (container_id, self.feature_key))

        # crawl the selected port
        try:
            return tomcat_crawler.retrieve_metrics(
                host=ip,
                port=port,
                user=user,
                password=password,
                feature_type=self.feature_type)
        except Exception:
            # Fix: narrowed from a bare 'except:'; also format the message
            # (the original passed the values as stray arguments).
            raise ConnectionError("%s has no accessible endpoint for %s"
                                  % (container_id, self.feature_key))
Ejemplo n.º 7
0
    def test_get_logfiles_list(self, *args):
        """Entries in log_file_list are mapped to host-side log paths."""
        # Minimal docker-inspect fixture for a running container with one
        # mounted volume (self.volume -> /data).
        fake_inspect = {
            "Id": ("1e744b5e3e11e848863fefe9d9a8b3731070c6b0c702a04d2b8ab948ea"
                   "24e847"),
            "Created": "2016-07-06T16:38:05.479090842Z",
            "State": {
                "Status": "running",
                "Running": True,
                "Pid": 11186},
            "Image": ("sha256:07c86167cdc4264926fa5d2894e34a339ad27f730e8cc81a"
                      "16cd21b7479e8eac"),
            "Name": "/pensive_rosalind",
            "LogPath": ("/var/lib/docker/containers/1e744b5e3e11e848863fefe9d9"
                        "a8b3731070c6b0c702a04d2b8ab948ea24e847/1e744b5e3e11e8"
                        "48863fefe9d9a8b3731070c6b0c702a04d2b8ab948ea24e847"
                        "-json.log"),
            "HostnamePath": ("/var/lib/docker/containers/1e744b5e3e11e848863fe"
                             "fe9d9a8b3731070c6b0c702a04d2b8ab948ea24e847"
                             "/hostname"),
            "Mounts": [
                {
                    "Source": self.volume,
                    "Destination": "/data"}],
            "Config": {
                "Cmd": ["bash"],
                "Image": "ubuntu:trusty"},
            "docker_image_long_name": "long_name/short_name",
            "docker_image_short_name": "short_name",
            "docker_image_tag": "image_tag",
            "docker_image_registry": "image_registry",
            "owner_namespace": "owner_namespace",
            "NetworkSettings": {}}

        plugins_manager.runtime_env = None
        self.docker_container = dockercontainer.DockerContainer(
            fake_inspect['Id'], fake_inspect)

        # Stub out real log-file discovery and declare one log to map.
        self.docker_container._get_container_log_files = \
            get_container_log_files
        self.docker_container.log_file_list = [
            {'name': '/data/test1.log', 'type': None}]

        self.docker_container._set_logs_list()

        for entry in self.docker_container.logs_list:
            if entry.name == '/data/test1.log':
                self.assertEqual(
                    entry.dest, self.host_log_dir + '/data/test1.log')
                self.assertEqual(entry.source,
                                 self.volume + '/test1.log')
Ejemplo n.º 8
0
    def crawl(self, container_id=None, **kwargs):
        """Crawl an nginx container (kubernetes deployment) for metrics.

        Returns [(feature_key, metrics, feature_type)] on success, or
        None when the feature's default port is not published.

        Raises:
            ConnectionError: the nginx metrics endpoint was unreachable.
        """
        c = dockercontainer.DockerContainer(container_id)

        port = None

        # Kubernetes records published ports as JSON in this label; fall
        # back to the docker-reported port list when it is absent.
        if "annotation.io.kubernetes.container.ports" in\
                c.inspect['Config']['Labels']:

            ports = c.inspect['Config']['Labels'][
                'annotation.io.kubernetes.container.ports']

            ports = json.loads(ports)

        else:
            ports = c.get_container_ports()

        # Accept a port only when it matches this feature's default port.
        for each_port in ports:
            if "containerPort" in each_port:
                # kubernetes label entries are dicts
                tmp_port = int(each_port['containerPort'])
            else:
                tmp_port = int(each_port)

            if tmp_port == self.default_port:
                port = tmp_port

        if not port:
            return

        # Resolve an IPv4 address from inside the container's network
        # namespace.
        state = c.inspect['State']
        pid = str(state['Pid'])
        ips = run_as_another_namespace(pid, ['net'],
                                       utils.misc.get_host_ip4_addresses)

        ip = None
        for each_ip in ips:
            if each_ip != "127.0.0.1":
                ip = each_ip
                break

        # Fix: the original left 'ip' unbound (NameError) when only the
        # loopback address was present.
        if ip is None:
            raise ConnectionError(
                "can't find metrics endpoint for %s" % container_id)

        # crawl the selected port
        try:
            metrics = nginx_crawler.retrieve_metrics(ip, port)
            return [(self.feature_key, metrics, self.feature_type)]
        except Exception:
            # Fix: narrowed from a bare 'except:'; also repair the error
            # message (the original concatenated two literals without a
            # space and never formatted the %s placeholders).
            logger.error("can't find metrics endpoint at http://%s:%s", ip,
                         port)
            raise ConnectionError(
                "can't find metrics endpoint at http://%s:%s" % (ip, port))
    def crawl(self, container_id=None, **kwargs):
        """Crawl a db2 container and return its metrics features.

        Optional kwargs 'user', 'password' and 'db' override the
        defaults. Raises CrawlError when the image name does not match
        feature_key or when no endpoint answers.
        """
        # Connection settings: kwargs win over the placeholder defaults.
        password = kwargs.get("password", "******")
        user = kwargs.get("user", "******")
        db = kwargs.get("db", "sample")

        c = dockercontainer.DockerContainer(container_id)

        # Reject containers whose image name does not mention the feature.
        if self.feature_key not in c.image_name:
            logger.error("%s is not %s container",
                         c.image_name,
                         self.feature_key)
            raise CrawlError("%s does not have expected name for %s (name=%s)",
                             container_id,
                             self.feature_key,
                             c.image_name)

        # Candidate network endpoints of the container.
        ip = c.get_container_ip()
        ports = c.get_container_ports()

        # Try each published port until db2 answers.
        for each_port in ports:
            try:
                metrics = db2_crawler.retrieve_metrics(
                    host=ip,
                    user=user,
                    password=password,
                    db=db,
                )
            except CrawlError:
                logger.error("can't find metrics endpoint at %s db %s",
                             ip, db)
                continue
            return [(self.feature_key, metrics, self.feature_type)]

        raise CrawlError("%s has no accessible endpoint for %s",
                         container_id,
                         self.feature_key)