def crawl(self, container_id=None, **kwargs):
    """Crawl Apache metrics from a Docker container running httpd.

    Verifies the container image looks like an httpd image, then probes
    every published port until one serves the status endpoint.

    :param container_id: id of the Docker container to crawl
    :return: list with a single (feature_key, metrics, feature_type) tuple
    :raises CrawlError: if the image is not an httpd image or no port
        exposes a reachable metrics endpoint
    """
    c = dockercontainer.DockerContainer(container_id)

    # check image name
    if c.image_name.find("httpd") == -1:
        logger.error("%s is not %s container",
                     c.image_name,
                     self.feature_key)
        # BUG FIX: CrawlError is a plain exception, not a logger call, so
        # the format args must be interpolated here or the message stays raw.
        raise CrawlError("%s does not have expected name for %s (name=%s)"
                         % (container_id,
                            self.feature_key,
                            c.image_name))

    # extract IP and Port information
    ip = c.get_container_ip()
    ports = c.get_container_ports()

    # crawl all candidate ports; the first port that answers wins
    for port in ports:
        try:
            metrics = apache_crawler.retrieve_metrics(ip, port)
        except CrawlError:
            logger.error("can't find metrics endpoint at http://%s:%s",
                         ip,
                         port)
            continue
        return [(self.feature_key, metrics, self.feature_type)]

    # every candidate port failed
    raise CrawlError("%s has no accessible endpoint for %s"
                     % (container_id, self.feature_key))
Code example #2
0
    def crawl(self, container_id=None, **kwargs):
        """Crawl Apache metrics from a container, resolving its port and IP.

        Port discovery prefers the kubernetes port annotation label when
        present, falling back to the container's published ports. Only the
        default Apache port is crawled.

        :param container_id: id of the Docker container to crawl
        :return: list with one (feature_key, metrics, feature_type) tuple,
            or None if the default port is not exposed
        :raises ConnectionError: if apache does not answer on the port
        """
        c = dockercontainer.DockerContainer(container_id)

        port = None

        labels = c.inspect['Config']['Labels']
        if "annotation.io.kubernetes.container.ports" in labels:
            # kubernetes stores the port list as a JSON string label
            ports = json.loads(
                labels['annotation.io.kubernetes.container.ports'])
        else:
            ports = c.get_container_ports()

        for each_port in ports:
            # entries are either {"containerPort": N, ...} dicts or bare ports
            if "containerPort" in each_port:
                tmp_port = int(each_port['containerPort'])
            else:
                tmp_port = int(each_port)

            if tmp_port == self.default_port:
                port = tmp_port

        if not port:
            return

        state = c.inspect['State']
        pid = str(state['Pid'])
        # enter the container's network namespace to see its interfaces
        ips = run_as_another_namespace(
            pid, ['net'], utils.misc.get_host_ip4_addresses)

        # BUG FIX: default to loopback so `ip` is always bound even when the
        # namespace reports no (or only loopback) addresses; previously this
        # raised NameError instead of a meaningful error.
        ip = "127.0.0.1"
        for each_ip in ips:
            if each_ip != "127.0.0.1":
                ip = each_ip
                break
        try:
            metrics = apache_crawler.retrieve_metrics(ip, port)
            return [(self.feature_key, metrics, self.feature_type)]
        except Exception:
            # BUG FIX: narrow the bare `except:` and interpolate the message;
            # ConnectionError is a plain exception, not a logger call.
            logger.info("apache does not listen on port:%d", port)
            raise ConnectionError("apache does not listen on port:%d" % port)
Code example #3
0
 def test_ok(self):
     """retrieve_metrics should parse the status page into an ApacheFeature."""
     expected_fields = {
         'BusyWorkers': '2',
         'IdleWorkers': '9',
         'waiting_for_connection': '9',
         'starting_up': '1',
         'reading_request': '1',
         'sending_reply': '1',
         'keepalive_read': '1',
         'dns_lookup': '1',
         'closing_connection': '1',
         'logging': '1',
         'graceful_finishing': '1',
         'idle_worker_cleanup': '1',
         'BytesPerSec': '1023.13',
         'BytesPerReq': '7037.02',
         'ReqPerSec': '.145393',
         'Uptime': '1183',
         'Total_kBytes': '1182',
         'Total_Accesses': '172',
     }
     status = apache_crawler.retrieve_metrics()
     assert status == ApacheFeature(**expected_fields)
Code example #4
0
 def test_ok(self):
     """Parsing a healthy status page must yield the expected ApacheFeature."""
     expected = ApacheFeature(
         BusyWorkers='2',
         IdleWorkers='9',
         waiting_for_connection='9',
         starting_up='1',
         reading_request='1',
         sending_reply='1',
         keepalive_read='1',
         dns_lookup='1',
         closing_connection='1',
         logging='1',
         graceful_finishing='1',
         idle_worker_cleanup='1',
         BytesPerSec='1023.13',
         BytesPerReq='7037.02',
         ReqPerSec='.145393',
         Uptime='1183',
         Total_kBytes='1182',
         Total_Accesses='172')
     assert apache_crawler.retrieve_metrics() == expected
Code example #5
0
 def test_hundle_parseerror(self):
     """An unparsable status page must surface as a CrawlError."""
     self.assertRaises(CrawlError, apache_crawler.retrieve_metrics)
Code example #6
0
 def test_hundle_parseerror(self):
     """retrieve_metrics raises CrawlError when the page cannot be parsed."""
     self.assertRaises(CrawlError, apache_crawler.retrieve_metrics)
Code example #7
0
File: apache_host_crawler.py  Project: CCI-MOC/ABMI
 def crawl(self):
     """Crawl Apache metrics from the local host on the default port."""
     host_metrics = apache_crawler.retrieve_metrics(
         host='localhost', port=self.default_port)
     return [(self.feature_key, host_metrics, self.feature_type)]
Code example #8
0
 def crawl(self):
     """Collect Apache metrics from localhost using the crawler's default port.

     :return: list with one (feature_key, metrics, feature_type) tuple
     """
     result = apache_crawler.retrieve_metrics(host='localhost',
                                              port=self.default_port)
     return [(self.feature_key, result, self.feature_type)]