Code example #1
def getIp():
    try:
        print('_____________  start  get ip  ____________')
        sleep(3)
        req = get("http://txt.go.sohu.com/ip/soip")
        # Escape the dots so the pattern matches literal IPv4 addresses.
        ip = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', req.text)
        wanIp = ip[0]
        print("public IP: ", ip[0])
        REGISTRY = CollectorRegistry(auto_describe=False)
        client_curl = Gauge("wanIp", name, ["wanIp"], registry=REGISTRY)
        client_curl.labels(wanIp).set(0)
        print('_____________________________________________')
        # pushhostinfo, job_name, instance_name, username, password and name
        # are module-level settings defined elsewhere in the original script.
        pushurl_addr = 'http://' + pushhostinfo + '/metrics/job/' + job_name + '/instance/' + instance_name
        post(pushurl_addr, data=prometheus_client.generate_latest(REGISTRY), timeout=60, auth=(username, password))

        print('wanIp url : ', pushurl_addr)
        print('______________  end get ip _______________')
    except Exception:
        traceback.print_exc()
        wanIp = '0.0.0.0'
        print('wanIP: ', wanIp)
        REGISTRY = CollectorRegistry(auto_describe=False)
        client_curl = Gauge("wanIp", name, ["wanIp"], registry=REGISTRY)
        client_curl.labels(wanIp).set(0)
        pushurl_addr = 'http://' + pushhostinfo + '/metrics/job/' + job_name + '/instance/' + instance_name
        post(pushurl_addr, data=prometheus_client.generate_latest(REGISTRY), timeout=60, auth=(username, password))

        print('__________ getIp except_____________________')
Code example #2
    def test_autodescribe_disabled_by_default(self):
        # Without auto_describe, registering two collectors that emit the same
        # metric name is not detected as a duplicate.
        registry = CollectorRegistry()
        self.custom_collector(CounterMetricFamily('c_total', 'help', value=1), registry)
        self.custom_collector(CounterMetricFamily('c_total', 'help', value=1), registry)

        # With auto_describe=True, the registry describes collectors as they are
        # registered, so the second registration raises ValueError.
        registry = CollectorRegistry(auto_describe=True)
        self.custom_collector(CounterMetricFamily('c_total', 'help', value=1), registry)
        self.assertRaises(ValueError, self.custom_collector, CounterMetricFamily('c_total', 'help', value=1), registry)
Code example #3
    def test_duplicate_metrics_raises(self):
        registry = CollectorRegistry()
        Counter('c_total', 'help', registry=registry)
        self.assertRaises(ValueError, Counter, 'c_total', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'c_total', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'c_created', 'help', registry=registry)

        Gauge('g_created', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'g_created', 'help', registry=registry)
        self.assertRaises(ValueError, Counter, 'g', 'help', registry=registry)

        Summary('s', 'help', registry=registry)
        self.assertRaises(ValueError, Summary, 's', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 's_created', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 's_sum', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 's_count', 'help', registry=registry)
        # We don't currently expose quantiles, but let's prevent future
        # clashes anyway.
        self.assertRaises(ValueError, Gauge, 's', 'help', registry=registry)

        Histogram('h', 'help', registry=registry)
        self.assertRaises(ValueError, Histogram, 'h', 'help', registry=registry)
        # Clashes against various suffixes.
        self.assertRaises(ValueError, Summary, 'h', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'h_count', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'h_sum', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'h_bucket', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'h_created', 'help', registry=registry)
        # The name of the histogram itself is also taken.
        self.assertRaises(ValueError, Gauge, 'h', 'help', registry=registry)

        Info('i', 'help', registry=registry)
        self.assertRaises(ValueError, Gauge, 'i_info', 'help', registry=registry)
Code example #4
    def setUp(self):
        self.temp_dir = TemporaryDirectory()

        host = '0.0.0.0'
        port = get_free_port()
        seed_addr = None
        conf = SyncObjConf(
            fullDumpFile=self.temp_dir.name + '/supervise.zip',
            logCompactionMinTime=300,
            dynamicMembershipChange=True
        )
        data_dir = self.temp_dir.name + '/supervise'
        grpc_port = get_free_port()
        grpc_max_workers = 10
        http_port = get_free_port()
        logger = getLogger(NAME)
        log_handler = StreamHandler()
        logger.setLevel(ERROR)
        log_handler.setLevel(INFO)
        log_format = Formatter('%(asctime)s - %(levelname)s - %(pathname)s:%(lineno)d - %(message)s')
        log_handler.setFormatter(log_format)
        logger.addHandler(log_handler)
        http_logger = getLogger(NAME + '_http')
        http_log_handler = StreamHandler()
        http_logger.setLevel(NOTSET)
        http_log_handler.setLevel(INFO)
        http_log_format = Formatter('%(message)s')
        http_log_handler.setFormatter(http_log_format)
        http_logger.addHandler(http_log_handler)
        metrics_registry = CollectorRegistry()

        self.manager = Manager(host=host, port=port, seed_addr=seed_addr, conf=conf, data_dir=data_dir,
                               grpc_port=grpc_port, grpc_max_workers=grpc_max_workers, http_port=http_port,
                               logger=logger, http_logger=http_logger, metrics_registry=metrics_registry)
Code example #5
 def generate_metrics(self):
     myregistry = CollectorRegistry()
     myregistry.register(self)
     return (
         exposition.generate_latest(myregistry).decode("utf-8"),
         exposition.CONTENT_TYPE_LATEST,
     )
Code example #6
    def setUp(self):
        """Prepare for the tests.

        The summary metric used to track the count and time in the `RQCollector.collect` method
        is automatically registered on the global REGISTRY.

        """
        # Create a registry for testing to replace the global REGISTRY
        self.registry = CollectorRegistry(auto_describe=True)

        # Default Summary class arguments values
        default_args = Summary.__init__.__defaults__

        # Create a similar default values tuple and replace the default `registry` argument with a mock
        # Mocking `prometheus_client.metrics.REGISTRY` doesn't work as expected because default arguments
        # are evaluated at definition time
        new_default_args = tuple(
            self.registry if isinstance(arg, CollectorRegistry) else arg
            for arg in default_args)

        # Patch the default Summary class arguments
        patch('prometheus_client.metrics.Summary.__init__.__defaults__',
              new_default_args).start()

        # On cleanup call patch.stopall
        self.addCleanup(patch.stopall)
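The comments in the setUp above hinge on a Python detail: default argument values are evaluated once, when the function is defined, so patching a module attribute afterwards cannot affect a default that has already captured the old object. A minimal sketch of that behaviour (the sentinel and the function f are illustrative only, not part of the example):

SENTINEL = object()

def f(registry=SENTINEL):
    return registry

SENTINEL = object()           # rebinding the module-level name later...
assert f() is not SENTINEL    # ...does not change the default already captured by f

f.__defaults__ = (SENTINEL,)  # rewriting __defaults__ does swap it, which is
assert f() is SENTINEL        # what the patch on Summary.__init__ relies on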
Code example #7
 def __init__(self, interval_time):
     self.interval = interval_time
     self.hostname = socket.gethostname()  # hostname
     self.registry = CollectorRegistry(auto_describe=False)  # Prometheus registry
     self.key_info = dict()
     self.old_recv = dict()
     self.old_sent = dict()
Code example #8
    async def get_metrics_prometheus(self, onlyread=False):
        self.registry = CollectorRegistry()
        for metric_name in self.all_metric:
            # By default a Gauge is registered in the global registry, and the
            # global registry cannot hold two metrics with the same name, so a
            # fresh registry is created for each call.
            metric = Gauge(
                name=metric_name,
                documentation=self.all_metric[metric_name]['describe'],
                labelnames=self.all_metric[metric_name]['labels'],
                registry=self.registry)

            for attr in self.all_metric[metric_name]['data']:
                metric.labels(*attr).set(
                    self.all_metric[metric_name]['data'][attr])

            if not onlyread:
                # Post-scrape handling of the collected data
                if self.all_metric[metric_name][
                        'pull_finish_deal_type'] == 'clear':
                    self.all_metric[metric_name]['data'] = {}
                elif self.all_metric[metric_name][
                        'pull_finish_deal_type'] == 'reset':
                    for attr in self.all_metric[metric_name]['data']:
                        self.all_metric[metric_name]['data'][attr] = 0

        return prometheus_client.generate_latest(self.registry)
Code example #9
def handle_data():
    key_info, net_in, net_out = get_rate(get_key)
    hostname = socket.gethostname()  # hostname
    REGISTRY = CollectorRegistry(auto_describe=False)
    input = Gauge("network_traffic_input",
                  hostname, ['adapter_name', 'unit', 'ip', 'instance'],
                  registry=REGISTRY)  # inbound traffic
    output = Gauge("network_traffic_output",
                   hostname, ['adapter_name', 'unit', 'ip', 'instance'],
                   registry=REGISTRY)  # outbound traffic

    for key in key_info:
        net_addr = PrintNetIfAddr()
        # Skip the loopback interface (lo) and adapters without an IP address
        if key != 'lo' and key not in net_addr['no_ip']:
            # Inbound and outbound rates
            print(net_addr['本地连接'], net_in.get(key))
            print(net_addr['本地连接'], net_out.get(key))
            input.labels(ip=net_addr[key],
                         adapter_name=key,
                         unit="Byte",
                         instance=hostname).inc(net_in.get(key))
            output.labels(ip=net_addr[key],
                          adapter_name=key,
                          unit="Byte",
                          instance=hostname).inc(net_out.get(key))

    requests.post("http://192.168.3.16:9091/metrics/job/network_traffic",
                  data=prometheus_client.generate_latest(REGISTRY))
    print("Pushed one round of NIC traffic data")
Code example #10
    def test_restricted_registry(self):
        registry = CollectorRegistry()
        Counter('c_total', 'help', registry=registry)
        Summary('s', 'help', registry=registry).observe(7)

        m = Metric('s', 'help', 'summary')
        m.samples = [Sample('s_sum', {}, 7)]
        self.assertEqual([m], registry.restricted_registry(['s_sum']).collect())
Code example #11
File: prometheus_helper.py  Project: homedepot/metREx
def get_registry(name):
    if name not in collector_registries.keys():
        collector_registries[name] = CollectorRegistry()

        if prometheus_multiproc_dir is not None:
            MultiProcessCollector(collector_registries[name])

    return collector_registries[name]
Code example #12
File: myapp.py  Project: andybrier/aliyun
def requests_count():

    registry = CollectorRegistry()
    g = Gauge('host_error', 'Host Error Happened', ['host'], registry=registry)
    g.labels('www.huawei.com').set(1)
    push_to_gateway('127.0.0.1:9091', job='batchA', registry=registry)

    return "ok"
Code example #13
 def __init__(self,
              namespace='',
              port=8000,
              address='',
              # Note: this default registry is evaluated once, at definition
              # time, so it is shared by every instance that does not pass
              # its own registry.
              registry=CollectorRegistry()):
     self._namespace = namespace
     self._registry = registry
     self._port = int(port)
     self._address = address
Code example #14
 def setUp(self):
     self.registry = CollectorRegistry()
     self.counter = Counter('c_total',
                            'help',
                            labelnames=['l'],
                            registry=self.registry)
     self.two_labels = Counter('two',
                               'help',
                               labelnames=['a', 'b'],
                               registry=self.registry)
Code example #15
 def test_unregister_works(self):
     registry = CollectorRegistry()
     s = Summary('s', 'help', registry=registry)
     self.assertRaises(ValueError,
                       Gauge,
                       's_count',
                       'help',
                       registry=registry)
     registry.unregister(s)
     Gauge('s_count', 'help', registry=registry)
Code example #16
 def setUp(self):
     self.registry = CollectorRegistry()
     self.enum = Enum('e',
                      'help',
                      states=['a', 'b', 'c'],
                      registry=self.registry)
     self.labels = Enum('el',
                        'help', ['l'],
                        states=['a', 'b', 'c'],
                        registry=self.registry)
Code example #17
    def test_target_info_duplicate_detected(self):
        registry = CollectorRegistry(target_info={'foo': 'bar'})
        self.assertRaises(ValueError, Info, 'target', 'help', registry=registry)

        registry.set_target_info({})
        i = Info('target', 'help', registry=registry)
        registry.set_target_info({})
        self.assertRaises(ValueError, Info, 'target', 'help', registry=registry)
        self.assertRaises(ValueError, registry.set_target_info, {'foo': 'bar'})
        registry.unregister(i)
        registry.set_target_info({'foo': 'bar'})
Code example #18
    def test_target_info_restricted_registry(self):
        registry = CollectorRegistry(target_info={'foo': 'bar'})
        Summary('s', 'help', registry=registry).observe(7)

        m = Metric('s', 'help', 'summary')
        m.samples = [Sample('s_sum', {}, 7)]
        self.assertEqual([m], registry.restricted_registry(['s_sum']).collect())

        m = Metric('target', 'Target metadata', 'info')
        m.samples = [Sample('target_info', {'foo': 'bar'}, 1)]
        self.assertEqual([m], registry.restricted_registry(['target_info']).collect())
Code example #19
 def __init__(self, promethus_url, job_name='response_num'):
     self.loop = asyncio.get_event_loop()  # get the global event loop
     self.job = job_name
     self.promethus_url = promethus_url
     self.registry = CollectorRegistry(
     )  # Container holding all metrics; maintains each Metric object as a Name -> Metric (key-value) pair.
     # registry=None keeps the gauge out of the global default registry;
     # it is registered explicitly on self.registry below.
     self.requests_total = Gauge(
         'my_metric_name', 'Total response count of different errors',
         ['type', 'instance'],
         registry=None)  # tracks one key (error), represented by different error codes
     self.registry.register(self.requests_total)
     self.push_time = time.time() - 5
Code example #20
def getNetInfo():
    try:
        p = subprocess.Popen(["ipconfig", "/all"], stdout=subprocess.PIPE).communicate()[0].decode("gbk")
        pp = p.replace(' ', '')
        print('process ipconfig all output : ', pp)
        ipInfo = pp
        print('ipInfo: ', ipInfo)
        REGISTRY = CollectorRegistry(auto_describe=False)
        client_curl = Gauge("localNetInfo", name, ["localNetInfo"], registry=REGISTRY)
        client_curl.labels(ipInfo).set(0)
        pushurl_addr = 'http://' + pushhostinfo + '/metrics/job/' + job_name + '/instance/' + instance_name
        post(pushurl_addr, data=prometheus_client.generate_latest(REGISTRY), timeout=60, auth=(username, password))
    except Exception:
        ipInfo = 'get error ... ... !!! !!!'
        traceback.print_exc()
        print(ipInfo)
        REGISTRY = CollectorRegistry(auto_describe=False)
        client_curl = Gauge("localNetInfo", name, ["localNetInfo"], registry=REGISTRY)
        client_curl.labels(ipInfo).set(0)
        pushurl_addr = 'http://' + pushhostinfo + '/metrics/job/' + job_name + '/instance/' + instance_name
        post(pushurl_addr, data=prometheus_client.generate_latest(REGISTRY), timeout=60, auth=(username, password))
Code example #21
    def test_deprecation_warning(self):
        os.environ['prometheus_multiproc_dir'] = self.tempdir
        with warnings.catch_warnings(record=True) as w:
            values.ValueClass = get_value_class()
            registry = CollectorRegistry()
            collector = MultiProcessCollector(registry)
            Counter('c', 'help', registry=None)

            assert os.environ['PROMETHEUS_MULTIPROC_DIR'] == self.tempdir
            assert len(w) == 1
            assert issubclass(w[-1].category, DeprecationWarning)
            assert "PROMETHEUS_MULTIPROC_DIR" in str(w[-1].message)
Code example #22
    def test_restricted_registry_does_not_yield_while_locked(self):
        registry = CollectorRegistry(target_info={'foo': 'bar'})
        Summary('s', 'help', registry=registry).observe(7)

        m = Metric('s', 'help', 'summary')
        m.samples = [Sample('s_sum', {}, 7)]
        self.assertEqual([m], list(registry.restricted_registry(['s_sum']).collect()))

        m = Metric('target', 'Target metadata', 'info')
        m.samples = [Sample('target_info', {'foo': 'bar'}, 1)]
        for _ in registry.restricted_registry(['target_info', 's_sum']).collect():
            self.assertFalse(registry._lock.locked())
Code example #23
    def test_restricted_registry_does_not_call_extra(self):
        from unittest.mock import MagicMock
        registry = CollectorRegistry()
        mock_collector = MagicMock()
        mock_collector.describe.return_value = [Metric('foo', 'help', 'summary')]
        registry.register(mock_collector)
        Summary('s', 'help', registry=registry).observe(7)

        m = Metric('s', 'help', 'summary')
        m.samples = [Sample('s_sum', {}, 7)]
        self.assertEqual([m], list(registry.restricted_registry(['s_sum']).collect()))
        mock_collector.collect.assert_not_called()
Code example #24
    def test_roundtrip(self):
        text = """# HELP go_gc_duration_seconds A summary of the GC invocation durations.
# TYPE go_gc_duration_seconds summary
go_gc_duration_seconds{quantile="0"} 0.013300656000000001
go_gc_duration_seconds{quantile="0.25"} 0.013638736
go_gc_duration_seconds{quantile="0.5"} 0.013759906
go_gc_duration_seconds{quantile="0.75"} 0.013962066
go_gc_duration_seconds{quantile="1"} 0.021383540000000003
go_gc_duration_seconds_sum 56.12904785
go_gc_duration_seconds_count 7476.0
# HELP go_goroutines Number of goroutines that currently exist.
# TYPE go_goroutines gauge
go_goroutines 166.0
# HELP prometheus_local_storage_indexing_batch_duration_milliseconds Quantiles for batch indexing duration in milliseconds.
# TYPE prometheus_local_storage_indexing_batch_duration_milliseconds summary
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.5"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.9"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds{quantile="0.99"} NaN
prometheus_local_storage_indexing_batch_duration_milliseconds_sum 871.5665949999999
prometheus_local_storage_indexing_batch_duration_milliseconds_count 229.0
# HELP process_cpu_seconds Total user and system CPU time spent in seconds.
# TYPE process_cpu_seconds counter
process_cpu_seconds_total 29323.4
# HELP process_virtual_memory_bytes Virtual memory size in bytes.
# TYPE process_virtual_memory_bytes gauge
process_virtual_memory_bytes 2478268416.0
# HELP prometheus_build_info A metric with a constant '1' value labeled by version, revision, and branch from which Prometheus was built.
# TYPE prometheus_build_info gauge
prometheus_build_info{branch="HEAD",revision="ef176e5",version="0.16.0rc1"} 1.0
# HELP prometheus_local_storage_chunk_ops The total number of chunk operations by their type.
# TYPE prometheus_local_storage_chunk_ops counter
prometheus_local_storage_chunk_ops_total{type="clone"} 28.0
prometheus_local_storage_chunk_ops_total{type="create"} 997844.0
prometheus_local_storage_chunk_ops_total{type="drop"} 1345758.0
prometheus_local_storage_chunk_ops_total{type="load"} 1641.0
prometheus_local_storage_chunk_ops_total{type="persist"} 981408.0
prometheus_local_storage_chunk_ops_total{type="pin"} 32662.0
prometheus_local_storage_chunk_ops_total{type="transcode"} 980180.0
prometheus_local_storage_chunk_ops_total{type="unpin"} 32662.0
# EOF
"""
        families = list(text_string_to_metric_families(text))

        class TextCollector(object):
            def collect(self):
                return families

        registry = CollectorRegistry()
        registry.register(TextCollector())
        self.assertEqual(text.encode('utf-8'), generate_latest(registry))
Code example #25
File: exporter.py  Project: Jc2k/es-cluster-exporter
async def get_metrics(request):
    async with aiohttp.ClientSession(auth=auth) as session:
        patterns_fut = asyncio.ensure_future(get_patterns(session))
        shards_fut = asyncio.ensure_future(get_shards(session))
        patterns = await patterns_fut
        shards = await shards_fut

    registry = CollectorRegistry()
    registry.register(ElasticsearchCollector(patterns, shards))
    metrics = generate_latest(registry)

    return web.Response(body=metrics,
                        content_type='text/plain',
                        charset='utf-8')
Code example #26
File: prometheus.py  Project: Jc2k/distribd
async def run_prometheus(raft, config, identifier, registry_state,
                         images_directory):
    registry = CollectorRegistry()
    collector = MetricsCollector(raft)
    registry.register(collector)

    return await run_server(
        raft,
        "prometheus",
        config["prometheus"],
        routes,
        identifier=identifier,
        registry_state=registry_state,
        images_directory=images_directory,
        prometheus_registry=registry,
    )
Code example #27
File: export.py  Project: tujia1/python
def ApiResponse():
    checkport = Checkport()
    checkurl = CheckUrl()
    REGISTRY = CollectorRegistry(auto_describe=False)
    mesStatus = Gauge("sszj_port_monitor", "api response stats is:", ["project", "ip", "port"], registry=REGISTRY)
    mesurl = Gauge("sszj_port_url", "api response http time:", ["url"], registry=REGISTRY)
    for datas in checkport:
        project = "".join(datas.get("project"))
        ip = "".join(datas.get("host"))
        port = "".join(datas.get("port"))
        status = datas.get("status")
        # Note: only the last entry of checkurl survives this inner loop.
        for data1 in checkurl:
            url = "".join(data1.get("url"))
            time = data1.get("time")
        mesStatus.labels(project, ip, port).set(status)
        mesurl.labels(url).set(time)
    return Response(prometheus_client.generate_latest(REGISTRY), mimetype="text/plain")
Code example #28
    def setUp(self):
        self.temp_dir = TemporaryDirectory()
        self.example_dir = os.path.normpath(
            os.path.join(os.path.dirname(__file__), '../example'))

        host = '0.0.0.0'
        port = get_free_port()
        seed_addr = None
        conf = SyncObjConf(fullDumpFile=self.temp_dir.name + '/index.zip',
                           logCompactionMinTime=300,
                           dynamicMembershipChange=True)
        data_dir = self.temp_dir.name + '/index'
        grpc_port = get_free_port()
        grpc_max_workers = 10
        http_port = get_free_port()
        logger = getLogger(NAME)
        log_handler = StreamHandler()
        logger.setLevel(ERROR)
        log_handler.setLevel(INFO)
        log_format = Formatter(
            '%(asctime)s - %(levelname)s - %(pathname)s:%(lineno)d - %(message)s'
        )
        log_handler.setFormatter(log_format)
        logger.addHandler(log_handler)
        http_logger = getLogger(NAME + '_http')
        http_log_handler = StreamHandler()
        http_logger.setLevel(NOTSET)
        http_log_handler.setLevel(INFO)
        http_log_format = Formatter('%(message)s')
        http_log_handler.setFormatter(http_log_format)
        http_logger.addHandler(http_log_handler)
        metrics_registry = CollectorRegistry()

        self.indexer = Indexer(host=host,
                               port=port,
                               seed_addr=seed_addr,
                               conf=conf,
                               data_dir=data_dir,
                               grpc_port=grpc_port,
                               grpc_max_workers=grpc_max_workers,
                               http_port=http_port,
                               logger=logger,
                               http_logger=http_logger,
                               metrics_registry=metrics_registry)

        self.channel = grpc.insecure_channel('{0}:{1}'.format(host, grpc_port))
Code example #29
    def __init__(self, config: ClientSettings) -> None:
        self._config = config
        self._log = logging.getLogger(
            f'burp_exporter.client.{self._config.name}')
        self._socket: Optional[ssl.SSLSocket] = None
        self._buf: bytes = b''
        self._connected: bool = False
        self._clients: List[ClientInfo] = list()
        self._ts_last_query: datetime.datetime = datetime.datetime.utcnow(
        ) - datetime.timedelta(seconds=self._config.refresh_interval_seconds)
        self._ts_last_connect_attempt: datetime.datetime = datetime.datetime.utcnow(
        ) - datetime.timedelta(seconds=self._config.refresh_interval_seconds)
        self._parse_errors: int = 0
        # indicates if a query waits for answer
        self._in_flight = False
        self._registry = CollectorRegistry()

        self._registry.register(self)
Code example #30
File: metrics.py  Project: tkdchen/cachito
def init_metrics(app):
    """
    Initialize the Prometheus Flask Exporter.

    :return: a Prometheus Flask Metrics object
    :rtype: PrometheusMetrics
    """
    registry = CollectorRegistry()
    multiproc_temp_dir = app.config["PROMETHEUS_METRICS_TEMP_DIR"]
    hostname = socket.gethostname()

    if not os.path.isdir(multiproc_temp_dir):
        os.makedirs(multiproc_temp_dir)
    multiprocess.MultiProcessCollector(registry, path=multiproc_temp_dir)
    metrics = GunicornInternalPrometheusMetrics.for_app_factory(
        default_labels={"host": hostname}, group_by="endpoint", defaults_prefix="cachito_flask"
    )
    metrics.init_app(app)