class Exporter:
    def __init__(self):

        # Start and configure prometheus exporter
        start_http_server(9351)

        self.temperature = Gauge('temperature', 'CPU temperature')
        self.temperature.set_function(self.temperatureCallback)

        self.signal = Counter('signals', 'signals received', ['signal'])
        self.signalValues = {
            signal.SIGHUP: 'HUP',
            signal.SIGUSR1: 'USR1',
            signal.SIGUSR2: 'USR2',
        }

    def temperatureCallback(self):
        return self.cmd("sensors -u | grep -2 'Core 0' | "
                        "grep input |  awk '{print $2}'")

    def signalHandler(self, signalnum, _):
        self.signal.labels(self.signalValues[signalnum]).inc()

    def cmd(self, cmd):
        process = subprocess.run(cmd, capture_output=True, shell=True)
        return process.stdout.decode()
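A minimal wiring sketch for the class above (assumed, not part of the original snippet; it presumes signal, time and the prometheus_client imports are in scope): register the handler for the signals the exporter counts and keep the process alive so the HTTP server can keep serving /metrics.

if __name__ == '__main__':
    exporter = Exporter()
    # Count every signal listed in signalValues via the labelled counter.
    for signum in exporter.signalValues:
        signal.signal(signum, exporter.signalHandler)
    # start_http_server() runs in a daemon thread, so block the main thread.
    while True:
        time.sleep(60)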
class Prometheus(commands.Cog):
    def __init__(self, bot: commands.Bot, registry):
        self.pr_messages = Counter('incidentreporter_messages',
                                   'Total messages',
                                   registry=registry)
        self.pr_commands = Counter('incidentreporter_commands',
                                   'Total commands',
                                   registry=registry)
        self.pr_exceptions = Counter('incidentreporter_exceptions',
                                     'Unhandled exceptions',
                                     registry=registry)
        self.pr_guilds = Gauge('incidentreporter_guilds',
                               'Guilds',
                               registry=registry)
        self.pr_guilds.set_function(lambda: len(bot.guilds))

    @commands.Cog.listener()
    async def on_message(self, message: discord.Message):
        self.pr_messages.inc()

    @commands.Cog.listener()
    async def on_command(self, ctx: commands.Context):
        self.pr_commands.inc()

    @commands.Cog.listener()
    async def on_unhandled_command_error(self, ctx: commands.Context,
                                         exception, error: str):
        self.pr_exceptions.inc()
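A minimal sketch of how this cog might be exposed (assumed setup, not from the original project): create a dedicated CollectorRegistry, serve it over HTTP, and hand it to the cog.

from prometheus_client import CollectorRegistry, start_http_server

registry = CollectorRegistry()
start_http_server(9102, registry=registry)  # the port number is an arbitrary choice
# With discord.py 2.x, add_cog is a coroutine (typically awaited in setup_hook());
# with 1.x it can be called directly:
#     bot.add_cog(Prometheus(bot, registry))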
Example #3
def monitor_robot_metrics():
    # services
    nr_services = Gauge("robot_services_total", "Number of services running")
    nr_services.set_function(lambda: len(scol.list_services()))
    # memory
    robot_memory = Gauge('robot_total_memory_bytes', "Memory used by 0-robot")
    robot_memory.set_function(memory_usage_resource)  # pass the callable itself, not its return value
Example #4
class TestGauge(unittest.TestCase):
    def setUp(self):
        self.registry = CollectorRegistry()
        self.gauge = Gauge('g', 'help', registry=self.registry)

    def test_gauge(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))
        self.gauge.inc()
        self.assertEqual(1, self.registry.get_sample_value('g'))
        self.gauge.dec(3)
        self.assertEqual(-2, self.registry.get_sample_value('g'))
        self.gauge.set(9)
        self.assertEqual(9, self.registry.get_sample_value('g'))

    def test_function_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))

        @self.gauge.track_inprogress()
        def f():
            self.assertEqual(1, self.registry.get_sample_value('g'))

        f()
        self.assertEqual(0, self.registry.get_sample_value('g'))

    def test_block_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))
        with self.gauge.track_inprogress():
            self.assertEqual(1, self.registry.get_sample_value('g'))
        self.assertEqual(0, self.registry.get_sample_value('g'))

    def test_gauge_function(self):
        x = {}
        self.gauge.set_function(lambda: len(x))
        self.assertEqual(0, self.registry.get_sample_value('g'))
        self.gauge.inc()
        self.assertEqual(0, self.registry.get_sample_value('g'))
        x['a'] = None
        self.assertEqual(1, self.registry.get_sample_value('g'))

    def test_function_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))

        @self.gauge.time()
        def f():
            time.sleep(.001)

        f()
        self.assertNotEqual(0, self.registry.get_sample_value('g'))

    def test_block_decorator(self):
        self.assertEqual(0, self.registry.get_sample_value('g'))
        with self.gauge.time():
            time.sleep(.001)
        self.assertNotEqual(0, self.registry.get_sample_value('g'))
Example #5
def register_prometheus_gauges(export_internal_raspberry=False):
    g = Gauge("sensor_temperature_in_celsius", "Local room temperature around the raspberry pi", ["sensor"])
    error_g = Gauge("faulty_sensor_read", "Is 1 if the sensor could not be read.", ["sensor"])
    sensors = find_sensors()
    print "Found sensors:", ", ".join(map(lambda x: str(x), sensors))
    for sensor in sensors:
        g.labels(str(sensor)).set_function(sensor)
        sensor.set_error_gauge(error_g.labels(str(sensor)))
    if export_internal_raspberry:
        g = Gauge("cpu_temperature_in_celsius", "CPU Temperature of the Raspberry Pi")
        g.set_function(read_raspberry_pi_temperature)
    return sensors
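The function above passes each sensor object directly to set_function(), which implies each sensor is a callable that returns a temperature. A hypothetical sketch of such a sensor (the class name and the millidegree file format are illustrative assumptions, not from the original project):

class TemperatureSensor:
    def __init__(self, device_path):
        self.device_path = device_path
        self.error_gauge = None

    def set_error_gauge(self, gauge):
        self.error_gauge = gauge

    def __call__(self):
        # Read a millidegree value; report read failures via the error gauge.
        try:
            with open(self.device_path) as f:
                millicelsius = int(f.read().strip())
            if self.error_gauge is not None:
                self.error_gauge.set(0)
            return millicelsius / 1000.0
        except (OSError, ValueError):
            if self.error_gauge is not None:
                self.error_gauge.set(1)
            return 0.0

    def __str__(self):
        return self.device_path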
Example #7
def prom_export_metrics(server_ip, server_port):
    metric = Gauge('bandwidth_measure', 'Current network bandwidth')
    metric.set_function(
        lambda: bandwidth_measure_metric(server_ip, server_port))
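    # Note: once set_function() is registered, the exposed samples come from
    # the callback, so durations recorded by the @metric.time() decorator
    # below are not reported (compare the test_gauge_function test above,
    # where inc() has no visible effect after set_function()).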

    #To run as process
    @metric.time()
    def process_request(t):
        """A dummy function that takes some time."""
        time.sleep(t)

    while True:
        process_request(random.random())
Example #8
    """
    id = db.Column(UUID(as_uuid=True), default=uuid4, primary_key=True)
    name = db.Column(db.String)
    wongles = db.Column(db.Integer)
    waggles = db.Column(db.Integer)


# prometheus instruments
WIDGET_LIST_TIME = Histogram('widget_get_seconds',
                             'Time spent getting a widget')
WIDGET_REQUEST_ERRORS = Counter('widget_request_errors',
                                'Errors processing widget requests',
                                ['method', 'endpoint'])
WIDGET_COUNT = Gauge('widget_count', 'Number of widgets in the database')
WIDGET_COUNT.set_function(lambda: db.session.query(Widget).count())
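# Note: the callback above runs on every scrape of the metrics endpoint, so
# each scrape issues a COUNT query against the database.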


class WidgetList(Resource):
    """
    API resource to list widgets (get) or create a new widget (post)

    """
    @WIDGET_LIST_TIME.time()
    def get(self):
        widgets = db.session.query(Widget).all()
        serialized = [{
            'id': str(widget.id),
            'name': widget.name,
            'wongles': widget.wongles,
            'waggles': widget.waggles
Example #9
class SBQRServer(NetworkServer[SBClient]):
    hosts: Dict[Address, GameHost]
    qr_socket: socket.socket
    sb_socket: socket.socket

    _metric_games_concurrent: Gauge
    _metric_games_concurrent_by_game: Dict[str, Gauge] = {}
    _metric_games_total: Counter
    _metric_games_total_by_game: Dict[str, Counter] = {}

    def __init__(self):
        super().__init__("civgs_", "Civilization 4 GameSpy Lobby server")
        self.hosts = {}  # key = ip:port; value = other stuff
        self.qr_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            self.qr_socket.bind(("", 27900))
        except socket.error as err:
            logging.error("Bind failed for qr socket (UDP 27900): %s", err)
        self.qr_socket.setblocking(0)
        # We don't use register_server here; this is a special UDP handler
        # that doesn't accept or create clients.
        self._server_socket_handlers[self.qr_socket] = self.handle_qr

        self.sb_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sb_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            self.sb_socket.bind(("", 28910))
            self.sb_socket.listen(5)
            self.sb_socket.setblocking(0)
        except socket.error as err:
            logging.error("Bind/listen failed for sb socket (TCP 28910): %s",
                          err)
        self.register_server(self.sb_socket, SBClient)

        self._metric_games_concurrent = Gauge(
            "civgs_games_concurrent",
            "Number of open games in the Civ4 gamebrowser",
            labelnames=("game", ),
        )
        self._metric_games_concurrent.set_function(lambda: len(self.hosts))
        self._metric_games_total = Counter(
            "civgs_games_total",
            "Number of games created in the Civ4 gamebrowser",
            labelnames=("game", ),
        )

    def metric_bump(self, host: GameHost) -> None:
        logger.info(f"new game: {host}")

        game = host.game
        if game in self._metric_games_total_by_game:
            self._metric_games_total_by_game[game].inc()
            return
        self._metric_games_concurrent_by_game[
            game] = self._metric_games_concurrent.labels(game=game)

        def c(g: Optional[str] = game) -> int:
            return sum(1 if g == h.game else 0 for h in self.hosts.values())

        self._metric_games_concurrent_by_game[game].set_function(c)

        self._metric_games_total_by_game[
            game] = self._metric_games_total.labels(game=game)
        self._metric_games_total_by_game[game].inc()

    def log_hostlist(self, log=logger.debug) -> None:
        log("hostlist of server...")
        for index, (_, host) in enumerate(self.hosts.items()):
            log("[{}] {}:{} ({!r}) {}".format(index, host.ip, host.port,
                                              host.sessionid,
                                              host.last_activity))
            log(host.data)
        log("... end of hostlist")

    def qr_forw_to(self, rawdata: bytes) -> None:
        if rawdata[9:15] == b"\xfd\xfc\x1e\x66\x6a\xb2":
            ip = (str(rawdata[3]) + "." + str(rawdata[4]) + "." +
                  str(rawdata[5]) + "." + str(rawdata[6]))
            port = rawdata[7] * 256 + rawdata[8]
            if (ip, port) in self.hosts:
                logging.debug("forwarding to existing host")
            else:
                logging.debug("forwarding to unknown address")
            resp = b"\xfe\xfd\x06"
            if (ip, port) in self.hosts:
                resp += self.hosts[(ip, port)].sessionid
            else:
                resp += b"\x00" * 4
            # random cookie here
            resp += bytes((random.randrange(0, 256)) for _ in range(4))
            resp += rawdata[9:]
            self.qr_send_to(resp, (ip, port), "qrforwto")
        else:
            logging.warning("wrong data to forward")

    @staticmethod
    def qr_parse03(raw: bytes) -> Dict[str, str]:
        prepared = raw[5:].split(b"\x00\x00\x00")[0].split(b"\x00")
        if len(prepared) % 2 == 1:
            logging.warning("Could not correctly parse03: %s", prepared)
        cooked = [(
            prepared[i].decode(errors="ignore"),
            prepared[i + 1].decode(errors="ignore"),
        ) for i in range(0,
                         len(prepared) - 1, 2)]
        return dict(cooked)

    def qr_send_to(self, resp: bytes, address: Address, location: str) -> None:
        try:
            self.qr_socket.sendto(resp, address)
        except socket.error as err:
            logging.error("Socket error on location %s: %s", location, err)

    def handle_qr(self) -> None:
        recv_data, addr = self.qr_socket.recvfrom(1024)
        if len(recv_data) > 0:
            self.process_qr(recv_data, addr)

    def process_qr(self, recv_data: bytes, address: Address) -> None:
        logging.debug("process_qr address: %s", address)
        if (recv_data[0] == 0x09 and len(recv_data) >=
                5):  # 09,4xUid,'civ4bts','0'  - game checks if qr is up
            resp = b"\xfe\xfd\x09" + recv_data[1:5] + b"\x00"
            self.qr_send_to(resp, address, "09")
        elif recv_data[0] == 0x08 and len(recv_data) >= 5:  # 08 4xuid - ping
            resp = b"\xfe\xfd\x08" + recv_data[1:5]
            self.qr_send_to(resp, address, "08")
        elif recv_data[0] == 0x07 and len(
                recv_data) >= 5:  # 06 ACK - no response
            # TODO debug output this thing
            # hexprint(recv_data)
            pass
        elif recv_data[0] == 0x01 and len(
                recv_data) >= 5:  # resp to our challenge
            resp = b"\xfe\xfd\x0a" + recv_data[1:5]
            self.qr_send_to(resp, address, "01")
        elif recv_data[0] == 0x03 and len(recv_data) >= 5:
            parsed = SBQRServer.qr_parse03(recv_data)
            statechanged = int(parsed.get("statechanged", "0"))
            if statechanged == 3:
                if address in self.hosts:
                    del self.hosts[address]
                self.hosts[address] = GameHost(*address)
                self.hosts[address].sessionid = recv_data[1:5]
                self.hosts[address].data = parsed
                self.metric_bump(self.hosts[address])
                resp = b"\xfe\xfd\x01" + recv_data[1:5] + gs_consts.ghchal
                self.qr_send_to(resp, address, "03-3")
                self.sb_sendpush02(self.hosts[address])
            elif statechanged == 2:
                if address in self.hosts:
                    self.sb_senddel04(address)
                    del self.hosts[address]
            elif statechanged == 1:
                if address in self.hosts:
                    self.hosts[address].data = parsed
                    self.hosts[address].refresh()
                    self.sb_sendpush02(self.hosts[address])
            elif statechanged == 0:
                if address in self.hosts:
                    self.hosts[address].refresh()

        self.log_hostlist()

    def sb_sendpush02(self, host: GameHost) -> None:
        msg = b"\x02"
        flags = 0
        flags_buffer = b""
        if len(host.data) != 0:
            flags |= gs_consts.UNSOLICITED_UDP_FLAG
            flags |= gs_consts.HAS_KEYS_FLAG
            if "natneg" in host.data:
                flags |= gs_consts.CONNECT_NEGOTIATE_FLAG
                flags |= gs_consts.NONSTANDARD_PORT_FLAG
                flags |= gs_consts.PRIVATE_IP_FLAG  # ?
                flags |= gs_consts.NONSTANDARD_PRIVATE_PORT_FLAG  # ?
        msg += byteencode.uint8(flags)
        flags_buffer += byteencode.ipaddr(host.ip)
        flags_buffer += byteencode.uint16(host.port)
        localips: List[str] = []
        for key1, value1 in host.data.items():
            if key1.startswith("localip"):
                localips.append(value1)
        if len(localips) == 1:
            localip = localips[0]
            logging.debug("sb_sendpush02, single localip: %s", localip)
        elif not localips:
            logging.warning("sb_sendpush02: Missing localips, using fake")
            localip = "127.0.0.1"
        else:
            localip = random.choice(localips)
            logging.info(
                "sb_sendpush02: Multiple localips: %s, using random one: %s",
                localips,
                localip,
            )
        flags_buffer += byteencode.ipaddr(localip)
        flags_buffer += byteencode.uint16(int(host.data.get("localport",
                                                            6500)))
        msg += flags_buffer
        for field in gs_consts.defaultfields:
            msg += host.data[field].encode(errors="ignore") + b"\x00"
        msg += b"\x01"
        l = byteencode.uint16(len(msg) + 2)
        msg = l + msg
        # iterate through SBClients and make a message for each
        logging.debug(
            "Sending info about host %s to %d clients",
            host,
            len(self._clients_by_socket),
        )
        for client in self._clients_by_socket.values():
            client.write(msg)

    def sb_senddel04(self, address: Address) -> None:
        msg = b"\x00\x09\x04"
        msg += byteencode.ipaddr(address[0])
        msg += byteencode.uint16(address[1])
        for client in self._clients_by_socket.values():
            client.write(msg)

    def run(self, hostlist_interval=None) -> None:
        logging.info("Server ready, waiting for connections.")
        last_aliveness_check = time.time()
        last_hostlist = time.time()
        while True:
            self.select()
            now = time.time()
            if last_aliveness_check + 10 < now:
                # Need to copy here because we may be modifying the dict
                for client in list(self._clients_by_socket.values()):
                    client.check_aliveness()
                last_aliveness_check = now
            if (hostlist_interval is not None
                    and last_hostlist + hostlist_interval < now):
                self.log_hostlist(logger.info)
                last_hostlist = now
Example #10
def register_prometheus_gauges(m_func):
    upstream_snr = Gauge('upstream_snr', 'Upstream Signal to Noise ratio (dB)')
    upstream_snr.set_function(lambda: m_func()['upstream_snr'])

    upstream_current_rate = Gauge('upstream_current_rate',
                                  'Upstream rate (kbps)')
    upstream_current_rate.set_function(
        lambda: m_func()['upstream_current_rate'])

    upstream_crc_errors = Gauge('upstream_crc_errors', 'Upstream CRC errors')
    upstream_crc_errors.set_function(lambda: m_func()['upstream_crc_errors'])

    upstream_fec_errors = Gauge('upstream_fec_errors', 'Upstream FEC errors')
    upstream_fec_errors.set_function(lambda: m_func()['upstream_fec_errors'])

    upstream_attenuation = Gauge('upstream_attenuation',
                                 'Upstream attenuation (dB)')
    upstream_attenuation.set_function(lambda: m_func()['upstream_attenuation'])

    downstream_snr = Gauge('downstream_snr',
                           'Downstream Signal to Noise ratio (dB)')
    downstream_snr.set_function(lambda: m_func()['downstream_snr'])

    downstream_current_rate = Gauge('downstream_current_rate',
                                    'Downstream rate (kbps)')
    downstream_current_rate.set_function(
        lambda: m_func()['downstream_current_rate'])

    downstream_attenuation = Gauge('downstream_attenuation',
                                   'Downstream attenuation (dB)')
    downstream_attenuation.set_function(
        lambda: m_func()['downstream_attenuation'])

    downstream_crc_errors = Gauge('downstream_crc_errors',
                                  'Downstream CRC errors')
    downstream_crc_errors.set_function(
        lambda: m_func()['downstream_crc_errors'])

    downstream_fec_errors = Gauge('downstream_fec_errors',
                                  'Downstream FEC errors')
    downstream_fec_errors.set_function(
        lambda: m_func()['downstream_fec_errors'])

    status = Gauge('status', 'Connection status, 0 for down, 1 for up')
    status.set_function(lambda: m_func()['status'])

    connection_type = Gauge('connection_type',
                            'Connection type, 0 for ADSL, 1 for VDSL')
    connection_type.set_function(lambda: m_func()['connection_type'])
Example #11
        subnets.append(IPv4Network(row[0]))


def generate_per_scope():
    used_ips = 0
    for network in subnets:
        for lease in leases.get():
            if lease.valid and lease.active:
                if IPv4Address(lease.ip) in network:
                    used_ips = used_ips + 1
        USAGE_PER_SCOPE.labels(network).set(used_ips)
        SIZE_PER_SCOPE.labels(network).set(network.num_addresses - 2)
        used_ips = 0


TOTAL_LEASES.set_function(lambda: len(leases.get()))
TOTAL_CURRENT.set_function(lambda: len(leases.get_current().keys()))

# Start HTTP server
start_http_server(8000)

while True:
    # Instantiate and parse DHCPD leases file
    leases = IscDhcpLeases(dhcpd_leases)

    TOTAL_LEASES.set_function(lambda: len(leases.get()))
    TOTAL_CURRENT.set_function(lambda: len(leases.get_current()))
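    # Note: re-registering the functions here is redundant; the module-level
    # set_function() lambdas above close over the global name `leases`, which
    # this loop rebinds on every iteration.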

    generate_per_scope()

    time.sleep(5)
Example #12
import time
from prometheus_client import Gauge

TIME = Gauge('time_seconds', 'The current time.')
TIME.set_function(lambda: time.time())
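# The callback runs at scrape time, so every scrape of the metrics endpoint
# reports the current time.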
Example #13
class VLCStreamer(AbstractRadioEmitter):
    def __init__(self, config):
        self.config = config

        self.is_playing = False
        self.now_playing = None
        self.song_start_time = 0
        self.core = None

        # noinspection PyArgumentList
        self.mon_is_playing = Gauge('dj_is_playing', 'Is something playing now')
        self.mon_is_playing.set_function(lambda: 1 if self.is_playing else 0)

        self.vlc_instance = vlc.Instance()
        self.player = self.vlc_instance.media_player_new()

    def bind_core(self, core):
        self.core = core

    def get_name(self):
        return "vlc"

    def cleanup(self):
        self.player.stop()

    def stop(self):
        self.player.stop()
        self.is_playing = False
        self.now_playing = None

    def switch_track(self, track: Song):
        uri = track.media
        vlc_options = self.config.get("streamer_vlc", "vlc_options")
        media = self.vlc_instance.media_new(uri, vlc_options, "sout-keep")
        self.player.set_media(media)
        self.player.play()
        self.is_playing = True
        self.now_playing = track
        self.song_start_time = time.time()


# found example
# import vlc
# finish = 0

# def SongFinished(event):
#     global finish
#     print "Event reports - finished"
#     finish = 1

# instance = vlc.Instance()
# player = instance.media_player_new()
# media = instance.media_new_path('vp1.mp3') #Your audio file here
# player.set_media(media)
# events = player.event_manager()
# events.event_attach(vlc.EventType.MediaPlayerEndReached, SongFinished)
# player.play()
# while finish == 0:
#         sec = player.get_time() / 1000
#         m, s = divmod(sec, 60)
#         print "%02d:%02d" % (m,s)

# (pydoc listing of vlc.EventType omitted: event-type constants such as
#  MediaPlayerEndReached and the methods inherited from _Enum/ctypes)
# Decorate function with metric.
@REQUEST_TIME.time()
def process_request(t):
    """A dummy function that takes some time."""
    time.sleep(t)


def random_walk(metric):
    metrics[metric] += math.sin(
        (random.randint(0, 9999) / 10000.0) * math.pi * 2.0)
    return metrics[metric]


if __name__ == '__main__':
    pew = Gauge('pew', 'Prometheus metric')
    pew.set_function(lambda: random_walk('pew'))
    mew = Gauge('mew', 'Prometheus metric')
    mew.set_function(lambda: random_walk('mew'))
    dew = Gauge('dew', 'Prometheus metric')
    dew.set_function(lambda: random_walk('dew'))
    few = Gauge('few', 'Prometheus metric')
    few.set_function(lambda: random_walk('few'))
    new = Gauge('new', 'Prometheus metric')
    new.set_function(lambda: random_walk('new'))
    sew = Gauge('sew', 'Prometheus metric')
    sew.set_function(lambda: random_walk('sew'))
    # Start up the server to expose the metrics.
    start_http_server(8000)
    # Generate some requests.
    while True:
        process_request(random.random())
Example #15
File: app.py Project: arska/imapflagged
    annotate the processing start time to each flask request
    """
    request.start_time = time.time()


def after_request(response):
    """
    after returning the request calculate metrics about this request
    """
    # time can go backwards...
    request_latency = max(time.time() - request.start_time, 0)
    # pylint: disable-msg=no-member
    FLASK_REQUEST_LATENCY.labels(request.method,
                                 request.path).observe(request_latency)
    FLASK_REQUEST_SIZE.labels(request.method, request.path,
                              response.status_code).set(len(response.data))
    FLASK_REQUEST_COUNT.labels(request.method, request.path,
                               response.status_code).inc()
    return response


if __name__ == "__main__":
    # load settings from .env for development
    load_dotenv()
    # wire the prometheus metric to the index() function above
    FLAGGED_ITEMS.set_function(index)

    APP.before_request(before_request)
    APP.after_request(after_request)
    APP.run(host="0.0.0.0", port=os.environ.get("listenport", 8080))
Example #16
    def add_quota_remaining(self):
        g = Gauge("aa_quota_remaining_bytes", "AAISP quota remaining (bytes)")
        g.set_function(aa_prometheus.quota.remaining)

        self.metrics.append(g)
Example #17
    description="Electricity company price Prometheus exporter v" +
    str(version))
parser.add_argument("-p",
                    "--port",
                    metavar="<port>",
                    required=False,
                    help="Port for listenin",
                    default=8111,
                    type=int)
args = parser.parse_args()

listen_port = args.port
ELECTRICITY_PRICE_ENDESA = Gauge('electricity_price_endesa_kwh_eur_price',
                                 'endesa kwh euro price')

ELECTRICITY_PRICE_ENDESA.set_function(lambda: get_endesa_price())


def get_endesa_price():
    nowDate = strftime("%Y-%m-%d", gmtime())
    data = {u'currentDate': nowDate, 'currentRate': 'GEN'}
    headers = {
        u"Content-Type": "application/x-www-form-urlencoded",
        "x-requested-with": "XMLHttpRequest"
    }
    r = requests.post(
        "https://www.endesaclientes.com/sites/Satellite/?pagename=SiteEntry_IB_ES/LandingPrice/GetPrices",
        headers=headers,
        data=data)
    pos = r.text.find("var actualPrice = ") + 19
    parsed = r.text[pos:]
Example #18
class QueueManager:
    def __init__(self, config):
        """
        :param configparser.ConfigParser config:
        """
        self.config = config
        self.logger = logging.getLogger("tg_dj.queueManager")
        self.logger.setLevel(
            getattr(
                logging,
                self.config.get("queue_manager",
                                "verbosity",
                                fallback="warning").upper()))

        self.is_media_playing = False
        self.playlists: Dict[UID, List[Song]] = {}
        self.queue: List[UID] = []
        self.backlog: List[Song] = []
        self.backlog_played: List[Song] = []
        self.backlog_played_media: List[str] = []

        self.playlists[-1] = []  # For tracks managed by admins

        # noinspection PyArgumentList
        self.mon_queue_len = Gauge('dj_queue_length', 'Queue length')
        self.mon_queue_len.set_function(lambda: len(self.queue))
        # noinspection PyArgumentList
        self.mon_playlist_len = Gauge('dj_playlist_length', 'Playlist length')
        self.mon_playlist_len.set_function(
            lambda: sum(len(self.playlists[i]) for i in self.playlists))
        # noinspection PyArgumentList
        self.mon_backlog_len = Gauge('dj_backlog_length', 'Backlog length')
        self.mon_backlog_len.set_function(lambda: len(self.backlog))

        self.lock = threading.Lock()
        self.load_init()
        self.populate_backlog()

    def load_init(self):
        try:
            queue_file = self.config.get("queue_manager",
                                         "queue_file",
                                         fallback="queue.json")
            with open(queue_file) as f:
                data = json.loads(f.read())
                self.queue = data["queue"]
                Song.counter = data["last_id"]

                for pl in data["playlists"]:
                    user_id = pl["user_id"]
                    self.playlists[user_id] = []
                    for d in pl["tracks"]:
                        track = Song.from_dict(d)
                        self.playlists[user_id].append(track)

                try:
                    self.backlog_played_media = data["backlog_played_media"]
                except KeyError:
                    self.backlog_played_media = data["backlog_already_played"]
        except (FileNotFoundError, ValueError) as _:
            pass

    def populate_backlog(self):
        path = os.path.abspath(
            self.config.get("queue_manager",
                            "fallback_media_dir",
                            fallback="media_fallback"))
        files = get_files_in_dir(path)
        add_to_end = []
        for file in files:
            file_path = os.path.join(path, file)
            try:
                title, artist, duration = get_mp3_info(file_path)
            except HeaderNotFoundError as e:
                self.logger.warning(
                    f"Not loading {file} because it does not look like mp3")
                continue
            title = remove_links(title)
            artist = remove_links(artist)
            if file_path in self.backlog_played_media:
                add_to_end.append(Song(file_path, title, artist, duration, -1))
            else:
                self.backlog.append(
                    Song(file_path, title, artist, duration, -1))

        random.shuffle(self.backlog)
        random.shuffle(add_to_end)
        self.backlog += add_to_end

        self.logger.info("Fallback playlist length: %d " % len(self.backlog))

    def cleanup(self):
        out_dict = {
            "last_id":
            Song.counter,
            "queue":
            self.queue,
            "playlists": [{
                "user_id": user_id,
                "tracks": [a.to_dict() for a in tracks],
            } for user_id, tracks in self.playlists.items()],
            "backlog_played_media": [a.media for a in self.backlog_played]
        }
        queue_file = self.config.get("queue_manager",
                                     "queue_file",
                                     fallback="queue.json")
        with open(queue_file, "w") as f:
            f.write(json.dumps(out_dict, ensure_ascii=False))
            self.logger.info("Queue has been saved to file \"%s\"" %
                             queue_file)

    # Tracks manipulations

    def add_track(self, path: str, title: str, artist: str, duration: int,
                  user_id: int) -> Song:
        with self.lock:
            track = Song(path, title, artist, duration, user_id)

            if user_id not in self.playlists:
                self.playlists[user_id] = []

            self.playlists[user_id].append(track)

            return track

    def remove_track(self, tid: int) -> int:
        with self.lock:
            track = self.get_track(tid)
            local_pos, global_pos = self.get_track_position(track)
            if track is not None:
                user_id = track.user_id
                self.playlists[user_id].remove(track)
                if len(self.playlists[user_id]) == 0:
                    self.queue.remove(user_id)
                self.logger.info("Playing track from main queue: %s",
                                 track.title)
            else:
                self.logger.warning(
                    "Unable to remove track #%d from the playlist" % tid)
            return global_pos

    def raise_track(self, tid: int):
        with self.lock:
            track = self.get_track(tid)
            if track is not None:
                user_id = track.user_id
                self.playlists[user_id].remove(track)
                self.playlists[user_id].insert(0, track)
            else:
                self.logger.warning("Unable to raise track #%d", tid)

    def play_next(self, track: Song) -> Tuple[Song, int]:
        with self.lock:
            user_id = track.user_id
            if user_id is None:
                user_id = -1

            self.playlists[user_id].insert(0, track)

            try:
                self.queue.remove(user_id)
            except ValueError:
                pass
            self.queue.insert(0, user_id)

            return track, len(self.playlists[user_id])

    def get_track(self, tid: int) -> Optional[Song]:
        user_ids = (uid for uid in self.queue if uid in self.playlists.keys())
        for uid in user_ids:
            for t in self.playlists[uid]:
                if t.id == tid:
                    return t
        return None

    def get_track_position(
            self,
            track: Optional[Song]) -> Tuple[Optional[int], Optional[int]]:
        if track is None:
            return None, None

        user_ids = (uid for uid in self.queue if uid in self.playlists.keys())

        user_position = self.queue.index(track.user_id)
        user_ids_ahead = self.queue[:user_position + 1]

        loc = self.playlists[track.user_id].index(track) + 1
        glob = sum(map(lambda uid: min(len(self.playlists[uid]), loc - 1), user_ids)) + \
            sum(1 for uid in user_ids_ahead if len(self.playlists[uid]) >= loc)

        return loc, glob

    def get_all_tracks(self) -> List[Song]:
        res = []
        for uid in self.playlists:
            res += [track for track in self.playlists[uid]]
        return res

    def get_tracks_queue_length(self):
        return sum(
            len(p) for p in (self.playlists[uid] for uid in self.queue
                             if uid in self.playlists))

    def get_user_tracks(self, user_id: int) -> List[Song]:
        if user_id not in self.playlists:
            return []
        return self.playlists[user_id]

    def get_queue_tracks(self, offset: int = 0, limit: int = 0) -> List[Song]:
        tracks = []
        queue = [uid for uid in self.queue if uid in self.playlists.keys()]

        i = 0
        while True:
            end = True
            for uid in queue:
                if len(self.playlists[uid]) <= i:
                    continue
                tracks.append(self.playlists[uid][i])
                end = False
            i += 1
            if end:
                break

        if limit == 0:
            return tracks[offset:]
        else:
            return tracks[offset:offset + limit]

    def pop_first_track(self) -> Optional[Song]:
        with self.lock:

            track = None
            for uid in self.queue:
                if len(self.playlists[uid]) == 0:
                    continue

                track = self.playlists[uid].pop(0)
                self.queue.remove(uid)
                if len(self.playlists[uid]) != 0:
                    self.queue.append(uid)

                if not os.path.isfile(track.media):
                    self.logger.warning("Media does not exist for track: %s",
                                        track.title)
                    track = None
                    continue

                self.logger.info("Playing track from main queue: %s",
                                 track.title)
                break

            while track is None:
                try:
                    track: Optional[Song] = self.backlog.pop(0)
                    if not os.path.isfile(track.media):
                        self.logger.warning(
                            "Media does not exist for fallback track: %s",
                            track.title)
                        track = None
                        continue

                    self.logger.info(
                        "Playing track from fallback playlist: %s",
                        track.title)
                    self.backlog_played.append(track)

                    if len(self.backlog) <= len(self.backlog_played):
                        i = random.randrange(len(self.backlog_played))
                        self.backlog.append(self.backlog_played.pop(i))
                except IndexError:
                    track = None
                    break

            return track

    def get_first_track(self) -> Optional[Song]:
        for uid in self.queue:
            if len(self.playlists[uid]) > 0:
                return self.playlists[uid][0]
        try:
            return self.backlog[0]
        except IndexError:
            return None

    # Queue manipulations
    #
    # def get_queue(self, offset=0, limit=0) -> List[UID]:
    #     if limit == 0:
    #         return list(self.queue)[offset:]
    #     else:
    #         return list(self.queue)[offset:offset + limit]

    def get_users_queue_length(self):
        return len(self.queue)

    def is_in_queue(self, user_id: int) -> bool:
        return user_id in self.queue

    def add_to_queue(self, user_id: int):
        with self.lock:
            if user_id in self.queue:
                position = self.queue.index(user_id)
            else:
                if len(self.playlists[user_id]) > 0:
                    self.queue.append(user_id)
                    position = len(self.queue)
                else:
                    position = None
                    self.logger.warning(
                        "User can't enter queue with empty playlist")
            return position

    def remove_from_queue(self, user_id: int):
        with self.lock:
            try:
                position = self.queue.index(user_id)
                self.queue.remove(user_id)
            except ValueError:
                position = None
                self.logger.warning(
                    "Unable to remove user #%d from the queue" % user_id)
            return position

    def raise_user_in_queue(self, user_id: int):
        with self.lock:
            try:
                self.queue.remove(user_id)
                self.queue.insert(0, user_id)
            except ValueError:
                self.logger.warning("Unable to raise user #%d in the queue" %
                                    user_id)

    # Voting

    def vote_up(self, user_id: int, track_id: int):
        with self.lock:
            tracks = self.get_all_tracks()
            try:
                track = next(t for t in tracks if t.id == track_id)
                if user_id in track.haters:
                    track.haters.remove(user_id)
            except (ValueError, StopIteration):
                self.logger.warning(
                    "Unable to find track #%d in the playlists" % track_id)

    def vote_down(self, user_id: int, track_id: int):
        with self.lock:
            tracks = self.get_all_tracks()
            try:
                track = next(t for t in tracks if t.id == track_id)
                if user_id not in track.haters:
                    track.haters.append(user_id)
            except (ValueError, StopIteration):
                self.logger.warning(
                    "Unable to find track #%d in the playlists" % track_id)
Example #19
    down_time_seconds = prometheus_querier.get_downtime_average_seconds(
        results)
    if len(down_time_seconds) <= 1:
        return 0
    else:
        average_seconds = sum(down_time_seconds) / len(down_time_seconds)
        average_minutes = average_seconds / 60
        print("Count: ", len(down_time_seconds), "Avg length: (minutes)",
              average_minutes)
        return average_minutes


from prometheus_client import Gauge
IN_PROGRESS = Gauge("mtd_myapp_mean_time_to_recover",
                    "Average time for app to recover")
IN_PROGRESS.set_function(get_downtime_average)


def my_app(environ, start_fn):
    if environ['PATH_INFO'] == '/metrics':
        # IN_PROGRESS.inc()

        return metrics_app(environ, start_fn)
    start_fn('200 OK', [])
    return [b'Metrics are hosted at /metrics']
    # return ["how are you"]


if __name__ == '__main__':
    print("Starting server on port ", PORT)
    httpd = make_server('', PORT, my_app)
Example #20
        LOG.exception('Could not get active_users')
        count = 0
    return count


global_registry().gauge_callback(
    name='users.active',
    callback=active_users,
    label='Active Users',
    description='Number of users that were active in the last hour',
    numerator='users',
)

prometheus_active_users = Gauge('hue_active_users',
                                'Hue Active Users in All Instances')
prometheus_active_users.set_function(active_users)


def active_users_per_instance():
    from useradmin.models import UserProfile
    try:
        count = UserProfile.objects.filter(
            last_activity__gt=datetime.now() - timedelta(hours=1),
            hostname=get_localhost_name()).count()
    except Exception:
        LOG.exception('Could not get active_users per instance')
        count = 0
    return count


global_registry().gauge_callback(
Example #21
    def process(self, i, k):
        return int(self.data[i][k + 1])


if __name__ == '__main__':
    #fetchConfig()
    cNVLink = Counter('gpu_nvlink_read_error_total',
                      'Exceptions during reading data of nvlink')
    cPCI = Counter('gpu_pci_read_error_total',
                   'Exceptions during reading data of pci')
    #fetchData()
    dataNVLinkKBytes = fetcherNVLink(
        0, re.compile(r'\s+Link (\d+): Rx0: (\d+) KBytes, Tx0: (\d+) KBytes'))
    dataNVLinkKBytes.fetch()
    g0 = Gauge('gpu_nvlink_0_count_total', 'Number of NVLink connections')
    g0.set_function(lambda: dataNVLinkKBytes.fetch())
    RxNVLink0 = Gauge('gpu_nvlink_rx_kbytes', 'Received KBytes via NVLink',
                      ['GPUID', 'LinkID'])
    TxNVLink0 = Gauge('gpu_nvlink_tx_kbytes', 'Transmitted KBytes via NVLink',
                      ['GPUID', 'LinkID'])
    dataNVLinkKpackets = fetcherNVLink(
        1,
        re.compile(r'\s+Link (\d+): Rx1: (\d+) Kpackets, Tx1: (\d+) Kpackets'))
    dataNVLinkKpackets.fetch()
    g1 = Gauge('gpu_nvlink_1_count_total', 'Number of NVLink connections')
    g1.set_function(lambda: dataNVLinkKpackets.fetch())
    RxNVLink1 = Gauge('gpu_nvlink_rx_kpakets', 'Received Kpackets via NVLink',
                      ['GPUID', 'LinkID'])
    TxNVLink1 = Gauge('gpu_nvlink_tx_kpakets',
                      'Transmitted Kpackets via NVLink', ['GPUID', 'LinkID'])
    dataPCI = fetcherPCI()
Example #22
def setup():
    d = Gauge('storage_space_thumb', 'Available Storage Space for Thumbnails')
    d.set_function(storage_available_thumb)
Example #23
import time
from prometheus_client import start_http_server
from prometheus_client import Gauge

TIME = Gauge('time_seconds',
             'The current time.')
TIME.set_function(lambda: time.time())

if __name__ == "__main__":
    start_http_server(8000)
    while True:
        time.sleep(1)
Example #24
    rate_denominator='seconds',
)

# ------------------------------------------------------------------------------


def num_of_queries():
    from desktop.models import Document2  # Avoid circular dependency
    try:
        count = Document2.objects.filter(type__istartswith='query-',
                                         is_history=True,
                                         last_modified__gt=datetime.now() -
                                         timedelta(minutes=10)).count()
    except Exception:
        LOG.exception('Could not get num_of_queries')
        count = 0
    return count


global_registry().gauge_callback(
    name='queries.number',
    callback=num_of_queries,
    label='number of queries',
    description='Number of queries executed in the last 10 minutes',
    numerator='users',
)

prometheus_queries_numbers = Gauge('hue_queries_numbers',
                                   'Hue - numbers of queries')
prometheus_queries_numbers.set_function(num_of_queries)
Example #25
File: server.py Project: cleviry/Souei
    await runner.setup()
    site = web.TCPSite(runner, '0.0.0.0', 8000)
    await site.start()


available_proxy_gauge = Gauge('souei_available_proxy', '')


def available_proxy_gauge_fn():
    s = sess_maker()
    res = s.query(Proxy).filter(Proxy.status == STATUS_OK).count()
    s.close()
    return res


available_proxy_gauge.set_function(available_proxy_gauge_fn)

error_proxy_gauge = Gauge('souei_error_proxy', '')


def error_proxy_gauge_fn():
    s = sess_maker()
    res = s.query(Proxy).filter(Proxy.status == STATUS_ERROR).count()
    s.close()
    return res


error_proxy_gauge.set_function(error_proxy_gauge_fn)

new_proxy_gauge = Gauge('souei_new_proxy', '')
class Exporter():

    def __init__(self):
        self.basebackup_exception = False
        self.xlog_exception = False
        self.bbs = []
        self.last_archive_check = None
        self.archive_status = None

        # Declare metrics
        self.basebackup = Gauge('walg_basebackup',
                                'Remote Basebackups',
                                ['start_wal_segment', 'start_lsn'])
        self.basebackup_count = Gauge('walg_basebackup_count',
                                      'Remote Basebackups count')
        self.basebackup_count.set_function(lambda: len(self.bbs))

        self.last_upload = Gauge('walg_last_upload',
                                 'Last upload of incremental or full backup',
                                 ['type'])
        self.last_upload.labels('xlog').set_function(
            self.last_xlog_upload_callback)
        self.last_upload.labels('basebackup').set_function(
            lambda: self.bbs[len(self.bbs) - 1]['start_time'].timestamp()
            if self.bbs else 0
        )
        self.oldest_basebackup = Gauge('walg_oldest_basebackup',
                                       'oldest full backup')
        self.oldest_basebackup.set_function(
            lambda: self.bbs[0]['start_time'].timestamp() if self.bbs else 0
        )

        self.xlog_ready = Gauge('walg_missing_remote_wal_segment_at_end',
                                'Xlog ready for upload')
        self.xlog_ready.set_function(self.xlog_ready_callback)

        self.exception = Gauge('walg_exception',
                               'Wal-g exception: 2 for basebackup error, '
                               '3 for xlog error and '
                               '5 for remote error')
        self.exception.set_function(
            lambda: ((1 if self.basebackup_exception else 0) +
                     (2 if self.xlog_exception else 0)))

        self.xlog_since_last_bb = Gauge('walg_xlogs_since_basebackup',
                                        'Xlog uploaded since last base backup')
        self.xlog_since_last_bb.set_function(self.xlog_since_last_bb_callback)

        self.last_backup_duration = Gauge('walg_last_backup_duration',
                                          'Duration of the last full backup')
        self.last_backup_duration.set_function(
            lambda: ((self.bbs[len(self.bbs) - 1]['finish_time'] -
                      self.bbs[len(self.bbs) - 1]['start_time']).total_seconds()
                     if self.bbs else 0)
        )
        self.walg_backup_fuse = Gauge('walg_backup_fuse',
                                      '0 backup fuse is OK, 1 backup fuse is burnt')
        self.walg_backup_fuse.set_function(self.backup_fuse_callback)
        # Fetch remote base backups
        self.update_basebackup()

    def update_basebackup(self, *unused):
        """
            When this script receive a SIGHUP signal, it will call backup-list
            and update metrics about basebackups
        """

        info('Updating basebackups metrics...')
        try:
            # Fetch remote backup list
            res = subprocess.run(["wal-g", "backup-list",
                                  "--detail", "--json"],
                                 capture_output=True, check=True)
            new_bbs = list(map(format_date, json.loads(res.stdout)))
            new_bbs.sort(key=lambda bb: bb['start_time'])
            new_bbs_name = [bb['backup_name'] for bb in new_bbs]
            old_bbs_name = [bb['backup_name'] for bb in self.bbs]
            bb_deleted = 0

            # Remove metrics for deleted backups
            for bb in self.bbs:
                if bb['backup_name'] not in new_bbs_name:
                    # Backup deleted
                    self.basebackup.remove(bb['wal_file_name'],
                                           bb['start_lsn'])
                    bb_deleted = bb_deleted + 1
            # Add metrics for new backups
            for bb in new_bbs:
                if bb['backup_name'] not in old_bbs_name:
                    (self.basebackup.labels(bb['wal_file_name'],
                                            bb['start_lsn'])
                     .set(bb['start_time'].timestamp()))
            # Update backup list
            self.bbs = new_bbs
            info("%s basebackups found (first: %s, last: %s), %s deleted",
                 len(self.bbs),
                 self.bbs[0]['start_time'],
                 self.bbs[len(self.bbs) - 1]['start_time'],
                 bb_deleted)

            self.basebackup_exception = False
        except subprocess.CalledProcessError as e:
            error(e)
            self.basebackup_exception = True

    def last_archive_status(self):
        if (self.last_archive_check is None or
                datetime.datetime.now().timestamp() -
                self.last_archive_check > 1):
            self.archive_status = self._last_archive_status()
            self.last_archive_check = datetime.datetime.now().timestamp()
        return self.archive_status

    def _last_archive_status(self):
        with psycopg2.connect(
            host=os.getenv('PGHOST', 'localhost'),
            port=os.getenv('PGPORT', '5432'),
            user=os.getenv('PGUSER', 'postgres'),
            password=os.getenv('PGPASSWORD'),
            dbname=os.getenv('PGDATABASE', 'postgres'),
        ) as db_connection:
            db_connection.autocommit = True
            with db_connection.cursor(cursor_factory=DictCursor) as c:
                c.execute('SELECT archived_count, failed_count, '
                          'last_archived_wal, '
                          'last_archived_time, '
                          'last_failed_wal, '
                          'last_failed_time '
                          'FROM pg_stat_archiver')
                res = c.fetchone()
                if not res:
                    raise Exception("Cannot fetch archive status")
                return res

    def last_xlog_upload_callback(self):
        archive_status = self.last_archive_status()
        return archive_status['last_archived_time'].timestamp()

    def xlog_ready_callback(self):
        res = 0
        try:
            for f in os.listdir(archive_dir):
                # search for xlog waiting for upload
                if READY_WAL_RE.match(f):
                    res += 1
            self.xlog_exception = 0
        except FileNotFoundError:
            self.xlog_exception = 1
        return res

    def xlog_since_last_bb_callback(self):
        # Compute xlog_since_last_basebackup
        if self.bbs:
            archive_status = self.last_archive_status()
            return wal_diff(archive_status['last_archived_wal'],
                            self.bbs[-1]['wal_file_name'])
        else:
            return 0

    def backup_fuse_callback(self):
        return int(os.path.exists('/tmp/failed_pg_archive'))
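

# A minimal wiring sketch, not part of the snippet above: register
# update_basebackup() as the SIGHUP handler mentioned in its docstring and
# serve the metrics over HTTP. The class name WalgExporter and port 9351 are
# assumptions, since neither appears in the snippet.
import signal
import time

if __name__ == '__main__':
    exporter = WalgExporter()
    signal.signal(signal.SIGHUP, exporter.update_basebackup)
    start_http_server(9351)
    while True:
        time.sleep(60)
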
start_http_server(PROMETHEUS_PORT)

request_count = Counter('example_requests_total',
                        'Count of processed requests', (
                            'method',
                            'handler',
                        ))
bitcoin_gauge = Gauge('example_bitcoin_exchange_rate_dollar',
                      'Bitcoin to dollar rate')
danger_gauge = Gauge('example_danger', 'Danger gauge', ('env', ))
request_latency_histogram = Histogram(
    'example_requests_latency_histogram_seconds', 'Histogram', ('handler', ))
request_latency_summary = Summary('example_requests_latency_summary_seconds',
                                  'Summary', ('handler', ))

bitcoin_gauge.set_function(lambda: requests.get(
    'https://blockchain.info/ticker').json()['USD']['last'])
for _env in ENV_LIST:
    danger_gauge.labels(env=_env).set(0)
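

# A small sketch (not from the original module) of timing a handler with the
# Summary declared above via its time() context manager; the '/timed' route
# and its body are illustrative only.
import time


@app.route('/timed')
def timed():
    request_count.labels(method='get', handler='/timed').inc()
    with request_latency_summary.labels(handler='/timed').time():
        time.sleep(random.random() / 10)
    return 'ok'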


@app.context_processor
def inject_stage_and_region():
    return dict(BASE_URL=BASE_URL)


@app.route('/')
def index():
    request_count.labels(method='get', handler='/').inc()

    val = random.gauss(0.5, 0.2)
    request_latency_histogram.labels(handler='/').observe(val)
Example #28
            "total": 0.0,
            "err_code": 0
        }
    }
}

# Send command and receive reply

# Create a metric to track time spent and requests made.
# Gauge: it goes up and down, a snapshot of the current state

REQUEST_POWER = Gauge('hs110_power_watt', 'HS110 Watt measure')
REQUEST_CURRENT = Gauge('hs110_current', 'HS110 Current measure')
REQUEST_VOLTAGE = Gauge('hs110_voltage', 'HS110 Voltage measure')

REQUEST_POWER.set_function(lambda: get_power())
REQUEST_CURRENT.set_function(lambda: get_current())
REQUEST_VOLTAGE.set_function(lambda: get_voltage())


def get_power():
    """ Get HS110 power """
    try:
        return received_data["emeter"]["get_realtime"]["power"]
    except socket.error:
        quit("Could not connect to host " + ip + ":" + str(port))
        return 0


def get_current():
    """ Get HS110 current """
Example #29
def setup():
    ready_gauge = Gauge('job_count_ready', 'Number of jobs currently ready')
    ready_gauge.set_function(lambda: job_count_running('ready'))
    running_gauge = Gauge('job_count_running',
                          'Number of jobs currently running')
    running_gauge.set_function(lambda: job_count_running('running'))
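

# job_count_running() is not shown in this snippet; a hypothetical version
# consistent with the calls above might simply count jobs in a given state,
# for example against an in-memory job list (purely illustrative):
JOBS = [{"state": "ready"}, {"state": "running"}, {"state": "running"}]


def job_count_running(state):
    """Return the number of jobs currently in the given state."""
    return sum(1 for job in JOBS if job["state"] == state)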
Example #30
File: server.py  Project: vladertel/tg_dj
class StatusWebServer(AbstractComponent):
    def __init__(self, config):
        self.config = config
        self.logger = logging.getLogger("tg_dj.web")
        self.logger.setLevel(
            getattr(
                logging,
                self.config.get("web_server", "verbosity",
                                fallback="warning").upper()))

        settings = {
            "static_path": os.path.join(os.path.dirname(__file__), "static"),
            "debug": False,
        }

        app = tornado.web.Application([
            (r"/", MainHandler, dict(server=self)),
            (r'/ws', WebSocketHandler, dict(server=self)),
        ], **settings)

        app.listen(
            port=self.config.getint("web_server", "listen_port",
                                    fallback=8080),
            address=self.config.get("web_server",
                                    "listen_addr",
                                    fallback="127.0.0.1"),
        )

        self.core = None
        self.ws_clients = []
        self.KEEP_ALIVE_INTERVAL = 60

        # noinspection PyArgumentList
        self.mon_web_ws_clients = Gauge('dj_web_ws_clients',
                                        'Number of websocket connections')
        self.mon_web_ws_clients.set_function(lambda: len(self.ws_clients))

        self.stream_url = self.config.get("web_server",
                                          "stream_url",
                                          fallback="/stream")
        self.ws_url = self.config.get("web_server", "ws_url", fallback="auto")
        self.telegram_bot_name = self.config.get("web_server",
                                                 "telegram_bot_name",
                                                 fallback="inbicst_dj_bot")

    def get_name(self) -> str:
        return "StatusWebServer"

    def bind_core(self, core):
        self.core = core
        self.core.add_state_update_callback(self.update_state)

    def get_current_state(self):
        track = self.core.get_current_song()
        track_dict = Song.to_dict(track)

        progress = self.core.get_song_progress()
        return track_dict, progress

    def update_state(self, track: Song):
        if track is not None:
            self.broadcast_update(track.to_dict())
        else:
            self.broadcast_stop()

    def broadcast_update(self, data):
        for c in self.ws_clients:
            c.send("update", data)

    def broadcast_stop(self):
        for c in self.ws_clients:
            c.send("stop_playback", {})
Example #31
def monitor_host_metrics():
    # cpu
    process_cpu_usage_percents = Gauge('process_cpu_usage_percents',
                                       'CPU Usage in percents')
    # set_function() expects a callable, not an already-sampled value
    process_cpu_usage_percents.set_function(lambda: psutil.cpu_percent())
    process_cpu_time_user_mode = Gauge('process_cpu_time_user_mode', '')
    process_cpu_time_user_mode.set_function(lambda: psutil.cpu_times().user)
    process_cpu_time_system_mode = Gauge('process_cpu_time_system_mode', '')
    process_cpu_time_system_mode.set_function(
        lambda: psutil.cpu_times().system)
    process_cpu_time_idle_mode = Gauge('process_cpu_time_idle_mode', '')
    process_cpu_time_idle_mode.set_function(lambda: psutil.cpu_times().idle)
    # mem
    MEMORY_TOTAL = Gauge('host_memory_total_bytes', '')

    if ("MEMORY_LIMIT" in os.environ):
        mem_limit_str = os.environ["MEMORY_LIMIT"]
        MEMORY_TOTAL.set(mem_limit_str[:len(mem_limit_str) - 1])
    else:
        MEMORY_TOTAL.set_function(lambda: virtual_memory().total)

    MEMORY_CACHED = Gauge('host_memory_cached_bytes', '')
    MEMORY_CACHED.set_function(lambda: virtual_memory().cached)
    MEMORY_INACTIVE = Gauge('host_memory_inactive_bytes', '')
    MEMORY_INACTIVE.set_function(lambda: virtual_memory().inactive)
    MEMORY_ACTIVE = Gauge('host_memory_active_bytes', '')
    MEMORY_ACTIVE.set_function(lambda: virtual_memory().active)
    MEMORY_BUFFERS = Gauge('host_memory_buffers_bytes', '')
    MEMORY_BUFFERS.set_function(lambda: virtual_memory().buffers)
    MEMORY_FREE = Gauge('host_memory_free_bytes', '')
    MEMORY_FREE.set_function(lambda: virtual_memory().free)
    host_memory_used_bytes = Gauge('host_memory_used_bytes', '')
    host_memory_used_bytes.set_function(lambda: virtual_memory().used)
    MEMORY_PERCENT = Gauge('host_memory_percents', '')
    MEMORY_PERCENT.set_function(lambda: virtual_memory().percent)
    SWAP_MEMORY_PERCENT = Gauge('host_swap_memory_percent', '')
    SWAP_MEMORY_PERCENT.set_function(lambda: psutil.swap_memory().percent)
    SWAP_MEMORY_USED = Gauge('host_swap_memory_used_bytes', '')
    SWAP_MEMORY_USED.set_function(lambda: psutil.swap_memory().used)
    SWAP_MEMORY_FREE = Gauge('host_swap_memory_free_bytes', '')
    SWAP_MEMORY_FREE.set_function(lambda: psutil.swap_memory().free)

    # network
    network_bytes_sent_int = Gauge(
        'network_bytes_sent_int', 'Total bytes sent via current interface',
        ["interface"])
    network_bytes_recv_int = Gauge(
        'network_bytes_recv_int', 'Total bytes received via current interface',
        ["interface"])

    for interface in netifaces.interfaces():
        # Bind the loop variable as a default argument; otherwise every lambda
        # would read the final value of `interface` at scrape time.
        network_bytes_sent_int.labels(interface).set_function(
            lambda iface=interface: net_stat.rx_tx_bytes(iface)[0])
        network_bytes_recv_int.labels(interface).set_function(
            lambda iface=interface: net_stat.rx_tx_bytes(iface)[1])

    # Per-second rates can be calculated from these totals

    network_bytes_sent = Gauge('host_net_tx_bytes', '')
    network_bytes_sent.set_function(
        lambda: psutil.net_io_counters().bytes_sent)

    network_bytes_recv = Gauge('network_bytes_recv', 'Total bytes received')
    network_bytes_recv.set_function(
        lambda: psutil.net_io_counters().bytes_recv)

    network_packets_sent = Gauge('network_packets_sent', '')
    network_packets_sent.set_function(
        lambda: psutil.net_io_counters().packets_sent)

    network_packets_recv = Gauge('network_packets_recv', '')
    network_packets_recv.set_function(
        lambda: psutil.net_io_counters().packets_recv)

    network_errin = Gauge('network_errin', '')
    network_errin.set_function(lambda: psutil.net_io_counters().errin)

    network_errout = Gauge('network_errout', '')
    network_errout.set_function(lambda: psutil.net_io_counters().errout)

    network_dropin = Gauge('network_dropin', '')
    network_dropin.set_function(lambda: psutil.net_io_counters().dropin)

    network_dropout = Gauge('network_dropout', '')
    network_dropout.set_function(lambda: psutil.net_io_counters().dropout)

    # disk IO
    DISK_READ = Gauge('host_disk_reads', 'Total reads for all disks')
    # Pass the functions themselves; calling them here would freeze one value
    DISK_READ.set_function(disk_stat.disk_reads_persec)

    DISK_WRITE = Gauge('host_disk_writes', 'Total writes for all disks')
    DISK_WRITE.set_function(disk_stat.disk_writes_persec)

    host_disk_total = Gauge('host_disk_total', '')
    host_disk_total.set_function(lambda: psutil.disk_usage("/").total)
    host_disk_free = Gauge('host_disk_free', '')
    host_disk_free.set_function(lambda: psutil.disk_usage("/").free)
    host_disk_used = Gauge('host_disk_used', '')
    host_disk_used.set_function(lambda: psutil.disk_usage("/").used)
    host_disk_percent = Gauge('host_disk_percent', '')
    host_disk_percent.set_function(lambda: psutil.disk_usage("/").percent)
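
# Why the iface=interface default in the network-interface loop above matters:
# Python closures capture variables, not values, so without the default every
# lambda would report the last interface at scrape time. A standalone
# illustration:
late = [lambda: i for i in range(3)]
print([f() for f in late])    # [2, 2, 2] - every closure sees the final i
bound = [lambda i=i: i for i in range(3)]
print([f() for f in bound])   # [0, 1, 2] - each lambda keeps its own value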
with c.count_exceptions():
    pass

# Count only one type of exception
with c.count_exceptions(ValueError):
    pass

# Gauges: used to track any value that can go up and down
# (e.g. temperature, CPU usage, in-progress requests)
# Can inc, dec, and set
g = Gauge('my_inprogress_requests', 'Description of gauge')
g.inc()  # Increment by 1
g.dec(10)  # Decrement by given value
g.set(4.2)  # Set to a given value

g.set_to_current_time()  # Set to current unixtime


# Another use case: Increment when entered, decrement when exited.
@g.track_inprogress()
def f():
    pass


with g.track_inprogress():
    pass

# A gauge can also take its value from a callback
d = Gauge('data_objects', 'Number of objects')
my_dict = {}
d.set_function(lambda: len(my_dict))
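
# A quick way to check what the callback-backed gauge exposes (a sketch using
# prometheus_client's generate_latest against the default registry):
from prometheus_client import generate_latest, REGISTRY

my_dict['a'] = 1
my_dict['b'] = 2
print(generate_latest(REGISTRY).decode())  # output includes: data_objects 2.0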
Example #33
TEMPERATURE_CELCIUS = Gauge('current_temperature_celcius',
                            'Current Temperature in Celcius')
# TEMPERATURE_FAHRENHEIT is referenced in the main block below but never
# defined in this snippet; an assumed definition is added here.
TEMPERATURE_FAHRENHEIT = Gauge('current_temperature_fahrenheit',
                               'Current Temperature in Fahrenheit')

tc = TemperatureSensor()
tc.setHubPort(1)
tc.setChannel(0)

tc.openWaitForAttachment(5000)


def c_to_f(c):
    return (9 / 5 * c + 32)


def get_temp(fahrenheit=False):
    temp_celcius = tc.getTemperature()
    if fahrenheit:
        temp = c_to_f(temp_celcius)
    else:
        temp = temp_celcius
    return round(temp, 1)


if __name__ == '__main__':
    start_http_server(8000)
    # Register the callbacks once; they are evaluated on every scrape, so
    # there is no need to re-attach them inside the loop.
    TEMPERATURE_FAHRENHEIT.set_function(lambda: get_temp(fahrenheit=True))
    TEMPERATURE_CELCIUS.set_function(lambda: get_temp(fahrenheit=False))
    while True:
        time.sleep(1)