Example no. 1
0
def main():
    """Stream Zabbix history rows from the MySQL binlog and print them to
    stdout as OpenTSDB-style metric lines.

    An in-memory SQLite copy of the on-disk item cache provides fast
    itemid -> (key, host, proxy) lookups; internal bridge metrics are
    sampled periodically and the cache is reloaded from disk when lookup
    misses accumulate.  Returns 1 when pymysqlreplication is missing.
    """
    utils.drop_privileges()
    if BinLogStreamReader is None:
        utils.err("error: Python module `pymysqlreplication' is missing")
        return 1
    settings = zabbix_bridge_conf.get_settings()

    # Set blocking to True if you want to block and wait for the next event at
    # the end of the stream
    stream = BinLogStreamReader(connection_settings=settings['mysql'],
                                server_id=settings['slaveid'],
                                only_events=[WriteRowsEvent],
                                resume_stream=True,
                                blocking=True)

    db_filename = settings['sqlitedb']
    dbcache = sqlite3.connect(':memory:')
    cachecur = dbcache.cursor()
    # Bind the filename as a parameter instead of %-interpolating it into the
    # SQL text, so a quote character in the path cannot break the statement.
    cachecur.execute("ATTACH DATABASE ? AS dbfile", (db_filename,))
    cachecur.execute('CREATE TABLE zabbix_cache AS SELECT * FROM dbfile.zabbix_cache')
    cachecur.execute('CREATE UNIQUE INDEX uniq_zid on zabbix_cache (id)')

    # tcollector.zabbix_bridge namespace for internal Zabbix bridge metrics.
    log_pos = 0
    key_lookup_miss = 0
    sample_last_ts = int(time.time())
    last_key_lookup_miss = 0

    for binlogevent in stream:
        if binlogevent.schema == settings['mysql']['db']:
            table = binlogevent.table
            log_pos = binlogevent.packet.log_pos
            if table == 'history' or table == 'history_uint':
                for row in binlogevent.rows:
                    r = row['values']
                    itemid = r['itemid']
                    cachecur.execute('SELECT id, key, host, proxy FROM zabbix_cache WHERE id=?', (itemid,))
                    # Distinct name: the original rebound `row`, shadowing the
                    # binlog row the loop is still iterating over.
                    cached = cachecur.fetchone()
                    if cached is not None:
                        print("zbx.%s %d %s host=%s proxy=%s" % (cached[1], r['clock'], r['value'], cached[2], cached[3]))
                        if ((int(time.time()) - sample_last_ts) > settings['internal_metric_interval']): # Sample internal metrics @ 10s intervals
                            sample_last_ts = int(time.time())
                            print("tcollector.zabbix_bridge.log_pos %d %s" % (sample_last_ts, log_pos))
                            print("tcollector.zabbix_bridge.key_lookup_miss %d %s" % (sample_last_ts, key_lookup_miss))
                            print("tcollector.zabbix_bridge.timestamp_drift %d %s" % (sample_last_ts, (sample_last_ts - r['clock'])))
                            # Too many misses since the last sample: the item
                            # cache is stale, so reload it from the disk copy.
                            if ((key_lookup_miss - last_key_lookup_miss) > settings['dbrefresh']):
                                print("tcollector.zabbix_bridge.key_lookup_miss_reload %d %s" % (sample_last_ts, (key_lookup_miss - last_key_lookup_miss)))
                                cachecur.execute('DROP TABLE zabbix_cache')
                                cachecur.execute('CREATE TABLE zabbix_cache AS SELECT * FROM dbfile.zabbix_cache')
                                cachecur.execute('CREATE UNIQUE INDEX uniq_zid on zabbix_cache (id)')
                                last_key_lookup_miss = key_lookup_miss
                    else:
                        # TODO: Consider https://wiki.python.org/moin/PythonDecoratorLibrary#Retry
                        utils.err("error: Key lookup miss for %s" % (itemid))
                        key_lookup_miss += 1
                sys.stdout.flush()

    # NOTE(review): with blocking=True the stream loop normally never ends,
    # so these are only reached if the reader stops iterating.
    dbcache.close()
    stream.close()
Example no. 2
0
def main():
    """Stream Zabbix history rows from the MySQL binlog and print them to
    stdout as OpenTSDB-style metric lines.

    An in-memory SQLite copy of the on-disk item cache provides fast
    itemid -> (key, host, proxy) lookups; internal bridge metrics are
    sampled periodically and the cache is reloaded from disk when lookup
    misses accumulate.  Returns 1 when pymysqlreplication is missing.
    """
    utils.drop_privileges()
    if BinLogStreamReader is None:
        utils.err("error: Python module `pymysqlreplication' is missing")
        return 1
    settings = zabbix_bridge_conf.get_settings()

    # Set blocking to True if you want to block and wait for the next event at
    # the end of the stream
    stream = BinLogStreamReader(connection_settings=settings['mysql'],
                                server_id=settings['slaveid'],
                                only_events=[WriteRowsEvent],
                                resume_stream=True,
                                blocking=True)

    db_filename = settings['sqlitedb']
    dbcache = sqlite3.connect(':memory:')
    cachecur = dbcache.cursor()
    # Bind the filename as a parameter instead of %-interpolating it into the
    # SQL text, so a quote character in the path cannot break the statement.
    cachecur.execute("ATTACH DATABASE ? AS dbfile", (db_filename,))
    cachecur.execute('CREATE TABLE zabbix_cache AS SELECT * FROM dbfile.zabbix_cache')
    cachecur.execute('CREATE UNIQUE INDEX uniq_zid on zabbix_cache (id)')

    # tcollector.zabbix_bridge namespace for internal Zabbix bridge metrics.
    log_pos = 0
    key_lookup_miss = 0
    sample_last_ts = int(time.time())
    last_key_lookup_miss = 0

    for binlogevent in stream:
        if binlogevent.schema == settings['mysql']['db']:
            table = binlogevent.table
            log_pos = binlogevent.packet.log_pos
            if table == 'history' or table == 'history_uint':
                for row in binlogevent.rows:
                    r = row['values']
                    itemid = r['itemid']
                    cachecur.execute('SELECT id, key, host, proxy FROM zabbix_cache WHERE id=?', (itemid,))
                    # Distinct name: the original rebound `row`, shadowing the
                    # binlog row the loop is still iterating over.
                    cached = cachecur.fetchone()
                    if cached is not None:
                        # Parenthesized single-argument prints work as both a
                        # Python 2 print statement and a Python 3 call.
                        print("zbx.%s %d %s host=%s proxy=%s" % (cached[1], r['clock'], r['value'], cached[2], cached[3]))
                        if ((int(time.time()) - sample_last_ts) > settings['internal_metric_interval']): # Sample internal metrics @ 10s intervals
                            sample_last_ts = int(time.time())
                            print("tcollector.zabbix_bridge.log_pos %d %s" % (sample_last_ts, log_pos))
                            print("tcollector.zabbix_bridge.key_lookup_miss %d %s" % (sample_last_ts, key_lookup_miss))
                            print("tcollector.zabbix_bridge.timestamp_drift %d %s" % (sample_last_ts, (sample_last_ts - r['clock'])))
                            if ((key_lookup_miss - last_key_lookup_miss) > settings['dbrefresh']):
                                print("tcollector.zabbix_bridge.key_lookup_miss_reload %d %s" % (sample_last_ts, (key_lookup_miss - last_key_lookup_miss)))
                                cachecur.execute('DROP TABLE zabbix_cache')
                                cachecur.execute('CREATE TABLE zabbix_cache AS SELECT * FROM dbfile.zabbix_cache')
                                # BUG FIX: the original never recreated the
                                # unique index after the reload, so every
                                # lookup after the first refresh lost its
                                # index (the sibling implementation does this).
                                cachecur.execute('CREATE UNIQUE INDEX uniq_zid on zabbix_cache (id)')
                                last_key_lookup_miss = key_lookup_miss
                    else:
                        # TODO: Consider https://wiki.python.org/moin/PythonDecoratorLibrary#Retry
                        utils.err("error: Key lookup miss for %s" % (itemid))
                        key_lookup_miss += 1
                sys.stdout.flush()

    # NOTE(review): with blocking=True the stream loop normally never ends,
    # so these are only reached if the reader stops iterating.
    dbcache.close()
    stream.close()
Example no. 3
0
def main():
    """Refresh the on-disk SQLite item cache from the Zabbix MySQL database.

    Copies (itemid, key_, host, proxy-host) rows into the `zabbix_cache`
    table, rewriting characters matched by the configured `disallow`
    pattern to '_' so keys and hostnames are valid OpenTSDB metric/tag
    text.  Returns 1 when pymysql is not installed.
    """
    utils.drop_privileges()
    if pymysql is None:
        utils.err("error: Python module `pymysql' is missing")
        return 1
    settings = zabbix_bridge_conf.get_settings()

    db_filename = settings['sqlitedb']
    db_is_new = not os.path.exists(db_filename)
    dbcache = sqlite3.connect(db_filename)

    if db_is_new:
        utils.err("Zabbix bridge SQLite DB file does not exist; creating: %s" %
                  (db_filename))
        cachecur = dbcache.cursor()
        cachecur.execute('''CREATE TABLE zabbix_cache
             (id integer, key text, host text, proxy text)''')
        dbcache.commit()
    else:
        utils.err("Zabbix bridge SQLite DB exists @ %s" % (db_filename))

    dbzbx = pymysql.connect(**settings['mysql'])
    zbxcur = dbzbx.cursor()
    zbxcur.execute(
        "SELECT i.itemid, i.key_, h.host, h2.host AS proxy FROM items i JOIN hosts h ON i.hostid=h.hostid LEFT JOIN hosts h2 ON h2.hostid=h.proxy_hostid"
    )
    # Translation of item key_
    # Note: http://opentsdb.net/docs/build/html/user_guide/writing.html#metrics-and-tags
    disallow = re.compile(settings['disallow'])
    # Use the compiled pattern's bound sub() directly; re.sub(compiled, ...)
    # re-dispatches through the module on every row.
    scrub = disallow.sub
    cachecur = dbcache.cursor()
    print('tcollector.zabbix_bridge.deleterows %d %s' % (int(
        time.time()), cachecur.execute('DELETE FROM zabbix_cache').rowcount))
    rowcount = 0
    for row in zbxcur:
        cachecur.execute(
            '''INSERT INTO zabbix_cache(id, key, host, proxy) VALUES (?,?,?,?)''',
            (row[0], scrub('_', row[1]), scrub('_', row[2]), row[3]))
        rowcount += 1

    print('tcollector.zabbix_bridge.rows %d %s' % (int(time.time()), rowcount))
    zbxcur.close()
    dbcache.commit()

    dbzbx.close()
    dbcache.close()
Example no. 4
0
def main():
    """Rebuild the Zabbix bridge SQLite item cache from the Zabbix MySQL
    `items`/`hosts` tables, sanitizing names via the configured `disallow`
    regex.  Returns 1 when pymysql is not installed.
    """
    utils.drop_privileges()
    if pymysql is None:
        utils.err("error: Python module `pymysql' is missing")
        return 1
    settings = zabbix_bridge_conf.get_settings()

    cache_path = settings['sqlitedb']
    had_db = os.path.exists(cache_path)
    cache_db = sqlite3.connect(cache_path)

    if had_db:
        utils.err("Zabbix bridge SQLite DB exists @ %s" % (cache_path))
    else:
        utils.err("Zabbix bridge SQLite DB file does not exist; creating: %s" % (cache_path))
        create_cur = cache_db.cursor()
        create_cur.execute('''CREATE TABLE zabbix_cache
             (id integer, key text, host text, proxy text)''')
        cache_db.commit()

    zbx_db = pymysql.connect(**settings['mysql'])
    zbx_cur = zbx_db.cursor()
    zbx_cur.execute("SELECT i.itemid, i.key_, h.host, h2.host AS proxy FROM items i JOIN hosts h ON i.hostid=h.hostid LEFT JOIN hosts h2 ON h2.hostid=h.proxy_hostid")
    # Translation of item key_
    # Note: http://opentsdb.net/docs/build/html/user_guide/writing.html#metrics-and-tags
    disallow = re.compile(settings['disallow'])
    cache_cur = cache_db.cursor()
    deleted = cache_cur.execute('DELETE FROM zabbix_cache').rowcount
    print('tcollector.zabbix_bridge.deleterows %d %s' % (int(time.time()), deleted))
    inserted = 0
    for record in zbx_cur:
        params = (record[0],
                  re.sub(disallow, '_', record[1]),
                  re.sub(disallow, '_', record[2]),
                  record[3])
        cache_cur.execute('''INSERT INTO zabbix_cache(id, key, host, proxy) VALUES (?,?,?,?)''', params)
        inserted += 1

    print('tcollector.zabbix_bridge.rows %d %s' % (int(time.time()), inserted))
    zbx_cur.close()
    cache_db.commit()

    zbx_db.close()
    cache_db.close()
Example no. 5
0
def main():
    """Stream Zabbix history rows from the MySQL binlog and print them as
    OpenTSDB-style metric lines, resolving items through an in-process
    host map that is rebuilt on lookup misses.  Returns 1 when a required
    MySQL module is missing.
    """
    # BUG FIX: the original wrapped the body in
    # `with utils.lower_privileges(self._logger):`, but `self` does not exist
    # in a module-level function and raises NameError immediately.  Drop
    # privileges the same way the sibling collectors in this file do.
    utils.drop_privileges()
    if BinLogStreamReader is None:
        utils.err("error: Python module `pymysqlreplication' is missing")
        return 1
    if pymysql is None:
        utils.err("error: Python module `pymysql' is missing")
        return 1
    settings = zabbix_bridge_conf.get_settings()

    # Set blocking to True if you want to block and wait for the next event at
    # the end of the stream
    stream = BinLogStreamReader(connection_settings=settings['mysql'],
                                server_id=settings['slaveid'],
                                only_events=[WriteRowsEvent],
                                resume_stream=True,
                                blocking=True)

    hostmap = gethostmap(settings)  # Prime initial hostmap
    for binlogevent in stream:
        if binlogevent.schema == settings['mysql']['db']:
            table = binlogevent.table
            # (unused `log_pos = binlogevent.packet.log_pos` removed)
            if table == 'history' or table == 'history_uint':
                for row in binlogevent.rows:
                    r = row['values']
                    itemid = r['itemid']
                    try:
                        hm = hostmap[itemid]
                        # Parenthesized single-argument form works as both a
                        # Python 2 print statement and a Python 3 call.
                        print("zbx.%s %d %s host=%s proxy=%s" % (
                            hm['key'], r['clock'], r['value'], hm['host'],
                            hm['proxy']))
                    except KeyError:
                        # TODO: Consider https://wiki.python.org/moin/PythonDecoratorLibrary#Retry
                        # Unknown item: the map is stale; rebuild it and log.
                        hostmap = gethostmap(settings)
                        utils.err("error: Key lookup miss for %s" %
                                  (itemid))
                sys.stdout.flush()
                # if n seconds old, reload
                # settings['gethostmap_interval']

    stream.close()
def main():
    """Stream Zabbix history rows from the MySQL binlog and print them as
    OpenTSDB-style metric lines, resolving items through an in-process
    host map that is rebuilt on lookup misses.  Returns 1 when a required
    MySQL module is missing.
    """
    utils.drop_privileges()
    if BinLogStreamReader is None:
        utils.err("error: Python module `pymysqlreplication' is missing")
        return 1
    if pymysql is None:
        utils.err("error: Python module `pymysql' is missing")
        return 1
    settings = zabbix_bridge_conf.get_settings()

    # Set blocking to True if you want to block and wait for the next event at
    # the end of the stream
    stream = BinLogStreamReader(connection_settings=settings['mysql'],
                                server_id=settings['slaveid'],
                                only_events=[WriteRowsEvent],
                                resume_stream=True,
                                blocking=True)

    hostmap = gethostmap(settings) # Prime initial hostmap
    for binlogevent in stream:
        if binlogevent.schema == settings['mysql']['db']:
            table = binlogevent.table
            # (unused `log_pos = binlogevent.packet.log_pos` removed)
            if table == 'history' or table == 'history_uint':
                for row in binlogevent.rows:
                    r = row['values']
                    itemid = r['itemid']
                    try:
                        hm = hostmap[itemid]
                        # Parenthesized single-argument form works as both a
                        # Python 2 print statement and a Python 3 call.
                        print("zbx.%s %d %s host=%s proxy=%s" % (hm['key'], r['clock'], r['value'], hm['host'], hm['proxy']))
                    except KeyError:
                        # TODO: Consider https://wiki.python.org/moin/PythonDecoratorLibrary#Retry
                        # Unknown item: the map is stale; rebuild it and log.
                        hostmap = gethostmap(settings)
                        utils.err("error: Key lookup miss for %s" % (itemid))
                sys.stdout.flush()
                # if n seconds old, reload
                # settings['gethostmap_interval']

    stream.close()