Example #1
 def __init__(self, database_file, collection_interval, max_rows_before_purge, max_cached_rows):
     """
     The constructor of the class.
     @type database_file: string
     @param database_file: the path of the database
     @type collection_interval: integer
     @param collection_interval: the interval between two collections
     @type max_rows_before_purge: integer
     @param max_rows_before_purge: max number of rows that can be stored in database
     @type max_cached_rows: integer
     @param max_cached_rows: max number of rows that are cached into memory
     """
     self.database_file          = database_file
     self.collection_interval    = collection_interval
     self.max_rows_before_purge  = max_rows_before_purge
     self.max_cached_rows        = max_cached_rows
     self.stats_CPU              = []
     self.stats_memory           = []
     self.stats_load             = []
     self.stats_network          = []
     self.current_record         = {}
     # uname prints its fields in a fixed order regardless of flag order:
     # kernel name (-s), kernel release (-r), machine (-m), operating system (-o).
     uname = subprocess.Popen(["uname", "-rsmo"], stdout=subprocess.PIPE).communicate()[0].split()
     self.uname_stats = {"kname": uname[0], "krelease": uname[1], "machine": uname[2], "os": uname[3]}
     self.database_query_connection = sqlite3.connect(self.database_file)
     self.cursor = self.database_query_connection.cursor()
     self.cursor.execute("create table if not exists cpu (collection_date date, idle int)")
     self.cursor.execute("create table if not exists memory (collection_date date, free integer, used integer, total integer, swapped integer)")
     self.cursor.execute("create table if not exists load (collection_date date, one float, five float, fifteen float)")
     self.cursor.execute("create table if not exists network (collection_date date, records text)")
     log.info("Database ready.")
     self.recover_stored_stats()
     Thread.__init__(self)
 def recover_stored_stats(self):
     """
     Recover info from database.
     """
     log.info("Recovering stored statistics. It may take a while...")
     self.cursor.execute("select * from cpu order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, idle = values
         self.stats_CPU.append({"date": date, "id": idle})
     self.stats_CPU.reverse()
     self.cursor.execute("select * from memory order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, free, used, total, swapped, shared = values
         self.stats_memory.append(
             {"date": date, "free": free, "used": used, "total": total, "swapped": swapped, "shared": shared}
         )
     self.stats_memory.reverse()
     self.cursor.execute("select * from load order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, one, five, fifteen = values
         self.stats_load.append({"date": date, "one": one, "five": five, "fifteen": fifteen})
     self.stats_load.reverse()
     self.cursor.execute("select * from network order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, records = values
         self.stats_network.append({"date": date, "records": records})
     self.stats_network.reverse()
     log.info("Statistics recovered.")
Example #3
 def _did_configure(self, conn, resp):
     """
     Called when node has been configured.
     """
     try:
         if resp.getType() == "result":
             log.info("PUBSUB: pubsub node %s has been configured." % self.nodename)
             return True
         else:
             log.error("PUBSUB: can't configure pubsub: %s" % str(resp))
             return False
     except Exception as ex:
         log.error("PUBSUB: unable to configure pubsub node: %s" % str(ex))
Example #4
 def _did_delete(self, conn, resp):
     """
     Called after pubsub deletion.
     """
     try:
         if resp.getType() == "result":
             log.info("PUBSUB: pubsub node %s has been deleted." % self.nodename)
             return True
         else:
             log.error("PUBSUB: can't delete pubsub: %s" % str(resp))
             return False
     except Exception as ex:
         log.error("PUBSUB: unable to delete pubsub node: %s" % str(ex))
Example #5
 def _did_create(self, conn, resp):
     """
     Called after pubsub creation.
     """
     try:
         if resp.getType() == "result":
             log.info("PUBSUB: pubsub node %s has been created." % self.nodename)
             return self.recover(wait=True)
         else:
             log.error("PUBSUB: can't create pubsub: %s" % str(resp))
             return False
     except Exception as ex:
         log.error("PUBSUB: unable to create pubsub node: %s" % str(ex))
Example #6
 def unsubscribe(self, jid):
     """
     Unsubscribe from a node.
     @type jid: xmpp.Protocol.JID
     @param jid: the JID of the entity to unsubscribe
     """
     self.subscriber_callback    = None
     self.subscriber_jid         = None
     iq                          = xmpp.Iq(typ="set", to=self.pubsubserver)
     pubsub                      = iq.addChild("pubsub", namespace=xmpp.protocol.NS_PUBSUB)
     pubsub.addChild("unsubscribe", attrs={"node": self.nodename, "jid": jid})
     self.xmppclient.UnregisterHandler('message', self.on_pubsub_event, ns=xmpp.protocol.NS_PUBSUB+"#event", typ="headline")
     log.info(str(iq))
     self.xmppclient.send(iq)
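The docstring above says jid is an xmpp.protocol.JID; it ends up as the jid attribute of the unsubscribe child element. A hypothetical call site follows (the variable node is an assumption for an instance of the pubsub wrapper class these methods belong to).
 # Hypothetical call; xmpp.protocol.JID is the JID class of the xmpppy library
 # already used by these examples. "node" is an assumed variable name.
 node.unsubscribe(xmpp.protocol.JID("controller@example.com"))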
Example #7
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        def connect():
            self.database_thread_connection = sqlite3.connect(self.database_file)
            self.database_thread_cursor = self.database_thread_connection.cursor()
        connect()
        while(1):
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division keeps the slice index an integer

                    self.database_thread_cursor.executemany("insert into memory values(:date, :free, :used, :total, :swapped, :shared)", self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany("insert into cpu values(:date, :id)", self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany("insert into load values(:date, :one , :five, :fifteen)", self.stats_load[0:middle])
                    self.database_thread_cursor.executemany("insert into network values(:date, :records)", self.stats_network[0:middle])

                    log.info("Stats saved in database file.")
                    nrRow = int(self.database_thread_cursor.execute("select count(*) from cpu").fetchone()[0])
                    if nrRow > self.max_rows_before_purge * 1.5:
                        self.database_thread_cursor.execute("DELETE FROM cpu WHERE collection_date IN (SELECT collection_date FROM cpu ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM memory WHERE collection_date IN (SELECT collection_date FROM memory ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM load WHERE collection_date IN (SELECT collection_date FROM load ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM network WHERE collection_date IN (SELECT collection_date FROM network ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        log.debug("Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                if str(ex) == "disk I/O error":
                    log.error("Stat collection fails. Exception %s (try to reopenning it)" % str(ex))
                    connect()
                else:
                    log.error("Stat collection fails. Exception %s" % str(ex))
Example #8
 def recover_stored_stats(self):
     """
     Recover info from database.
     """
     log.info("Recovering stored statistics. It may take a while...")
     self.cursor.execute("select * from cpu order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, idle = values
         self.stats_CPU.insert(0, {"date": date, "id": idle})
     self.cursor.execute("select * from memory order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, free, used, total, swapped = values
         self.stats_memory.insert(0, {"date": date, "free": free, "used": used, "total": total, "swapped": swapped})
     self.cursor.execute("select * from load order by collection_date desc limit %d" % self.max_cached_rows)
     for values in self.cursor:
         date, one, five, fifteen = values
         self.stats_load.insert(0, {"date": date, "one": one, "five": five, "fifteen": fifteen})
     log.info("statistics recovered")
Example #9
 def create(self, wait=False):
     """
     Create the node on the server if it does not already exist.
     @type wait: Boolean
     @param wait: if True, the recovery is blocking (i.e., execution waits until recovery completes)
     @rtype: Boolean
     @return: True in case of success
     """
     log.info("PUBSUB: trying to create pubsub node %s" % self.nodename)
     if self.recovered:
         raise Exception("PUBSUB: can't create. Node %s already exists." % self.nodename)
     iq = xmpp.Iq(typ="set", to=self.pubsubserver)
     iq.addChild(name="pubsub", namespace=xmpp.protocol.NS_PUBSUB).addChild(name="create", attrs={"node": self.nodename})
     if wait:
         resp = self.xmppclient.SendAndWaitForResponse(iq)
         return self._did_create(None, resp)
     else:
         self.xmppclient.SendAndCallForResponse(iq, func=self._did_create)
         return True
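As the docstring above notes, wait=True blocks on SendAndWaitForResponse and returns whatever _did_create returns, while the default form registers _did_create as a callback and returns True immediately. A hypothetical call site follows (the variable node is an assumption for an instance of the pubsub wrapper class).
 # Hypothetical call sites; "node" stands for an instance of the pubsub wrapper
 # class whose methods are shown in these examples.
 if node.create(wait=True):   # blocks until the XMPP server answers the IQ
     log.info("pubsub node created and recovered")
 node.create()                # non-blocking: _did_create runs when the reply arrives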
Example #10
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while(1):
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division keeps the slice index an integer

                    self.database_thread_cursor.executemany("insert into memory values(:date, :free, :used, :total, :swapped, :shared)", self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany("insert into cpu values(:date, :id)", self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany("insert into load values(:date, :one , :five, :fifteen)", self.stats_load[0:middle])
                    self.database_thread_cursor.executemany("insert into network values(:date, :records)", self.stats_network[0:middle])

                    log.info("Stats saved in database file.")

                    if int(self.database_thread_cursor.execute("select count(*) from memory").fetchone()[0]) >= self.max_rows_before_purge * 2:
                        self.database_thread_cursor.execute("delete from cpu where collection_date=(select collection_date from cpu order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from memory where collection_date=(select collection_date from memory order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from load where collection_date=(select collection_date from load order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from network where collection_date=(select collection_date from network order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        log.debug("Old stored stats have been purged from memory.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))
Example #11
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while (1):
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division keeps the slice index an integer

                    self.database_thread_cursor.executemany(
                        "insert into memory values(:date, :free, :used, :total, :swapped)",
                        self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into cpu values(:date, :id)",
                        self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into load values(:date, :one , :five, :fifteen)",
                        self.stats_load[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into network values(:date, :records)",
                        self.stats_network[0:middle])

                    log.info("Stats saved in database file.")

                    if int(self.database_thread_cursor.execute(
                            "select count(*) from memory").fetchone()[0]) >= self.max_rows_before_purge * 2:
                        self.database_thread_cursor.execute(
                            "delete from cpu where collection_date in (select collection_date from cpu order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from memory where collection_date in (select collection_date from memory order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from load where collection_date in (select collection_date from load order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from network where collection_date in (select collection_date from network order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        log.debug("Old stored stats have been purged from the database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))