Example #1
 def did_remove_item(self, conn, response, callback, user_info):
     """
     Triggered on response.
     """
     log.debug("PUBSUB: retract done. Answer is: %s" % str(response))
     if callback:
         callback(response, user_info)
Example #2
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2

                    self.database_thread_cursor.executemany(
                        "insert into memory values(:date, :free, :used, :total, :swapped, :shared)",
                        self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into cpu values(:date, :id)",
                        self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into load values(:date, :one , :five, :fifteen)",
                        self.stats_load[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into network values(:date, :records)",
                        self.stats_network[0:middle])

                    log.info("Stats saved in database file.")
                    nrRow = int(
                        self.database_thread_cursor.execute(
                            "select count(*) from cpu").fetchone()[0])
                    if nrRow > self.max_rows_before_purge * 1.5:
                        self.database_thread_cursor.execute(
                            "DELETE FROM cpu WHERE collection_date IN (SELECT collection_date FROM cpu ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "DELETE FROM memory WHERE collection_date IN (SELECT collection_date FROM memory ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "DELETE FROM load WHERE collection_date IN (SELECT collection_date FROM load ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "DELETE FROM network WHERE collection_date IN (SELECT collection_date FROM network ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        log.debug(
                            "Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))
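The INSERT statements in the loop above bind each cached row by named placeholder, and the purge queries filter on a collection_date column. Below is a minimal sketch of the kind of SQLite schema this appears to assume; apart from collection_date, the column names are only inferred from the placeholder names, so the real project schema may differ.

import sqlite3

# Hypothetical schema inferred from the named placeholders used by executemany()
# and from the collection_date column referenced by the purge queries above.
SCHEMA = """
CREATE TABLE IF NOT EXISTS cpu     (collection_date, id);
CREATE TABLE IF NOT EXISTS memory  (collection_date, free, used, total, swapped, shared);
CREATE TABLE IF NOT EXISTS load    (collection_date, one, five, fifteen);
CREATE TABLE IF NOT EXISTS network (collection_date, records);
"""

connection = sqlite3.connect(":memory:")  # stand-in for self.database_file
connection.executescript(SCHEMA)

# executemany() binds dicts by key, so a row appended by get_cpu_stats() would
# need keys matching the placeholders, roughly like this illustrative sample:
connection.execute("insert into cpu values(:date, :id)",
                   {"date": "2024-01-01 00:00:00", "id": 12.5})
connection.commit()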
Example #3
 def did_publish_item(self, conn, response, callback, item):
     """
     Triggered on response.
     """
     log.debug("PUBSUB: item published is node %s" % self.nodename)
     if response.getType() == "result":
         item.setAttr("id", response.getTag("pubsub").getTag("publish").getTag("item").getAttr("id"))
         self.content.append(item)
     if callback:
         callback(response)
Example #4
 def get_collected_stats(self, limit=1):
     """
     This method returns the current L{TNArchipelVirtualMachine} instance.
     @type limit: integer
     @param limit: the max number of row to get
     @rtype: TNArchipelVirtualMachine
     @return: the L{TNArchipelVirtualMachine} instance
     """
     log.debug("STATCOLLECTOR: Retrieving last " + str(limit) +
               " recorded stats data for sending.")
     try:
         uptime = self.get_uptime()
         uptime_stats = {
             "up": "%dd %dh" % (uptime[0], uptime[1])
         }  #TODO: it's obvious it would be better to not do this
     except Exception as ex:
         raise Exception("Unable to get uptime.", ex)
     try:
         acpu = self.stats_CPU[-limit:]
     except Exception as ex:
         raise Exception("Unable to get CPU stats.", ex)
     try:
         amem = self.stats_memory[-limit:]
     except Exception as ex:
         raise Exception("Unable to get memory.", ex)
     try:
         anetwork = self.stats_network[-limit:]
     except Exception as ex:
         raise Exception("Unable to get networks.", ex)
     try:
         adisk = sorted(self.get_disk_stats(), key=lambda x: x["mount"])
         totalDisk = self.get_disk_total()
     except Exception as ex:
         raise Exception("Unable to get disks information.", ex)
     try:
         aload = self.stats_load[-limit:]
     except Exception as ex:
         raise Exception("Unable to get disks information.", ex)
     if limit > 1:
         acpu.reverse()
         amem.reverse()
         aload.reverse()
         anetwork.reverse()
     return {
         "cpu": acpu,
         "memory": amem,
         "disk": adisk,
         "totaldisk": totalDisk,
         "load": aload,
         "uptime": uptime_stats,
         "uname": self.uname_stats,
         "network": anetwork
     }
Example #5
 def get_collected_stats(self, limit=1):
     """
     This method returns the current L{TNArchipelVirtualMachine} instance.
     @type limit: integer
     @param limit: the max number of row to get
     @rtype: TNArchipelVirtualMachine
     @return: the L{TNArchipelVirtualMachine} instance
     """
     log.debug("STATCOLLECTOR: Retrieving last " + str(limit) + " recorded stats data for sending.")
     try:
         uptime = self.get_uptime()
         uptime_stats = {
             "up": "%dd %dh" % (uptime[0], uptime[1])
         }  # TODO: it's obvious it would be better to not do this
     except Exception as ex:
         raise Exception("Unable to get uptime.", ex)
     try:
         acpu = self.stats_CPU[-limit:]
     except Exception as ex:
         raise Exception("Unable to get CPU stats.", ex)
     try:
         amem = self.stats_memory[-limit:]
     except Exception as ex:
         raise Exception("Unable to get memory.", ex)
     try:
         anetwork = self.stats_network[-limit:]
     except Exception as ex:
         raise Exception("Unable to get networks.", ex)
     try:
         adisk = sorted(self.get_disk_stats(), key=lambda x: x["mount"])
         totalDisk = self.get_disk_total()
     except Exception as ex:
         raise Exception("Unable to get disks information.", ex)
     try:
         aload = self.stats_load[-limit:]
     except Exception as ex:
         raise Exception("Unable to get disks information.", ex)
     if limit > 1:
         acpu.reverse()
         amem.reverse()
         aload.reverse()
         anetwork.reverse()
     return {
         "cpu": acpu,
         "memory": amem,
         "disk": adisk,
         "totaldisk": totalDisk,
         "load": aload,
         "uptime": uptime_stats,
         "uname": self.uname_stats,
         "network": anetwork,
     }
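The keys of the dictionary returned by get_collected_stats() come directly from its return statement. A short, hypothetical consumption sketch (the collector object and how it is constructed are assumed, not part of the example):

# `collector` is a hypothetical stand-in for the object exposing get_collected_stats().
stats = collector.get_collected_stats(limit=5)

print(stats["uptime"]["up"])            # formatted as "%dd %dh", e.g. "3d 7h"
for cpu_sample in stats["cpu"]:         # newest sample first when limit > 1
    print(cpu_sample)
for disk_entry in stats["disk"]:        # disk entries are sorted by their "mount" key
    print(disk_entry["mount"])
print(stats["totaldisk"], stats["load"], stats["memory"], stats["network"], stats["uname"])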
Example #6
 def did_remove_item(self, conn, response, callback, user_info):
     """
     Triggered on response.
     """
     if response.getType() == "result":
         log.debug("PUBSUB: retract done. Answer is: %s" % str(response))
         ret = True
     else:
         log.error("PUBSUB: cannot retract item: %s" % response)
         ret = False
     if callback:
         return callback(response, user_info)
     return ret
Example #7
 def did_publish_item(self, conn, response, callback, item):
     """
     Triggered on response.
     """
     log.debug("PUBSUB: item published is node %s" % self.nodename)
     if response.getType() == "result":
         item.setAttr("id", response.getTag("pubsub").getTag("publish").getTag("item").getAttr("id"))
         self.content.append(item)
         ret = True
     else:
         log.error("PUBSUB: cannot publish item: %s" % response)
         ret = False
     if callback:
         return callback(response)
     return ret
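The did_publish_item() and did_remove_item() handlers above take the reply stanza plus extra keyword arguments (callback, item or user_info). That matches the calling convention of xmpppy's SendAndCallForResponse(), which forwards its args dictionary to the handler when the reply arrives; the sketch below assumes that API and uses hypothetical connection and node objects.

import xmpp

def on_published(response):
    # user-supplied callback, invoked by did_publish_item() with the reply stanza
    print("publish answered with type %s" % response.getType())

item = xmpp.Node("item")   # placeholder payload; the real item is built elsewhere
iq = xmpp.Iq(typ="set")    # placeholder IQ; the real pubsub publish stanza is built elsewhere

# `connection` and `node` are hypothetical stand-ins for the XMPP client and the
# pubsub node object the handlers are defined on.
connection.SendAndCallForResponse(iq, func=node.did_publish_item,
                                  args={"callback": on_published, "item": item})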
Example #8
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        def connect():
            self.database_thread_connection = sqlite3.connect(self.database_file)
            self.database_thread_cursor = self.database_thread_connection.cursor()
        connect()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2

                    self.database_thread_cursor.executemany("insert into memory values(:date, :free, :used, :total, :swapped, :shared)", self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany("insert into cpu values(:date, :id)", self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany("insert into load values(:date, :one , :five, :fifteen)", self.stats_load[0:middle])
                    self.database_thread_cursor.executemany("insert into network values(:date, :records)", self.stats_network[0:middle])

                    log.info("Stats saved in database file.")
                    nrRow = int(self.database_thread_cursor.execute("select count(*) from cpu").fetchone()[0])
                    if nrRow > self.max_rows_before_purge * 1.5:
                        self.database_thread_cursor.execute("DELETE FROM cpu WHERE collection_date IN (SELECT collection_date FROM cpu ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM memory WHERE collection_date IN (SELECT collection_date FROM memory ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM load WHERE collection_date IN (SELECT collection_date FROM load ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM network WHERE collection_date IN (SELECT collection_date FROM network ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        log.debug("Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                if str(ex) == "disk I/O error":
                    log.error("Stat collection fails. Exception %s (try to reopenning it)" % str(ex))
                    connect()
                else:
                    log.error("Stat collection fails. Exception %s" % str(ex))
Example #9
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2

                    self.database_thread_cursor.executemany("insert into memory values(:date, :free, :used, :total, :swapped, :shared)", self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany("insert into cpu values(:date, :id)", self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany("insert into load values(:date, :one , :five, :fifteen)", self.stats_load[0:middle])
                    self.database_thread_cursor.executemany("insert into network values(:date, :records)", self.stats_network[0:middle])

                    log.info("Stats saved in database file.")

                    if int(self.database_thread_cursor.execute("select count(*) from memory").fetchone()[0]) >= self.max_rows_before_purge * 2:
                        self.database_thread_cursor.execute("delete from cpu where collection_date=(select collection_date from cpu order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from memory where collection_date=(select collection_date from memory order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from load where collection_date=(select collection_date from load order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from network where collection_date=(select collection_date from network order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        log.debug("Old stored stats have been purged from memory.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))
Example #10
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2

                    self.database_thread_cursor.executemany(
                        "insert into memory values(:date, :free, :used, :total, :swapped, :shared)",
                        self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into cpu values(:date, :id)",
                        self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into load values(:date, :one , :five, :fifteen)",
                        self.stats_load[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into network values(:date, :records)",
                        self.stats_network[0:middle])

                    log.info("Stats saved in database file.")

                    if int(self.database_thread_cursor.execute(
                            "select count(*) from memory").fetchone()[0]) >= self.max_rows_before_purge * 2:
                        self.database_thread_cursor.execute(
                            "delete from cpu where collection_date=(select collection_date from cpu order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from memory where collection_date=(select collection_date from memory order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from load where collection_date=(select collection_date from load order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from network where collection_date=(select collection_date from network order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        log.debug(
                            "Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))
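Since each run() above overrides a superclass method and opens its own SQLite connection (sqlite3 objects cannot be shared across threads by default), the collector is presumably a threading.Thread subclass. A minimal sketch of how such a collector might be declared and started, with every constructor parameter and default assumed rather than taken from the project:

import threading
import time

class StatsCollector(threading.Thread):
    """Hypothetical stand-in for the collector class the run() examples belong to."""

    def __init__(self, database_file, collection_interval=5,
                 max_cached_rows=50, max_rows_before_purge=500):
        threading.Thread.__init__(self)
        self.daemon = True                      # don't keep the interpreter alive on exit
        self.database_file = database_file
        self.collection_interval = collection_interval
        self.max_cached_rows = max_cached_rows
        self.max_rows_before_purge = max_rows_before_purge
        self.stats_CPU, self.stats_memory = [], []
        self.stats_load, self.stats_network = [], []

    def run(self):
        # the body would be one of the run() implementations shown above
        while True:
            time.sleep(self.collection_interval)

collector = StatsCollector("/tmp/stats.sqlite3")
collector.start()                               # run() executes in its own thread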