Example #1
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division: the slice bound must be an int under Python 3

                    self.database_thread_cursor.executemany(
                        "insert into memory values(:date, :free, :used, :total, :swapped, :shared)",
                        self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into cpu values(:date, :id)",
                        self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into load values(:date, :one , :five, :fifteen)",
                        self.stats_load[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into network values(:date, :records)",
                        self.stats_network[0:middle])

                    log.info("Stats saved in database file.")
                    nrRow = int(
                        self.database_thread_cursor.execute(
                            "select count(*) from cpu").fetchone()[0])
                    if nrRow > self.max_rows_before_purge * 1.5:
                        self.database_thread_cursor.execute(
                            "DELETE FROM cpu WHERE collection_date IN (SELECT collection_date FROM cpu ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "DELETE FROM memory WHERE collection_date IN (SELECT collection_date FROM memory ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "DELETE FROM load WHERE collection_date IN (SELECT collection_date FROM load ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "DELETE FROM network WHERE collection_date IN (SELECT collection_date FROM network ORDER BY collection_date ASC LIMIT "
                            + str(nrRow - self.max_rows_before_purge) + ")")
                        log.debug(
                            "Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))
Example #2
 def on_pubsub_event(self, conn, event):
     """
     Trigger the callback for events.
     """
     try:
         node = event.getTag("event").getTag("items").getAttr("node")
         if node == self.nodename and self.subscriber_callback and event.getTo() == self.subscriber_jid:
             self.subscriber_callback(event)
     except Exception as ex:
         log.error("Error in on_pubsub_event: %s" % str(ex))
Example #3
 def recover(self, wait=False):
     """
     Get the current pubsub node and wait for the response. If it has not already been recovered, ask the server.
     @type wait: Boolean
     @param wait: if True, recovery is blocking (i.e. execution is suspended until the node has been recovered)
     @rtype: Boolean
     @return: True in case of success
     """
     try:
         return self.retrieve_items(wait=wait)
     except Exception as ex:
         log.error("PUBSUB: can't get node %s : %s" % (self.nodename, str(ex)))
         return False
Example #4
 def did_remove_item(self, conn, response, callback, user_info):
     """
     Triggered on response.
     """
     if response.getType() == "result":
         log.debug("PUBSUB: retract done. Answer is: %s" % str(response))
         ret = True
     else:
         log.error("PUBSUB: cannot retract item: %s" % response)
         ret = False
     if callback:
         return callback(response, user_info)
     return ret
Example #5
 def _did_configure(self, conn, resp):
     """
     Called when node has been configured.
     """
     try:
         if resp.getType() == "result":
             log.info("PUBSUB: pubsub node %s has been configured." % self.nodename)
             return True
         else:
             log.error("PUBSUB: can't configure pubsub: %s" % str(resp))
             return False
     except Exception as ex:
         log.error("PUBSUB: unable to configure pubsub node: %s" % str(ex))
Example #6
 def _did_delete(self, conn, resp):
     """
     Called after pubsub deletion.
     """
     try:
         if resp.getType() == "result":
             log.info("PUBSUB: pubsub node %s has been deleted." % self.nodename)
             return True
         else:
             log.error("PUBSUB: can't delete pubsub: %s" % str(resp))
             return False
     except Exception as ex:
         log.error("PUBSUB: unable to delete pubsub node: %s" % str(ex))
Example #7
 def _did_create(self, conn, resp):
     """
     Called after pubsub creation.
     """
     try:
         if resp.getType() == "result":
             log.info("PUBSUB: pubsub node %s has been created." % self.nodename)
             return self.recover(wait=True)
         else:
             log.error("PUBSUB: can't create pubsub: %s" % str(resp))
             return False
     except Exception as ex:
         log.error("PUBSUB: unable to create pubsub node: %s" % str(ex))
Example #8
 def did_publish_item(self, conn, response, callback, item):
     """
     Triggered on response.
     """
     log.debug("PUBSUB: item published is node %s" % self.nodename)
     if response.getType() == "result":
         item.setAttr("id", response.getTag("pubsub").getTag("publish").getTag("item").getAttr("id"))
         self.content.append(item)
         ret = True
     else:
         log.error("PUBSUB: cannot publish item: %s" % response)
         ret = False
     if callback:
         return callback(response)
     return ret
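
The callbacks in Examples #2 to #8 all follow the same pattern: a stanza is sent with xmpppy's SendAndCallForResponse, and the extra keyword arguments (callback, item, user_info, ...) are handed back to the handler when the answer arrives. Below is a minimal sketch of that wiring for did_publish_item. It assumes an already connected xmpp.Client and a pubsub service JID you supply; this is an illustration of the pattern, not Archipel's actual publishing code.

import xmpp

def publish_item(client, node, pubsub_jid, nodename, payload, callback=None):
    # Build <iq type='set'><pubsub><publish node='...'><item>payload</item></publish></pubsub></iq>
    item = xmpp.Node(tag="item")
    item.addChild(node=payload)
    publish = xmpp.Node(tag="publish", attrs={"node": nodename})
    publish.addChild(node=item)
    pubsub = xmpp.Node(tag="pubsub", attrs={"xmlns": "http://jabber.org/protocol/pubsub"})
    pubsub.addChild(node=publish)
    iq = xmpp.Iq(typ="set", to=pubsub_jid)
    iq.addChild(node=pubsub)
    # xmpppy invokes the registered handler as func(conn, response, **args), which is
    # how did_publish_item(self, conn, response, callback, item) gets its extra arguments.
    client.SendAndCallForResponse(iq, func=node.did_publish_item,
                                  args={"callback": callback, "item": item})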
Example #9
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        def connect():
            self.database_thread_connection = sqlite3.connect(self.database_file)
            self.database_thread_cursor = self.database_thread_connection.cursor()
        connect()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division: the slice bound must be an int under Python 3

                    self.database_thread_cursor.executemany("insert into memory values(:date, :free, :used, :total, :swapped, :shared)", self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany("insert into cpu values(:date, :id)", self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany("insert into load values(:date, :one , :five, :fifteen)", self.stats_load[0:middle])
                    self.database_thread_cursor.executemany("insert into network values(:date, :records)", self.stats_network[0:middle])

                    log.info("Stats saved in database file.")
                    nrRow = int(self.database_thread_cursor.execute("select count(*) from cpu").fetchone()[0])
                    if nrRow > self.max_rows_before_purge * 1.5:
                        self.database_thread_cursor.execute("DELETE FROM cpu WHERE collection_date IN (SELECT collection_date FROM cpu ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM memory WHERE collection_date IN (SELECT collection_date FROM memory ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM load WHERE collection_date IN (SELECT collection_date FROM load ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute("DELETE FROM network WHERE collection_date IN (SELECT collection_date FROM network ORDER BY collection_date ASC LIMIT " + str(nrRow - self.max_rows_before_purge) + ")")
                        log.debug("Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                if str(ex) == "disk I/O error":
                    log.error("Stat collection fails. Exception %s (try to reopenning it)" % str(ex))
                    connect()
                else:
                    log.error("Stat collection fails. Exception %s" % str(ex))
Example #10
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division: the slice bound must be an int under Python 3

                    self.database_thread_cursor.executemany("insert into memory values(:date, :free, :used, :total, :swapped, :shared)", self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany("insert into cpu values(:date, :id)", self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany("insert into load values(:date, :one , :five, :fifteen)", self.stats_load[0:middle])
                    self.database_thread_cursor.executemany("insert into network values(:date, :records)", self.stats_network[0:middle])

                    log.info("Stats saved in database file.")

                    if int(self.database_thread_cursor.execute("select count(*) from memory").fetchone()[0]) >= self.max_rows_before_purge * 2:
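                        # Note: with "=" only the first row of the subquery is used, so each purge
                        # removes only the rows sharing the single oldest collection_date; the
                        # IN (...) variant shown in the earlier examples deletes the whole batch.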
                        self.database_thread_cursor.execute("delete from cpu where collection_date=(select collection_date from cpu order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from memory where collection_date=(select collection_date from memory order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from load where collection_date=(select collection_date from load order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        self.database_thread_cursor.execute("delete from network where collection_date=(select collection_date from network order by collection_date asc limit "+ str(self.max_rows_before_purge) +")")
                        log.debug("Old stored stats have been purged from memory.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))
Example #11
    def run(self):
        """
        Overrides the superclass method. Runs the L{TNArchipelVirtualMachine} main loop.
        """
        self.database_thread_connection = sqlite3.connect(self.database_file)
        self.database_thread_cursor = self.database_thread_connection.cursor()
        while True:
            try:
                self.stats_CPU.append(self.get_cpu_stats())
                self.stats_memory.append(self.get_memory_stats())
                self.stats_load.append(self.get_load_stats())
                self.stats_network.append(self.get_network_stats())

                if len(self.stats_CPU) >= self.max_cached_rows:
                    middle = (self.max_cached_rows - 1) // 2  # floor division: the slice bound must be an int under Python 3

                    self.database_thread_cursor.executemany(
                        "insert into memory values(:date, :free, :used, :total, :swapped, :shared)",
                        self.stats_memory[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into cpu values(:date, :id)",
                        self.stats_CPU[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into load values(:date, :one , :five, :fifteen)",
                        self.stats_load[0:middle])
                    self.database_thread_cursor.executemany(
                        "insert into network values(:date, :records)",
                        self.stats_network[0:middle])

                    log.info("Stats saved in database file.")

                    row_count = int(self.database_thread_cursor.execute(
                        "select count(*) from memory").fetchone()[0])
                    if row_count >= self.max_rows_before_purge * 2:
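                        # Note: with "=" only the first row of the subquery is used, so each purge
                        # removes only the rows sharing the single oldest collection_date; the
                        # IN (...) variant shown in the earlier examples deletes the whole batch.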
                        self.database_thread_cursor.execute(
                            "delete from cpu where collection_date=(select collection_date from cpu order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from memory where collection_date=(select collection_date from memory order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from load where collection_date=(select collection_date from load order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        self.database_thread_cursor.execute(
                            "delete from network where collection_date=(select collection_date from network order by collection_date asc limit "
                            + str(self.max_rows_before_purge) + ")")
                        log.debug(
                            "Old stored stats have been purged from database.")

                    del self.stats_CPU[0:middle]
                    del self.stats_memory[0:middle]
                    del self.stats_load[0:middle]
                    del self.stats_network[0:middle]
                    log.info("Cached stats have been purged from memory.")

                    self.database_thread_connection.commit()

                time.sleep(self.collection_interval)
            except Exception as ex:
                log.error("Stat collection fails. Exception %s" % str(ex))