Example #1
0
def main():
    """
    Command-line entry point.

    Parses the CLI options, drops effective-user privileges to --runas,
    optionally purges the queue, then consumes it: once when --sleep is 0,
    or forever with a --sleep pause between passes.

    Side effect: binds the parsed namespace to the module-level ``args``
    (other functions in this module presumably read it).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--sleep', required=False, default=0, type=float)
    parser.add_argument('--queue', required=False, default=default_queue, type=str)
    parser.add_argument('--runas', required=False, default=default_user, type=str)
    parser.add_argument('--purge', required=False, action='store_true', default=False)
    parser.add_argument('--noout', required=False, action='store_true', default=False)
    parser.add_argument('--num', required=False, default=0, type=int)
    global args
    args = parser.parse_args()

    # Drop privileges before touching the queue.
    seteuser(pwd.getpwnam(args.runas))

    mq = DQS(path=args.queue)
    try:
        if args.purge:
            mq.purge()
        if args.sleep > 0:
            # Daemon mode: poll the queue forever, pausing between passes.
            while True:
                consume_queue(mq, args.num)
                time.sleep(args.sleep)
        else:
            # One-shot mode.
            consume_queue(mq, args.num)

    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop the daemon loop: exit cleanly.
        raise SystemExit(0)
Example #2
0
class CMSSWMonCollector(Service):
    """
    Dashboard CMSSWMonCollector agent.

    Drains a local directory queue (DQS) of JSON-encoded monitoring
    messages and bulk-inserts them into a database table, falling back to
    one-by-one inserts (with a "rejected" table for broken messages) when
    the bulk insert fails.
    """
    _logger = logging.getLogger("dashboard.collector.CMSSWMonCollector")

    def __init__(self, name, configFile):
        """
        Initializer for the object.

        :param name: service name, forwarded to the Service base class
        :param configFile: configuration file read via self.param()
        """
        Service.__init__(self, name, configFile)

        # Purge the local queue of stale entries at most once per hour.
        self.PURGE_INTERVAL = 3600

        # DB tables where accepted and rejected messages are stored.
        self.transfers_db_table = self.param('transfersDBTable')
        self.transfers_db_table_rejected = self.param('rejectedDBTable')
        # Maximum number of messages collected before a bulk insert.
        self.buffer_size = int(self.param('bufferSize'))

        self.id = self.param('id')
        self.dbsection = self.param('dbsection')

        self._next_purge = time.time() + self.PURGE_INTERVAL

        # Try to attach to the local queue; on failure the error is only
        # logged, so run() will later fail when iterating the queue.
        try:
            self.localQueue = DQS(path=self.param('localQueue'))
        except Exception:
            self._logger.error("connection to the local queue failed")

    def run(self):
        """
        Main function of the service. While it is running it inserts messages
        from the messaging server into the database.
        """
        while self.status() is not None:
            names, bodies = [], []
            msgCount = 0
            for name in self.localQueue:
                # Only handle entries we manage to lock (other consumers
                # may be draining the same queue concurrently).
                if self.localQueue.lock(name):
                    msg = self.localQueue.get_message(name)
                    self.decode_message(msg, bodies)
                    names.append(name)
                    msgCount += 1

                    # Stop once a full buffer has been collected.
                    if msgCount >= self.buffer_size:
                        break

            successes, failures, elapsed_time, bulk = self.insert_messages(names, bodies)
            self._logger.info(
                "%d messages to insert for %s, %d successfully and %d failed in %d ms (bulk = %s)"
                % (msgCount, self.id, successes, failures, elapsed_time, str(bulk))
            )

            self.purge()

            # A partial buffer means the queue is (nearly) empty: sleep to
            # avoid busy-looping on an idle queue.
            if msgCount != self.buffer_size:
                time.sleep(5)

    def JSON_format(self, message):
        """
        Decode a JSON message into a python dict.

        Messages may carry a trailing ASCII End-of-Transmission (EOT, 0x04)
        character which must be stripped before decoding.
        """
        # Fix: the previous truthiness test on str.find() was wrong --
        # find() returns -1 (truthy) when EOT is absent and 0 (falsy) when
        # it is the first character. Splitting unconditionally is correct
        # in every case: without an EOT, split() returns the whole string.
        return json.loads(message.split(chr(4))[0])

    def delete_messages(self, names):
        """
        Remove the given (already inserted) entries from the local queue.
        """
        for name in names:
            self.localQueue.remove(name)

    def purge(self):
        """Purge stale queue entries, at most once per PURGE_INTERVAL."""
        if time.time() < self._next_purge:
            return
        # 60/60: presumably the (temporary, locked) max ages in seconds
        # accepted by DQS.purge() -- confirm against the dirq docs.
        self.localQueue.purge(60, 60)
        self._next_purge = time.time() + self.PURGE_INTERVAL

    def validate_length(self, bodies):
        """
        Move the message with the most keys to position 0, presumably
        because the bulk insert derives its column list from the first
        message (TODO confirm against XRootDDAO.insertMessages).
        """
        key_counts = [len(body.keys()) for body in bodies]
        widest = max(key_counts)
        if key_counts[0] < widest:
            idx = key_counts.index(widest)
            bodies[0], bodies[idx] = bodies[idx], bodies[0]
            self._logger.warning("swap message positions 0 and %s. Missing keys %s" % (idx, [x for x in bodies[0].keys() if x not in bodies[idx].keys()]))
        return bodies

    def insert_messages(self, names, bodies):
        """
        Insert the decoded messages and, on success, delete them from the
        local queue.

        A single bulk insert is attempted first; if it fails the messages
        are inserted one by one, with failures routed to the rejected
        table when one is configured.

        :returns: (successes, failures, elapsed_ms, was_bulk)
        :raises: re-raises any database/commit error after destroying the context
        """
        start = time.time()
        successes, failures, is_bulk = 0, 0, True

        ctx = None
        try:
            # Get a site DAO to work with
            ctx = DAOContext.getDAOContext(section=self.dbsection)
            dao = DAOFactory.getDAOFactory().getDAOObject(ctx, 'xrootd', 'XRootDDAO')

            # Try to make a bulk insert first.
            if bodies:
                try:
                    bodies = self.validate_length(bodies)
                    dao.insertMessages(bodies, self.transfers_db_table)
                    successes = len(bodies)
                except Exception as msg:
                    is_bulk = False
                    self._logger.warning("couldn't feed all the data: %s" % msg)
                    self._logger.warning("failed to insert %s messages. Inserting messages one by one" % len(bodies))

                    # Fall back: insert the messages one by one.
                    for body in bodies:
                        try:
                            dao.insertMessages(body, self.transfers_db_table)
                            successes += 1
                        except Exception as msg:
                            failures += 1

                            # Park the malformed message (plus the error text)
                            # in a table without any constraint, best-effort.
                            if self.transfers_db_table_rejected is not None:
                                try:
                                    body['exception'] = str(msg)
                                    dao.insertMessages(body, self.transfers_db_table_rejected)
                                except Exception:
                                    self._logger.warning("Couldn't feed data: %s" % msg)

            ctx.commit()
            self.delete_messages(names)

        except Exception as msg:
            # e.g. database downtime: log, release the context, re-raise.
            self._logger.error("%s" % msg)
            # Fix: guard the destroy -- getDAOContext() itself may have
            # raised, leaving ctx as None.
            if ctx is not None:
                ctx.destroy()
            # Fix: re-raise the original exception instead of a bare
            # "raise Exception", preserving the error and its traceback.
            raise
        elapsed_ms = int(1000 * (time.time() - start))
        return (successes, failures, elapsed_ms, is_bulk)

    def decode_message(self, message, bodies):
        """
        Decode one queue message and append the resulting dict to bodies.

        Malformed messages are logged and skipped (nothing is appended).
        """
        try:
            body = message.get_body()
            # Clean up known producer glitches before JSON decoding.
            body = body.replace(', ,', ',')
            # Fix (consistency/robustness): json.loads accepts NaN but not
            # the lowercase nan/-nan some producers emit; map them to null
            # so such messages no longer fail with ValueError.
            body = body.replace(':-nan,', ':null,').replace(':nan,', ':null,')
            msgDict = self.JSON_format(body)

            # Normalize the optional boolean 'fallback' flag to '1'/'0',
            # or '-' when the field is absent.
            try:
                if msgDict['fallback'] == True:
                    msgDict['fallback'] = '1'
                else:
                    msgDict['fallback'] = '0'
            except (KeyError, TypeError):
                msgDict['fallback'] = '-'

            # Convert the epoch timestamps to datetime objects.
            msgDict['start_date'] = datetime.utcfromtimestamp(int(msgDict['start_time']))
            msgDict['end_date'] = datetime.utcfromtimestamp(int(msgDict['end_time']))

            bodies.append(msgDict)

        except ValueError as msg:
            self._logger.warning("Impossible to decode the message: %s by JSON" % message)
            self._logger.error(msg)
        except Exception as msg:
            self._logger.warning("Exception: %s" % msg)
class CMSSWMonCollector(Service):
    """
    Dashboard CMSSWMonCollector agent.

    Drains a local directory queue (DQS) of JSON-encoded monitoring
    messages and bulk-inserts them into a database table, falling back to
    one-by-one inserts (with a "rejected" table for broken messages) when
    the bulk insert fails.
    """
    _logger = logging.getLogger("dashboard.collector.CMSSWMonCollector")

    def __init__(self, name, configFile):
        """
        Initializer for the object.

        :param name: service name, forwarded to the Service base class
        :param configFile: configuration file read via self.param()
        """
        Service.__init__(self, name, configFile)

        # Purge the local queue of stale entries at most once per hour.
        self.PURGE_INTERVAL = 3600

        # DB tables where accepted and rejected messages are stored.
        self.transfers_db_table = self.param('transfersDBTable')
        self.transfers_db_table_rejected = self.param('rejectedDBTable')
        # Maximum number of messages collected before a bulk insert.
        self.buffer_size = int(self.param('bufferSize'))

        self.id = self.param('id')
        self.dbsection = self.param('dbsection')

        self._next_purge = time.time() + self.PURGE_INTERVAL

        # Try to attach to the local queue; on failure the error is only
        # logged, so run() will later fail when iterating the queue.
        try:
            self.localQueue = DQS(path=self.param('localQueue'))
        except Exception:
            self._logger.error("connection to the local queue failed")

    def run(self):
        """
        Main function of the service. While it is running it inserts messages
        from the messaging server into the database.
        """
        while self.status() is not None:
            names, bodies = [], []
            msgCount = 0
            for name in self.localQueue:
                # Only handle entries we manage to lock (other consumers
                # may be draining the same queue concurrently).
                if self.localQueue.lock(name):
                    msg = self.localQueue.get_message(name)
                    self.decode_message(msg, bodies)
                    names.append(name)
                    msgCount += 1

                    # Stop once a full buffer has been collected.
                    if msgCount >= self.buffer_size:
                        break

            successes, failures, elapsed_time, bulk = self.insert_messages(names, bodies)
            self._logger.info(
                "%d messages to insert for %s, %d successfully and %d failed in %d ms (bulk = %s)"
                % (msgCount, self.id, successes, failures, elapsed_time, str(bulk))
            )

            self.purge()

            # A partial buffer means the queue is (nearly) empty: sleep to
            # avoid busy-looping on an idle queue.
            if msgCount != self.buffer_size:
                time.sleep(5)

    def JSON_format(self, message):
        """
        Decode a JSON message into a python dict.

        Messages may carry a trailing ASCII End-of-Transmission (EOT, 0x04)
        character which must be stripped before decoding.
        """
        # Fix: the previous truthiness test on str.find() was wrong --
        # find() returns -1 (truthy) when EOT is absent and 0 (falsy) when
        # it is the first character. Splitting unconditionally is correct
        # in every case: without an EOT, split() returns the whole string.
        return json.loads(message.split(chr(4))[0])

    def delete_messages(self, names):
        """
        Remove the given (already inserted) entries from the local queue.
        """
        for name in names:
            self.localQueue.remove(name)

    def purge(self):
        """Purge stale queue entries, at most once per PURGE_INTERVAL."""
        if time.time() < self._next_purge:
            return
        # 60/60: presumably the (temporary, locked) max ages in seconds
        # accepted by DQS.purge() -- confirm against the dirq docs.
        self.localQueue.purge(60, 60)
        self._next_purge = time.time() + self.PURGE_INTERVAL

    def validate_length(self, bodies):
        """
        Move the message with the most keys to position 0, presumably
        because the bulk insert derives its column list from the first
        message (TODO confirm against XRootDDAO.insertMessages).
        """
        key_counts = [len(body.keys()) for body in bodies]
        widest = max(key_counts)
        if key_counts[0] < widest:
            idx = key_counts.index(widest)
            bodies[0], bodies[idx] = bodies[idx], bodies[0]
            self._logger.warning("swap message positions 0 and %s. Missing keys %s" % (idx, [x for x in bodies[0].keys() if x not in bodies[idx].keys()]))
        return bodies

    def insert_messages(self, names, bodies):
        """
        Insert the decoded messages and, on success, delete them from the
        local queue.

        A single bulk insert is attempted first; if it fails the messages
        are inserted one by one, with failures routed to the rejected
        table when one is configured.

        :returns: (successes, failures, elapsed_ms, was_bulk)
        :raises: re-raises any database/commit error after destroying the context
        """
        start = time.time()
        successes, failures, is_bulk = 0, 0, True

        ctx = None
        try:
            # Get a site DAO to work with
            ctx = DAOContext.getDAOContext(section=self.dbsection)
            dao = DAOFactory.getDAOFactory().getDAOObject(ctx, 'xrootd', 'XRootDDAO')

            # Try to make a bulk insert first.
            if bodies:
                try:
                    bodies = self.validate_length(bodies)
                    dao.insertMessages(bodies, self.transfers_db_table)
                    successes = len(bodies)
                except Exception as msg:
                    is_bulk = False
                    self._logger.warning("couldn't feed all the data: %s" % msg)
                    self._logger.warning("failed to insert %s messages. Inserting messages one by one" % len(bodies))

                    # Fall back: insert the messages one by one.
                    for body in bodies:
                        try:
                            dao.insertMessages(body, self.transfers_db_table)
                            successes += 1
                        except Exception as msg:
                            failures += 1

                            # Park the malformed message (plus the error text)
                            # in a table without any constraint, best-effort.
                            if self.transfers_db_table_rejected is not None:
                                try:
                                    body['exception'] = str(msg)
                                    dao.insertMessages(body, self.transfers_db_table_rejected)
                                except Exception:
                                    self._logger.warning("Couldn't feed data: %s" % msg)

            ctx.commit()
            self.delete_messages(names)

        except Exception as msg:
            # e.g. database downtime: log, release the context, re-raise.
            self._logger.error("%s" % msg)
            # Fix: guard the destroy -- getDAOContext() itself may have
            # raised, leaving ctx as None.
            if ctx is not None:
                ctx.destroy()
            # Fix: re-raise the original exception instead of a bare
            # "raise Exception", preserving the error and its traceback.
            raise
        elapsed_ms = int(1000 * (time.time() - start))
        return (successes, failures, elapsed_ms, is_bulk)

    def decode_message(self, message, bodies):
        """
        Decode one queue message and append the resulting dict to bodies.

        Malformed messages are logged and skipped (nothing is appended).
        """
        try:
            body = message.get_body()
            # Clean up known producer glitches before JSON decoding.
            body = body.replace(', ,', ',')
            # json.loads accepts NaN but not the lowercase nan/-nan some
            # producers emit; map them to null so decoding succeeds.
            body = body.replace(':-nan,', ':null,').replace(':nan,', ':null,')
            msgDict = self.JSON_format(body)

            # Normalize the optional boolean 'fallback' flag to '1'/'0',
            # or '-' when the field is absent.
            try:
                if msgDict['fallback'] == True:
                    msgDict['fallback'] = '1'
                else:
                    msgDict['fallback'] = '0'
            except (KeyError, TypeError):
                msgDict['fallback'] = '-'

            # Convert the epoch timestamps to datetime objects.
            msgDict['start_date'] = datetime.utcfromtimestamp(int(msgDict['start_time']))
            msgDict['end_date'] = datetime.utcfromtimestamp(int(msgDict['end_time']))

            bodies.append(msgDict)

        except ValueError as msg:
            self._logger.warning("Impossible to decode the message: %s by JSON" % message)
            self._logger.error(msg)
        except Exception as msg:
            self._logger.warning("Exception: %s" % msg)