def SendMessagetoMQ(self, msg_body, event):
    """Publish *msg_body* as JSON to RabbitMQ on routing key 'perfsonar.raw.<event>'.

    Messages larger than the configured limit (self.maxMQmessageSize, in KB)
    are discarded with a warning.  Publishing is retried up to 5 times; on
    each failure the pika channel is recreated before the next attempt.  If
    every attempt fails, the final error (with the last exception) is logged
    and the message is dropped.
    """
    # The configured limit is in KB; total_size() reports bytes.
    size_limit = self.maxMQmessageSize * 1000
    size_msg = self.total_size(msg_body)
    if size_msg > size_limit:
        self.log.warning(
            "Size of message body bigger than limit, discarding")
        return
    # Bug fix: pika's basic_publish return value is not a reliable success
    # signal (it returns None in pika >= 1.0), and the old `if not result`
    # check sat unreachably after `break`.  Track success with a flag.
    published = False
    # Bug fix: `e` is unbound after its except block in Python 3 (PEP 3110),
    # so the final error log raised NameError.  Keep our own reference.
    last_exc = None
    for _ in range(5):
        try:
            self.channel.basic_publish(
                exchange=self.config.get('rabbitmq', 'exchange'),
                routing_key='perfsonar.raw.' + event,
                body=json.dumps(msg_body),
                properties=pika.BasicProperties(delivery_mode=2))
            published = True
            break
        except Exception as e:
            last_exc = e
            self.log.exception(
                "Restarting pika connection,, exception was %s, " %
                (repr(e)))
            # Bug fix: the recreated channel was previously discarded, so
            # every retry reused the dead channel.  Keep the new one.
            self.channel = ps_collector.get_rabbitmq_connection(
                self.config).createChannel()
    if not published:
        self.log.error(
            "ERROR: Failed to send message to mq, exception was %s" %
            (repr(last_exc)))
# Beispiel #2 (score 0) — snippet separator from the scrape; commented out
# because bare text is a syntax error in Python.
    def __init__(self,
                 start=1600,
                 connect='iut2-net3.iu.edu',
                 metricName='org.osg.general.perfsonar-rabbitmq-simple',
                 config=None,
                 log=None):
        """Set up the uploader and open its RabbitMQ publishing channel.

        Common initialisation is delegated to Uploader; afterwards a channel
        is created through the shared ps_collector connection helper and the
        maximum MQ message size (in KB) is read from the config file.
        """
        Uploader.__init__(self, start, connect, metricName, config, log)

        # Open the publishing channel via the shared connection helper.
        connection = ps_collector.get_rabbitmq_connection(config)
        self.channel = connection.createChannel()

        # Maximum message size, in KB, taken from the configuration file.
        self.maxMQmessageSize = self.readConfigFile('mq-max-message-size')
# Beispiel #3 (score 0) — snippet separator from the scrape; commented out
# because bare text is a syntax error in Python.
    def postData(self, arguments, event_types, summaries, summaries_data,
                 metadata_key, datapoints):
        """Publish summaries and raw datapoints for *metadata_key* to RabbitMQ.

        Raw datapoints are posted in chunks of up to 200 timestamps per event
        type; after each chunk the per-event-type checkpoint times are
        advanced and written out so a restart does not re-send data.
        """
        arguments['org_metadata_key'] = metadata_key

        # Longest datapoint series across all event types (0 when empty).
        length_post = max(
            (len(points) for points in datapoints.values()), default=0)
        # Bug fix: the old `== 0` guard with a -1 initializer let an empty
        # *datapoints* dict slip past; use <= 0 to catch both cases.
        if length_post <= 0:
            self.log.info("No new datapoints skipping posting for efficiency")
            return

        # Now that we know we have data to send, actually connect upstream.
        if self.channel is None:
            self.channel = ps_collector.get_rabbitmq_connection(
                self.config).createChannel()

        if summaries_data:
            self.log.info("posting new summaries")
            self.publishSToMq(arguments, event_types, summaries,
                              summaries_data)

        step_size = 200
        self.log.info("Length of the post: %s " % length_post)
        for step in range(0, length_post, step_size):
            # Build this chunk: the next `step_size` timestamps (in sorted
            # order) from every event type's series.
            chunk_datapoints = {}
            for event_type, points in datapoints.items():
                chunk_datapoints[event_type] = {}
                if points:
                    for point in sorted(points)[step:step + step_size]:
                        chunk_datapoints[event_type][point] = points[point]
            self.publishRToMq(arguments, event_types, chunk_datapoints)

            # Updating the checkpoint files for each host/metric and metadata:
            # advance each event type's start time past the newest timestamp
            # just published (never move a checkpoint backwards).
            for event_type, chunk in chunk_datapoints.items():
                if chunk:
                    if event_type not in self.time_starts:
                        self.time_starts[event_type] = 0
                    next_time_start = max(chunk) + 1
                    if next_time_start > self.time_starts[event_type]:
                        self.time_starts[event_type] = int(next_time_start)

            self.writeCheckpoint(metadata_key, self.time_starts)
            self.log.info("posting NEW METADATA/DATA to rabbitMQ %s" %
                          metadata_key)
 def SendMessagetoMQ(self, msg_body, event):
     """Publish *msg_body* as JSON to RabbitMQ on routing key 'perfsonar.raw.<event>'.

     Publishing is retried up to 5 times; on each failure the channel is
     recreated through ps_collector before the next attempt.  If all 5
     attempts fail the message is dropped (only the per-attempt exceptions
     are logged).

     NOTE(review): an earlier revision discarded messages larger than
     self.maxMQmessageSize KB before publishing; that guard was commented
     out here and the now-unused size computation and dead `result`
     variable have been removed.  Confirm the size limit is enforced
     elsewhere (or reinstate the guard) before relying on this path.
     """
     for _ in range(5):
         try:
             self.channel.basic_publish(
                 exchange=self.config.get('rabbitmq', 'exchange'),
                 routing_key='perfsonar.raw.' + event,
                 body=json.dumps(msg_body),
                 properties=pika.BasicProperties(delivery_mode=2))
             break
         except Exception as e:
             self.log.exception(
                 "Restarting pika connection,, exception was %s, " %
                 (repr(e)))
             self.channel = ps_collector.get_rabbitmq_connection(
                 self.config).createChannel()