def __init__(self, parsed_url):
        super(MonascaPublisher, self).__init__(parsed_url)

        # list to hold metrics to be published in batch (behaves like queue)
        self.metric_queue = []
        self.time_of_last_batch_run = time.time()

        self.mon_client = mon_client.Client(parsed_url)
        self.mon_filter = MonascaDataFilter()

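        # flush queued metrics to Monasca on a fixed interval (see flush_batch)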
        batch_timer = loopingcall.FixedIntervalLoopingCall(self.flush_batch)
        batch_timer.start(interval=cfg.CONF.monasca.batch_polling_interval)

        if cfg.CONF.monasca.retry_on_failure:
            # list to hold metrics to be re-tried (behaves like queue)
            self.retry_queue = []
            # list to store retry attempts for metrics in retry_queue
            self.retry_counter = []
            retry_timer = loopingcall.FixedIntervalLoopingCall(
                self.retry_batch)
            retry_timer.start(
                interval=cfg.CONF.monasca.retry_interval,
                initial_delay=cfg.CONF.monasca.batch_polling_interval)

        if cfg.CONF.monasca.archive_on_failure:
            archive_path = cfg.CONF.monasca.archive_path
            if not os.path.exists(archive_path):
                archive_path = cfg.CONF.find_file(archive_path)

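            # archive metrics that could not be published, using the file publisher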
            self.archive_handler = publisher.get_publisher('file://' +
                                                           str(archive_path))
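
The publisher above leans on oslo.service's loopingcall.FixedIntervalLoopingCall for both its batch-flush and retry loops: the class runs a callback on a fixed period in a green thread. The self-contained sketch below shows that timer API on its own; the flush_queue callback and the 5-second interval are illustrative stand-ins, not values taken from the publisher.

from oslo_service import loopingcall

queue = []

def flush_queue():
    # illustrative callback: drain whatever was queued since the last run
    if queue:
        del queue[:]

# call flush_queue every 5 seconds; initial_delay postpones only the first call
timer = loopingcall.FixedIntervalLoopingCall(flush_queue)
timer.start(interval=5, initial_delay=None)

# later, on shutdown: stop the loop and wait for its green thread to exit
timer.stop()
timer.wait()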
Example #2
def add_timer(self,
              interval,
              callback,
              initial_delay=None,
              *args,
              **kwargs):
    # start a fixed-interval loop around `callback` and keep the handle
    # so the caller can stop it later via self.timers
    pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs)
    pulse.start(interval=interval, initial_delay=initial_delay)
    self.timers.append(pulse)
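
add_timer assumes its owner keeps the started loops in self.timers. The sketch below wraps it in a hypothetical TimerManager class (the class name and the stop_all helper are assumptions, not part of the original) to show how the stored handles could later be shut down.

from oslo_service import loopingcall


class TimerManager(object):
    """Hypothetical owner of the timers list used by add_timer."""

    def __init__(self):
        self.timers = []

    def add_timer(self, interval, callback, initial_delay=None,
                  *args, **kwargs):
        pulse = loopingcall.FixedIntervalLoopingCall(callback, *args, **kwargs)
        pulse.start(interval=interval, initial_delay=initial_delay)
        self.timers.append(pulse)

    def stop_all(self):
        # stop each running loop and wait for its green thread to finish
        for pulse in self.timers:
            pulse.stop()
            pulse.wait()


manager = TimerManager()
manager.add_timer(10, lambda: None)  # schedule a no-op callback every 10 seconds
manager.stop_all()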