def __init__(self, parsed_url): super(MonascaPublisher, self).__init__(parsed_url) # list to hold metrics to be published in batch (behaves like queue) self.metric_queue = [] self.time_of_last_batch_run = time.time() self.mon_client = mon_client.Client(parsed_url) self.mon_filter = MonascaDataFilter() batch_timer = loopingcall.FixedIntervalLoopingCall(self.flush_batch) batch_timer.start(interval=cfg.CONF.monasca.batch_polling_interval) if cfg.CONF.monasca.retry_on_failure: # list to hold metrics to be re-tried (behaves like queue) self.retry_queue = [] # list to store retry attempts for metrics in retry_queue self.retry_counter = [] retry_timer = loopingcall.FixedIntervalLoopingCall( self.retry_batch) retry_timer.start( interval=cfg.CONF.monasca.retry_interval, initial_delay=cfg.CONF.monasca.batch_polling_interval) if cfg.CONF.monasca.archive_on_failure: archive_path = cfg.CONF.monasca.archive_path if not os.path.exists(archive_path): archive_path = cfg.CONF.find_file(archive_path) self.archive_handler = publisher.get_publisher('file://' + str(archive_path))
def __init__(self, conf, parsed_url): super(MonascaPublisher, self).__init__(conf, parsed_url) # list to hold metrics to be published in batch (behaves like queue) self.metric_queue = [] self.time_of_last_batch_run = time.time() self.mon_client = mon_client.Client(self.conf, parsed_url) self.mon_filter = MonascaDataFilter(self.conf) # add flush_batch function to periodic callables periodic_callables = [ # The function to run + any automatically provided # positional and keyword arguments to provide to it # everytime it is activated. (self.flush_batch, (), {}), ] if self.conf.monasca.retry_on_failure: # list to hold metrics to be re-tried (behaves like queue) self.retry_queue = [] # list to store retry attempts for metrics in retry_queue self.retry_counter = [] # add retry_batch function to periodic callables periodic_callables.append((self.retry_batch, (), {})) if self.conf.monasca.archive_on_failure: archive_path = self.conf.monasca.archive_path if not os.path.exists(archive_path): archive_path = self.conf.find_file(archive_path) self.archive_handler = publisher.get_publisher( self.conf, 'file://' + str(archive_path), 'ceilometer.sample.publisher') # start periodic worker self.periodic_worker = periodics.PeriodicWorker(periodic_callables) self.periodic_thread = threading.Thread( target=self.periodic_worker.start) self.periodic_thread.daemon = True self.periodic_thread.start()
def test_client_url_correctness(self, ksclass_mock, monclient_mock):
    """Check that the endpoint is parsed out of a monasca:// URL."""
    keystone = ksclass_mock.return_value
    keystone.token.return_value = "token123"
    parsed = netutils.urlsplit("monasca://https://127.0.0.1:8080")
    client = monasca_client.Client(parsed)
    self.assertEqual("https://127.0.0.1:8080", client._endpoint)
def _get_client(self, ksclass_mock, monclient_mock):
    """Return a monasca client backed by a mocked Keystone token."""
    keystone = ksclass_mock.return_value
    keystone.token.return_value = "token123"
    endpoint = netutils.urlsplit("http://127.0.0.1:8080")
    return monasca_client.Client(endpoint)
def __init__(self, url):
    """Build the Monasca client and sample filter for the given URL."""
    split_url = netutils.urlsplit(url)
    self.mc = monasca_client.Client(split_url)
    self.mon_filter = MonascaDataFilter()
def test_client_url_correctness(self, monclient_mock):
    """Check that the endpoint is parsed out of a monasca:// URL."""
    target = netutils.urlsplit("monasca://https://127.0.0.1:8080")
    client = monasca_client.Client(self.CONF, target)
    self.assertEqual("https://127.0.0.1:8080", client._endpoint)
def _get_client(self, monclient_mock):
    """Return a monasca client pointed at a local test endpoint."""
    endpoint = netutils.urlsplit("http://127.0.0.1:8080")
    return monasca_client.Client(self.CONF, endpoint)