def test_write_marker_to_db_marker_exists(self):
    """A second marker write must update the single existing DumperMarker row
    in place rather than creating a new one."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in self.execution_apis]
    max_timestamp = max(timestamps)
    first_marker_db = dumper._write_marker_to_db(max_timestamp)
    second_marker_db = dumper._write_marker_to_db(
        max_timestamp + datetime.timedelta(hours=1))
    markers = DumperMarker.get_all()
    # Only one marker document should ever exist; both writes hit the same id.
    self.assertEqual(len(markers), 1)
    final_marker_id = markers[0].id
    self.assertEqual(first_marker_db.id, final_marker_id)
    self.assertEqual(second_marker_db.id, final_marker_id)
    self.assertEqual(markers[0].marker, second_marker_db.marker)
    # assertGreater reports both operands on failure, unlike assertTrue(a > b).
    self.assertGreater(second_marker_db.updated_at, first_marker_db.updated_at)
def test_write_to_disk_empty_queue(self):
    """Writing with nothing queued should be a no-op that reports zero files."""
    dumper = Dumper(queue=queue.Queue(), export_dir='/tmp',
                    file_prefix='st2-stuff-', file_format='json')
    # We just make sure this doesn't blow up.
    files_written = dumper._write_to_disk()
    self.assertEqual(files_written, 0)
def test_write_to_disk_empty_queue(self):
    """An empty pending queue must not crash the writer; it returns 0."""
    empty_dumper = Dumper(queue=queue.Queue(), export_dir='/tmp',
                          file_prefix='st2-stuff-', file_format='json')
    # We just make sure this doesn't blow up.
    self.assertEqual(empty_dumper._write_to_disk(), 0)
def test_update_marker(self):
    """The marker should advance to the max end_timestamp of each batch,
    and each advance should be persisted via _write_marker_to_db."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    # Batch 1
    batch = self.execution_apis[0:5]
    new_marker = dumper._update_marker(batch)
    self.assertIsNotNone(new_marker)
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in batch]
    max_timestamp = max(timestamps)
    self.assertEqual(new_marker, max_timestamp)
    # Batch 2: use the second half of the executions so this actually
    # exercises a different batch (the original re-used batch 1's slice,
    # which made the second check redundant).
    batch = self.execution_apis[5:10]
    new_marker = dumper._update_marker(batch)
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in batch]
    max_timestamp = max(timestamps)
    self.assertEqual(new_marker, max_timestamp)
    dumper._write_marker_to_db.assert_called_with(new_marker)
def __init__(self, connection, queues):
    """Wire up the exporter: an in-memory buffer of pending executions plus
    a Dumper that flushes that buffer to the configured dump directory."""
    super(ExecutionsExporter, self).__init__(connection, queues)
    # Thread handle is populated later when consumption starts.
    self._consumer_thread = None
    self.pending_executions = queue.Queue()
    self._dumper = Dumper(queue=self.pending_executions,
                          export_dir=cfg.CONF.exporter.dump_dir)
def test_update_marker_out_of_order_batch(self):
    """If the persisted marker is ahead of the whole batch, the marker must
    be rolled back to the batch's max end_timestamp and re-persisted."""
    executions_queue = self.get_queue()
    dumper = Dumper(
        queue=executions_queue,
        export_dir="/tmp",
        batch_size=5,
        max_files_per_sleep=1,
        file_prefix="st2-stuff-",
        file_format="json",
    )
    timestamps = [
        isotime.parse(execution.end_timestamp)
        for execution in self.execution_apis
    ]
    max_timestamp = max(timestamps)
    # set dumper persisted timestamp to something less than min timestamp in the batch
    test_timestamp = max_timestamp + datetime.timedelta(hours=1)
    dumper._persisted_marker = test_timestamp
    new_marker = dumper._update_marker(self.execution_apis)
    # assertLess reports both operands on failure, unlike assertTrue(a < b).
    self.assertLess(new_marker, test_timestamp)
    # Assert we rolled back the marker.
    self.assertEqual(dumper._persisted_marker, max_timestamp)
    self.assertEqual(new_marker, max_timestamp)
    dumper._write_marker_to_db.assert_called_with(new_marker)
def test_get_file_name(self):
    """Generated file names live under a per-day directory and carry the
    configured prefix and format suffix."""
    dumper = Dumper(queue=self.get_queue(), export_dir='/tmp',
                    file_prefix='st2-stuff-', file_format='json')
    generated = dumper._get_file_name()
    today = date_utils.get_datetime_utc_now().strftime('%Y-%m-%d')
    expected_prefix = '/tmp/' + today + '/st2-stuff-'
    self.assertTrue(generated.startswith(expected_prefix))
    self.assertTrue(generated.endswith('json'))
def test_get_file_name(self):
    """The dump file path is <export_dir>/<UTC date>/<prefix>...<format>."""
    dumper = Dumper(queue=self.get_queue(), export_dir='/tmp',
                    file_prefix='st2-stuff-', file_format='json')
    path = dumper._get_file_name()
    stamp = date_utils.get_datetime_utc_now().strftime('%Y-%m-%d')
    self.assertTrue(path.startswith('/tmp/' + stamp + '/st2-stuff-'))
    self.assertTrue(path.endswith('json'))
def test_get_batch_batch_size_greater_than_actual(self):
    """When batch_size exceeds the queue depth, a batch drains everything."""
    executions_queue = self.get_queue()
    available = executions_queue.qsize()
    self.assertTrue(available > 0)
    # Ask for twice what is queued; we should get exactly what was queued.
    dumper = Dumper(queue=executions_queue, batch_size=2 * available,
                    export_dir='/tmp')
    self.assertEqual(len(dumper._get_batch()), available)
def test_get_batch_batch_size_greater_than_actual(self):
    """A batch request larger than the queue returns all queued items."""
    pending = self.get_queue()
    depth = pending.qsize()
    self.assertTrue(depth > 0)
    dumper = Dumper(queue=pending, batch_size=depth * 2, export_dir='/tmp')
    batch = dumper._get_batch()
    self.assertEqual(len(batch), depth)
def test_write_to_disk(self):
    """With batch_size=1, one file is written per batch, capped at
    max_files_per_sleep per call."""
    executions_queue = self.get_queue()
    max_files_per_sleep = 5
    dumper = Dumper(queue=executions_queue, export_dir='/tmp', batch_size=1,
                    max_files_per_sleep=max_files_per_sleep,
                    file_prefix='st2-stuff-', file_format='json')
    # We just make sure this doesn't blow up.
    files_written = dumper._write_to_disk()
    self.assertEqual(files_written, max_files_per_sleep)
def test_write_to_disk(self):
    """One call to _write_to_disk writes at most max_files_per_sleep files."""
    pending = self.get_queue()
    file_cap = 5
    dumper = Dumper(queue=pending,
                    export_dir='/tmp',
                    batch_size=1,
                    max_files_per_sleep=file_cap,
                    file_prefix='st2-stuff-',
                    file_format='json')
    # We just make sure this doesn't blow up.
    self.assertEqual(dumper._write_to_disk(), file_cap)
def test_start_stop_dumper(self):
    """start() followed by stop() should round-trip cleanly while batches
    are being flushed in the background."""
    executions_queue = self.get_queue()
    pause = 0.01
    dumper = Dumper(queue=executions_queue, sleep_interval=pause,
                    export_dir='/tmp', batch_size=1, max_files_per_sleep=5,
                    file_prefix='st2-stuff-', file_format='json')
    dumper.start()
    # Call stop after at least one batch was written to disk.
    eventlet.sleep(10 * pause)
    dumper.stop()
def test_get_batch_batch_size_lesser_than_actual(self):
    """When batch_size is below queue depth, exactly batch_size items are
    pulled off the queue."""
    pending = self.get_queue()
    depth = pending.qsize()
    self.assertTrue(depth > 0)
    half = int(depth / 2)
    dumper = Dumper(queue=pending, batch_size=half, export_dir="/tmp")
    self.assertEqual(len(dumper._get_batch()), half)
def test_write_marker_to_db(self):
    """The marker is persisted as an ISO8601 string that round-trips back
    to the original timestamp."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in self.execution_apis]
    max_timestamp = max(timestamps)
    marker_db = dumper._write_marker_to_db(max_timestamp)
    persisted_marker = marker_db.marker
    # assertIsInstance gives a clearer failure than assertTrue(isinstance(...)).
    self.assertIsInstance(persisted_marker, six.string_types)
    self.assertEqual(isotime.parse(persisted_marker), max_timestamp)
def test_get_file_name(self):
    """Names are built from export_dir, the current UTC date, prefix, format."""
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir="/tmp",
        file_prefix="st2-stuff-",
        file_format="json",
    )
    name = dumper._get_file_name()
    utc_day = date_utils.get_datetime_utc_now().strftime("%Y-%m-%d")
    head = "/tmp/" + utc_day + "/st2-stuff-"
    self.assertTrue(name.startswith(head))
    self.assertTrue(name.endswith("json"))
def test_write_marker_to_db(self):
    """Persisting a marker stores it as a string whose parsed value equals
    the timestamp that was written."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in self.execution_apis]
    max_timestamp = max(timestamps)
    marker_db = dumper._write_marker_to_db(max_timestamp)
    persisted_marker = marker_db.marker
    # assertIsInstance gives a clearer failure than assertTrue(isinstance(...)).
    self.assertIsInstance(persisted_marker, six.string_types)
    self.assertEqual(isotime.parse(persisted_marker), max_timestamp)
def test_write_marker_to_db_marker_exists(self):
    """Re-writing the marker updates the one existing DumperMarker document
    instead of inserting a second one."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in self.execution_apis]
    max_timestamp = max(timestamps)
    first_marker_db = dumper._write_marker_to_db(max_timestamp)
    second_marker_db = dumper._write_marker_to_db(
        max_timestamp + datetime.timedelta(hours=1))
    markers = DumperMarker.get_all()
    self.assertEqual(len(markers), 1)
    final_marker_id = markers[0].id
    self.assertEqual(first_marker_db.id, final_marker_id)
    self.assertEqual(second_marker_db.id, final_marker_id)
    self.assertEqual(markers[0].marker, second_marker_db.marker)
    # assertGreater reports both operands on failure, unlike assertTrue(a > b).
    self.assertGreater(second_marker_db.updated_at, first_marker_db.updated_at)
def test_update_marker_out_of_order_batch(self):
    """A persisted marker newer than the whole batch must be rolled back to
    the batch's max end_timestamp and written to the DB."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in self.execution_apis]
    max_timestamp = max(timestamps)
    # set dumper persisted timestamp to something less than min timestamp in the batch
    test_timestamp = max_timestamp + datetime.timedelta(hours=1)
    dumper._persisted_marker = test_timestamp
    new_marker = dumper._update_marker(self.execution_apis)
    # assertLess reports both operands on failure, unlike assertTrue(a < b).
    self.assertLess(new_marker, test_timestamp)
    # Assert we rolled back the marker.
    self.assertEqual(dumper._persisted_marker, max_timestamp)
    self.assertEqual(new_marker, max_timestamp)
    dumper._write_marker_to_db.assert_called_with(new_marker)
def test_update_marker(self):
    """Each processed batch should move the marker to that batch's max
    end_timestamp and persist it."""
    executions_queue = self.get_queue()
    dumper = Dumper(queue=executions_queue,
                    export_dir='/tmp', batch_size=5,
                    max_files_per_sleep=1,
                    file_prefix='st2-stuff-', file_format='json')
    # Batch 1
    batch = self.execution_apis[0:5]
    new_marker = dumper._update_marker(batch)
    self.assertIsNotNone(new_marker)
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in batch]
    max_timestamp = max(timestamps)
    self.assertEqual(new_marker, max_timestamp)
    # Batch 2: use the second half of the executions so this actually
    # exercises a different batch (the original re-used batch 1's slice,
    # which made the second check redundant).
    batch = self.execution_apis[5:10]
    new_marker = dumper._update_marker(batch)
    timestamps = [isotime.parse(execution.end_timestamp)
                  for execution in batch]
    max_timestamp = max(timestamps)
    self.assertEqual(new_marker, max_timestamp)
    dumper._write_marker_to_db.assert_called_with(new_marker)
def test_start_stop_dumper(self):
    """The dumper's background loop should start and stop without errors."""
    pending = self.get_queue()
    tick = 0.01
    dumper = Dumper(queue=pending,
                    sleep_interval=tick,
                    export_dir='/tmp',
                    batch_size=1,
                    max_files_per_sleep=5,
                    file_prefix='st2-stuff-',
                    file_format='json')
    dumper.start()
    # Call stop after at least one batch was written to disk.
    eventlet.sleep(10 * tick)
    dumper.stop()
def __init__(self, connection, queues):
    """Initialize the exporter: an in-memory queue of executions awaiting
    export plus the Dumper that writes them into the configured dump dir."""
    super(ExecutionsExporter, self).__init__(connection, queues)
    # The consumer greenthread is created later, when start() is called.
    self._consumer_thread = None
    self.pending_executions = Queue.Queue()
    self._dumper = Dumper(queue=self.pending_executions,
                          export_dir=cfg.CONF.exporter.dump_dir)
class ExecutionsExporter(consumers.MessageHandler):
    """Message handler that consumes completed action executions and feeds
    them to a Dumper, which persists them to disk under the configured
    export directory.

    Fixes over the previous revision:
    - Bare ``except:`` clauses narrowed to ``except Exception:`` so that
      SystemExit/KeyboardInterrupt are not swallowed.
    - ``LOG.info('... %s' % marker)`` changed to lazy %-args so the string
      is only interpolated when the log record is actually emitted.
    """

    message_type = ActionExecutionDB

    def __init__(self, connection, queues):
        super(ExecutionsExporter, self).__init__(connection, queues)
        # In-memory buffer of executions waiting to be dumped to disk.
        self.pending_executions = Queue.Queue()
        self._dumper = Dumper(queue=self.pending_executions,
                              export_dir=cfg.CONF.exporter.dump_dir)
        self._consumer_thread = None

    def start(self, wait=False):
        """Bootstrap missed executions from the DB, then start consuming.

        :param wait: When True, block until the consumer and dumper finish.
        """
        LOG.info('Bootstrapping executions from db...')
        try:
            self._bootstrap()
        except Exception:
            LOG.exception('Unable to bootstrap executions from db. Aborting.')
            raise
        self._consumer_thread = eventlet.spawn(
            super(ExecutionsExporter, self).start, wait=True)
        self._dumper.start()
        if wait:
            self.wait()

    def wait(self):
        """Block until both the consumer thread and the dumper are done."""
        self._consumer_thread.wait()
        self._dumper.wait()

    def shutdown(self):
        """Stop the dumper first, then shut down the message consumer."""
        self._dumper.stop()
        super(ExecutionsExporter, self).shutdown()

    def process(self, execution):
        """Queue a completed execution for export; ignore in-flight ones."""
        LOG.debug('Got execution from queue: %s', execution)
        if execution.status not in COMPLETION_STATUSES:
            return
        execution_api = ActionExecutionAPI.from_model(execution, mask_secrets=True)
        self.pending_executions.put_nowait(execution_api)
        LOG.debug("Added execution to queue.")

    def _bootstrap(self):
        """Enqueue completed executions that were missed while not running."""
        marker = self._get_export_marker_from_db()
        # Pass marker as a lazy argument instead of eagerly interpolating.
        LOG.info('Using marker %s...', marker)
        missed_executions = self._get_missed_executions_from_db(export_marker=marker)
        LOG.info('Found %d executions not exported yet...', len(missed_executions))
        for missed_execution in missed_executions:
            if missed_execution.status not in COMPLETION_STATUSES:
                continue
            execution_api = ActionExecutionAPI.from_model(missed_execution,
                                                          mask_secrets=True)
            try:
                LOG.debug('Missed execution %s', execution_api)
                self.pending_executions.put_nowait(execution_api)
            except Exception:
                # Best-effort: log and keep bootstrapping remaining executions.
                LOG.exception('Failed adding execution to in-memory queue.')
                continue
        LOG.info('Bootstrapped executions...')

    def _get_export_marker_from_db(self):
        """Return the last exported marker timestamp, or None if unavailable."""
        try:
            markers = DumperMarker.get_all()
        except Exception:
            # Best-effort lookup: a missing/unreadable marker just means we
            # fall back to exporting everything.
            return None
        else:
            if len(markers) >= 1:
                marker = markers[0]
                return isotime.parse(marker.marker)
            else:
                return None

    def _get_missed_executions_from_db(self, export_marker=None):
        """Return executions that finished after export_marker (or all)."""
        if not export_marker:
            return self._get_all_executions_from_db()
        # XXX: Should adapt this query to get only executions with status
        # in COMPLETION_STATUSES.
        filters = {'end_timestamp__gt': export_marker}
        LOG.info('Querying for executions with filters: %s', filters)
        return ActionExecution.query(**filters)

    def _get_all_executions_from_db(self):
        return ActionExecution.get_all()  # XXX: Paginated call.