def test_write_marker_to_db_marker_exists(self):
    """Writing a second marker must update the existing DB row, not add one."""
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir='/tmp',
        batch_size=5,
        max_files_per_sleep=1,
        file_prefix='st2-stuff-',
        file_format='json',
    )
    end_times = [
        isotime.parse(execution.end_timestamp)
        for execution in self.execution_apis
    ]
    latest = max(end_times)

    first_marker = dumper._write_marker_to_db(latest)
    second_marker = dumper._write_marker_to_db(
        latest + datetime.timedelta(hours=1)
    )

    all_markers = DumperMarker.get_all()
    # Only a single marker row should ever exist.
    self.assertEqual(len(all_markers), 1)
    surviving_id = all_markers[0].id
    self.assertEqual(first_marker.id, surviving_id)
    self.assertEqual(second_marker.id, surviving_id)
    # The stored value must reflect the most recent write.
    self.assertEqual(all_markers[0].marker, second_marker.marker)
    self.assertTrue(second_marker.updated_at > first_marker.updated_at)
def test_write_to_disk_empty_queue(self):
    """An empty queue should produce zero files and raise nothing."""
    empty_queue = queue.Queue()
    dumper = Dumper(
        queue=empty_queue,
        export_dir='/tmp',
        file_prefix='st2-stuff-',
        file_format='json',
    )
    # We just make sure this doesn't blow up.
    files_written = dumper._write_to_disk()
    self.assertEqual(files_written, 0)
def test_update_marker(self):
    """_update_marker must advance to the max end_timestamp of each batch.

    Bug fix: the second batch previously re-used the slice ``[0:5]`` (a
    copy-paste of batch 1), so the test never exercised the marker
    advancing past the first batch. It now uses ``[5:10]`` as the
    "Batch 2" comment intends.
    """
    executions_queue = self.get_queue()
    dumper = Dumper(
        queue=executions_queue,
        export_dir='/tmp',
        batch_size=5,
        max_files_per_sleep=1,
        file_prefix='st2-stuff-',
        file_format='json',
    )

    # Batch 1
    batch = self.execution_apis[0:5]
    new_marker = dumper._update_marker(batch)
    self.assertTrue(new_marker is not None)
    timestamps = [
        isotime.parse(execution.end_timestamp) for execution in batch
    ]
    max_timestamp = max(timestamps)
    self.assertEqual(new_marker, max_timestamp)

    # Batch 2 — a distinct slice so the marker actually moves forward.
    batch = self.execution_apis[5:10]
    new_marker = dumper._update_marker(batch)
    timestamps = [
        isotime.parse(execution.end_timestamp) for execution in batch
    ]
    max_timestamp = max(timestamps)
    self.assertEqual(new_marker, max_timestamp)
    # _write_marker_to_db is mocked; verify it persisted the new marker.
    dumper._write_marker_to_db.assert_called_with(new_marker)
def __init__(self, connection, queues):
    """Set up the exporter: a pending-executions queue and its Dumper.

    :param connection: Message-bus connection handed to the base consumer.
    :param queues: Queues the base consumer should subscribe to.
    """
    super(ExecutionsExporter, self).__init__(connection, queues)
    self._consumer_thread = None
    self.pending_executions = queue.Queue()
    # The dumper drains pending_executions into the configured dump dir.
    self._dumper = Dumper(
        queue=self.pending_executions,
        export_dir=cfg.CONF.exporter.dump_dir,
    )
def test_update_marker_out_of_order_batch(self):
    """A batch older than the persisted marker must roll the marker back."""
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir="/tmp",
        batch_size=5,
        max_files_per_sleep=1,
        file_prefix="st2-stuff-",
        file_format="json",
    )
    end_times = [
        isotime.parse(execution.end_timestamp)
        for execution in self.execution_apis
    ]
    batch_max = max(end_times)

    # Pretend the persisted marker is ahead of everything in the batch.
    future_marker = batch_max + datetime.timedelta(hours=1)
    dumper._persisted_marker = future_marker

    updated_marker = dumper._update_marker(self.execution_apis)
    self.assertTrue(updated_marker < future_marker)
    # Assert we rolled back the marker.
    self.assertEqual(dumper._persisted_marker, batch_max)
    self.assertEqual(updated_marker, batch_max)
    dumper._write_marker_to_db.assert_called_with(updated_marker)
def test_get_file_name(self):
    """Generated file names live under a dated dir with the right affixes."""
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir='/tmp',
        file_prefix='st2-stuff-',
        file_format='json',
    )
    generated = dumper._get_file_name()
    today = date_utils.get_datetime_utc_now().strftime('%Y-%m-%d')
    expected_prefix = '/tmp/' + today + '/st2-stuff-'
    self.assertTrue(generated.startswith(expected_prefix))
    self.assertTrue(generated.endswith('json'))
def test_get_batch_batch_size_greater_than_actual(self):
    """Asking for more than the queue holds returns everything queued."""
    executions_queue = self.get_queue()
    queued = executions_queue.qsize()
    self.assertTrue(queued > 0)
    # Batch size deliberately exceeds what is available.
    dumper = Dumper(
        queue=executions_queue,
        batch_size=2 * queued,
        export_dir='/tmp',
    )
    batch = dumper._get_batch()
    self.assertEqual(len(batch), queued)
def test_write_to_disk(self):
    """One pass writes at most max_files_per_sleep files from the queue."""
    files_per_sleep = 5
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir='/tmp',
        batch_size=1,
        max_files_per_sleep=files_per_sleep,
        file_prefix='st2-stuff-',
        file_format='json',
    )
    # We just make sure this doesn't blow up.
    written = dumper._write_to_disk()
    self.assertEqual(written, files_per_sleep)
def test_start_stop_dumper(self):
    """Dumper must start, run some write cycles, and stop cleanly."""
    interval = 0.01
    dumper = Dumper(
        queue=self.get_queue(),
        sleep_interval=interval,
        export_dir='/tmp',
        batch_size=1,
        max_files_per_sleep=5,
        file_prefix='st2-stuff-',
        file_format='json',
    )
    dumper.start()
    # Call stop after at least one batch was written to disk.
    eventlet.sleep(interval * 10)
    dumper.stop()
def test_get_batch_batch_size_lesser_than_actual(self):
    """A batch size below queue depth returns exactly batch_size items."""
    executions_queue = self.get_queue()
    queued = executions_queue.qsize()
    self.assertTrue(queued > 0)
    half = queued // 2  # same value as int(queued / 2) for non-negative sizes
    dumper = Dumper(
        queue=executions_queue,
        batch_size=half,
        export_dir="/tmp",
    )
    batch = dumper._get_batch()
    self.assertEqual(len(batch), half)
def test_write_marker_to_db(self):
    """The persisted marker is an ISO string that round-trips to the input."""
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir='/tmp',
        batch_size=5,
        max_files_per_sleep=1,
        file_prefix='st2-stuff-',
        file_format='json',
    )
    end_times = [
        isotime.parse(execution.end_timestamp)
        for execution in self.execution_apis
    ]
    latest = max(end_times)
    marker_db = dumper._write_marker_to_db(latest)
    stored = marker_db.marker
    # Stored as a string; parsing it must give back the original timestamp.
    self.assertTrue(isinstance(stored, six.string_types))
    self.assertEqual(isotime.parse(stored), latest)
def test_get_file_name(self):
    """File names carry the dated directory, prefix, and format suffix."""
    dumper = Dumper(
        queue=self.get_queue(),
        export_dir="/tmp",
        file_prefix="st2-stuff-",
        file_format="json",
    )
    name = dumper._get_file_name()
    utc_date = date_utils.get_datetime_utc_now().strftime("%Y-%m-%d")
    self.assertTrue(
        name.startswith("/tmp/" + utc_date + "/st2-stuff-"))
    self.assertTrue(name.endswith("json"))