def test_crud_complete(self):
    """End-to-end CRUD check for a fully-populated execution record."""
    # Create the DB record.
    obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_workflow))
    ActionExecution.add_or_update(ActionExecutionAPI.to_model(obj))
    model = ActionExecution.get_by_id(obj.id)
    self.assertEqual(str(model.id), obj.id)
    # Every dict-valued field must round-trip through the DB unchanged.
    for field in ('trigger', 'trigger_type', 'trigger_instance', 'rule',
                  'action', 'runner'):
        self.assertDictEqual(getattr(model, field),
                             self.fake_history_workflow[field])
    # The original built a deepcopy of 'liveaction' and then re-assigned the
    # timestamp keys to themselves (no-ops); compare directly instead.
    self.assertDictEqual(model.liveaction,
                         self.fake_history_workflow['liveaction'])
    self.assertIsNone(getattr(model, 'parent', None))
    self.assertListEqual(model.children, self.fake_history_workflow['children'])

    # Update the DB record.
    children = [str(bson.ObjectId()), str(bson.ObjectId())]
    model.children = children
    ActionExecution.add_or_update(model)
    model = ActionExecution.get_by_id(obj.id)
    self.assertListEqual(model.children, children)

    # Delete the DB record.
    ActionExecution.delete(model)
    # NOTE(review): other copies of this test expect
    # StackStormDBObjectNotFoundError from get_by_id — confirm which
    # exception this codebase version raises before changing this.
    self.assertRaises(ValueError, ActionExecution.get_by_id, obj.id)
def test_crud_partial(self):
    """End-to-end CRUD check for a sparsely-populated (subtask) record."""
    # Create the DB record.
    obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_subtasks[0]))
    ActionExecution.add_or_update(ActionExecutionAPI.to_model(obj))
    model = ActionExecution.get_by_id(obj.id)
    self.assertEqual(str(model.id), obj.id)
    # Fields absent from the input document are stored as empty dicts.
    for field in ('trigger', 'trigger_type', 'trigger_instance', 'rule'):
        self.assertDictEqual(getattr(model, field), {})
    self.assertDictEqual(model.action, self.fake_history_subtasks[0]['action'])
    self.assertDictEqual(model.runner, self.fake_history_subtasks[0]['runner'])
    # The original re-assigned the liveaction timestamp keys to themselves
    # (no-ops); compare directly instead.
    self.assertDictEqual(model.liveaction,
                         self.fake_history_subtasks[0]['liveaction'])
    self.assertEqual(model.parent, self.fake_history_subtasks[0]['parent'])
    self.assertListEqual(model.children, [])

    # Update the DB record.
    children = [str(bson.ObjectId()), str(bson.ObjectId())]
    model.children = children
    ActionExecution.add_or_update(model)
    model = ActionExecution.get_by_id(obj.id)
    self.assertListEqual(model.children, children)

    # Delete the DB record; subsequent lookups must raise.
    ActionExecution.delete(model)
    self.assertRaises(StackStormDBObjectNotFoundError,
                      ActionExecution.get_by_id, obj.id)
def test_install(self, _handle_schedule_execution):
    """POST /v1/packs/install schedules an execution and returns 202."""
    _handle_schedule_execution.return_value = ActionExecutionAPI(id='123')
    request_body = {'packs': ['some']}
    response = self.app.post_json('/v1/packs/install', request_body)
    self.assertEqual(response.status_int, 202)
    self.assertEqual(response.json, {'execution_id': '123'})
def test_model_complete(self):
    """Round-trip a fully-populated document: API -> DB model -> API."""
    dict_fields = ('trigger', 'trigger_type', 'trigger_instance', 'rule',
                   'action', 'runner')

    # Create API object.
    obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_workflow))
    for field in dict_fields:
        self.assertDictEqual(getattr(obj, field),
                             self.fake_history_workflow[field])
    # Was assertEquals — a deprecated alias; assertEqual is canonical.
    self.assertEqual(obj.liveaction, self.fake_history_workflow['liveaction'])
    self.assertIsNone(getattr(obj, 'parent', None))
    self.assertListEqual(obj.children, self.fake_history_workflow['children'])

    # Convert API object to DB model.
    model = ActionExecutionAPI.to_model(obj)
    self.assertEqual(str(model.id), obj.id)
    for field in dict_fields:
        self.assertDictEqual(getattr(model, field),
                             self.fake_history_workflow[field])
    # The original re-assigned the liveaction timestamp keys to themselves
    # (no-ops); compare directly instead.
    self.assertDictEqual(model.liveaction,
                         self.fake_history_workflow['liveaction'])
    self.assertIsNone(getattr(model, 'parent', None))
    self.assertListEqual(model.children, self.fake_history_workflow['children'])

    # Convert DB model back to an API object.
    obj = ActionExecutionAPI.from_model(model)
    self.assertEqual(str(model.id), obj.id)
    for field in dict_fields:
        self.assertDictEqual(getattr(obj, field),
                             self.fake_history_workflow[field])
    self.assertDictEqual(obj.liveaction,
                         self.fake_history_workflow['liveaction'])
    self.assertIsNone(getattr(obj, 'parent', None))
    self.assertListEqual(obj.children, self.fake_history_workflow['children'])
def test_model_complete(self):
    """Round-trip a fully-populated document: API -> DB model -> API."""
    dict_fields = ("trigger", "trigger_type", "trigger_instance", "rule",
                   "action", "runner")

    # Create API object.
    obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_workflow))
    for field in dict_fields:
        self.assertDictEqual(getattr(obj, field),
                             self.fake_history_workflow[field])
    self.assertEqual(obj.liveaction, self.fake_history_workflow["liveaction"])
    self.assertIsNone(getattr(obj, "parent", None))
    self.assertListEqual(obj.children, self.fake_history_workflow["children"])

    # Convert API object to DB model.
    model = ActionExecutionAPI.to_model(obj)
    self.assertEqual(str(model.id), obj.id)
    for field in dict_fields:
        self.assertDictEqual(getattr(model, field),
                             self.fake_history_workflow[field])
    # The original deep-copied 'liveaction' and then re-assigned the
    # timestamp keys to themselves (no-ops); compare directly instead.
    self.assertDictEqual(model.liveaction,
                         self.fake_history_workflow["liveaction"])
    self.assertIsNone(getattr(model, "parent", None))
    self.assertListEqual(model.children, self.fake_history_workflow["children"])

    # Convert DB model back to an API object.
    obj = ActionExecutionAPI.from_model(model)
    self.assertEqual(str(model.id), obj.id)
    for field in dict_fields:
        self.assertDictEqual(getattr(obj, field),
                             self.fake_history_workflow[field])
    self.assertDictEqual(obj.liveaction,
                         self.fake_history_workflow["liveaction"])
    self.assertIsNone(getattr(obj, "parent", None))
    self.assertListEqual(obj.children, self.fake_history_workflow["children"])
def setUpClass(cls):
    """Seed the DB with ``num_records`` randomized execution records.

    Each record is either a 'chain' workflow or a 'local' action; a
    'local' record has a 50% chance of being parented to a random,
    already-persisted 'chain' record. Start timestamps advance one
    second per record.
    """
    super(TestActionExecutionFilters, cls).setUpClass()
    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100
    cls.refs = {}
    cls.start_timestamps = []
    cls.fake_types = [
        {
            "trigger": copy.deepcopy(fixture.ARTIFACTS["trigger"]),
            "trigger_type": copy.deepcopy(fixture.ARTIFACTS["trigger_type"]),
            "trigger_instance": copy.deepcopy(
                fixture.ARTIFACTS["trigger_instance"]
            ),
            "rule": copy.deepcopy(fixture.ARTIFACTS["rule"]),
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["chain"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["action-chain"]),
            "liveaction": copy.deepcopy(
                fixture.ARTIFACTS["liveactions"]["workflow"]
            ),
            "context": copy.deepcopy(fixture.ARTIFACTS["context"]),
            "children": [],
        },
        {
            "action": copy.deepcopy(fixture.ARTIFACTS["actions"]["local"]),
            "runner": copy.deepcopy(fixture.ARTIFACTS["runners"]["run-local"]),
            "liveaction": copy.deepcopy(fixture.ARTIFACTS["liveactions"]["task1"]),
        },
    ]

    def assign_parent(child):
        # Attach the child to a random already-persisted 'chain' record
        # and persist the parent's updated children list.
        candidates = [ref for ref in cls.refs.values()
                      if ref.action["name"] == "chain"]
        if candidates:
            parent = random.choice(candidates)
            child["parent"] = str(parent.id)
            parent.children.append(child["id"])
            cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for i in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=i)
        fake_type = random.choice(cls.fake_types)
        data = copy.deepcopy(fake_type)
        data["id"] = obj_id
        data["start_timestamp"] = isotime.format(timestamp, offset=False)
        data["end_timestamp"] = isotime.format(timestamp, offset=False)
        data["status"] = data["liveaction"]["status"]
        data["result"] = data["liveaction"]["result"]
        if fake_type["action"]["name"] == "local" and random.choice([True, False]):
            assign_parent(data)
        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)
    cls.start_timestamps = sorted(cls.start_timestamps)
def setUpClass(cls):
    """Seed the DB with ``num_records`` randomized execution records.

    Each record is either a 'chain' workflow or a 'local' action; a
    'local' record has a 50% chance of being parented to a random,
    already-persisted 'chain' record. Start timestamps advance one
    second per record.
    """
    super(TestActionExecutionFilters, cls).setUpClass()
    cls.dt_base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    cls.num_records = 100
    cls.refs = {}
    cls.start_timestamps = []
    cls.fake_types = [
        {
            'trigger': copy.deepcopy(fixture.ARTIFACTS['trigger']),
            'trigger_type': copy.deepcopy(fixture.ARTIFACTS['trigger_type']),
            'trigger_instance': copy.deepcopy(fixture.ARTIFACTS['trigger_instance']),
            'rule': copy.deepcopy(fixture.ARTIFACTS['rule']),
            'action': copy.deepcopy(fixture.ARTIFACTS['actions']['chain']),
            'runner': copy.deepcopy(fixture.ARTIFACTS['runners']['action-chain']),
            'liveaction': copy.deepcopy(fixture.ARTIFACTS['liveactions']['workflow']),
            'context': copy.deepcopy(fixture.ARTIFACTS['context']),
            'children': []
        },
        {
            'action': copy.deepcopy(fixture.ARTIFACTS['actions']['local']),
            'runner': copy.deepcopy(fixture.ARTIFACTS['runners']['run-local']),
            'liveaction': copy.deepcopy(fixture.ARTIFACTS['liveactions']['task1'])
        }
    ]

    def assign_parent(child):
        # Attach the child to a random already-persisted 'chain' record
        # and persist the parent's updated children list.
        candidates = [ref for ref in cls.refs.values()
                      if ref.action['name'] == 'chain']
        if candidates:
            parent = random.choice(candidates)
            child['parent'] = str(parent.id)
            parent.children.append(child['id'])
            cls.refs[str(parent.id)] = ActionExecution.add_or_update(parent)

    for i in range(cls.num_records):
        obj_id = str(bson.ObjectId())
        timestamp = cls.dt_base + datetime.timedelta(seconds=i)
        fake_type = random.choice(cls.fake_types)
        data = copy.deepcopy(fake_type)
        data['id'] = obj_id
        data['start_timestamp'] = isotime.format(timestamp, offset=False)
        data['end_timestamp'] = isotime.format(timestamp, offset=False)
        data['status'] = data['liveaction']['status']
        data['result'] = data['liveaction']['result']
        if fake_type['action']['name'] == 'local' and random.choice([True, False]):
            assign_parent(data)
        wb_obj = ActionExecutionAPI(**data)
        db_obj = ActionExecutionAPI.to_model(wb_obj)
        cls.refs[obj_id] = ActionExecution.add_or_update(db_obj)
        cls.start_timestamps.append(timestamp)
    cls.start_timestamps = sorted(cls.start_timestamps)
def test_model_partial(self):
    """Round-trip a partial (subtask) document: API -> DB model -> API."""
    fake = self.fake_history_subtasks[0]
    optional_fields = ('trigger', 'trigger_type', 'trigger_instance', 'rule')

    # Create API object. Missing optional fields are absent, not defaulted.
    obj = ActionExecutionAPI(**copy.deepcopy(fake))
    for field in optional_fields:
        self.assertIsNone(getattr(obj, field, None))
    self.assertDictEqual(obj.action, fake['action'])
    self.assertDictEqual(obj.runner, fake['runner'])
    self.assertDictEqual(obj.liveaction, fake['liveaction'])
    self.assertEqual(obj.parent, fake['parent'])
    self.assertIsNone(getattr(obj, 'children', None))

    # Convert API object to DB model. Missing dict fields default to {}
    # and missing children to [].
    model = ActionExecutionAPI.to_model(obj)
    self.assertEqual(str(model.id), obj.id)
    for field in optional_fields:
        self.assertDictEqual(getattr(model, field), {})
    self.assertDictEqual(model.action, fake['action'])
    self.assertDictEqual(model.runner, fake['runner'])
    # The original re-assigned the liveaction timestamp keys to themselves
    # (no-ops); compare directly instead.
    self.assertDictEqual(model.liveaction, fake['liveaction'])
    self.assertEqual(model.parent, fake['parent'])
    self.assertListEqual(model.children, [])

    # Convert DB model back to an API object.
    obj = ActionExecutionAPI.from_model(model)
    self.assertEqual(str(model.id), obj.id)
    for field in optional_fields:
        self.assertIsNone(getattr(obj, field, None))
    self.assertDictEqual(obj.action, fake['action'])
    self.assertDictEqual(obj.runner, fake['runner'])
    self.assertDictEqual(obj.liveaction, fake['liveaction'])
    self.assertEqual(obj.parent, fake['parent'])
    self.assertIsNone(getattr(obj, 'children', None))
class TestDumper(DbTestCase):
    """Tests for Dumper marker persistence against a real DB."""

    fixtures_loader = FixturesLoader()
    loaded_fixtures = fixtures_loader.load_fixtures(
        fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES)
    loaded_executions = loaded_fixtures['executions']
    execution_apis = []
    for execution in loaded_executions.values():
        execution_apis.append(ActionExecutionAPI(**execution))

    def get_queue(self):
        """Return a fresh queue pre-loaded with every fixture execution."""
        executions_queue = queue.Queue()
        for execution in self.execution_apis:
            executions_queue.put(execution)
        return executions_queue

    def _make_dumper(self, executions_queue):
        # Both tests in this class use an identical Dumper configuration;
        # construct it in one place.
        return Dumper(queue=executions_queue, export_dir='/tmp', batch_size=5,
                      max_files_per_sleep=1, file_prefix='st2-stuff-',
                      file_format='json')

    def _max_end_timestamp(self):
        # Newest end_timestamp across all fixture executions.
        return max(isotime.parse(execution.end_timestamp)
                   for execution in self.execution_apis)

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_write_marker_to_db(self):
        dumper = self._make_dumper(self.get_queue())
        max_timestamp = self._max_end_timestamp()
        marker_db = dumper._write_marker_to_db(max_timestamp)
        persisted_marker = marker_db.marker
        # The marker is persisted as a string and parses back losslessly.
        self.assertIsInstance(persisted_marker, six.string_types)
        self.assertEqual(isotime.parse(persisted_marker), max_timestamp)

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_write_marker_to_db_marker_exists(self):
        dumper = self._make_dumper(self.get_queue())
        max_timestamp = self._max_end_timestamp()
        first_marker_db = dumper._write_marker_to_db(max_timestamp)
        second_marker_db = dumper._write_marker_to_db(
            max_timestamp + datetime.timedelta(hours=1))
        markers = DumperMarker.get_all()
        # Writing twice must update the single existing marker, not add one.
        self.assertEqual(len(markers), 1)
        final_marker_id = markers[0].id
        self.assertEqual(first_marker_db.id, final_marker_id)
        self.assertEqual(second_marker_db.id, final_marker_id)
        self.assertEqual(markers[0].marker, second_marker_db.marker)
        self.assertGreater(second_marker_db.updated_at,
                           first_marker_db.updated_at)
def test_datetime_range(self):
    """A timestamp range and its reversal both match the same 10 records."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    # Insert 60 records, one second apart.
    for offset in range(60):
        record = copy.deepcopy(self.fake_history_subtasks[0])
        record['id'] = str(bson.ObjectId())
        record['start_timestamp'] = isotime.format(
            base + datetime.timedelta(seconds=offset))
        api_obj = ActionExecutionAPI(**record)
        ActionExecution.add_or_update(ActionExecutionAPI.to_model(api_obj))

    forward_range = '2014-12-25T00:00:10Z..2014-12-25T00:00:19Z'
    matches = ActionExecution.query(start_timestamp=forward_range)
    self.assertEqual(len(matches), 10)

    reversed_range = '2014-12-25T00:00:19Z..2014-12-25T00:00:10Z'
    matches = ActionExecution.query(start_timestamp=reversed_range)
    self.assertEqual(len(matches), 10)
def test_sort_by_start_timestamp(self):
    """Query results honor ascending and descending start_timestamp order."""
    base = date_utils.add_utc_tz(datetime.datetime(2014, 12, 25, 0, 0, 0))
    # Insert 60 records, one second apart.
    for offset in range(60):
        record = copy.deepcopy(self.fake_history_subtasks[0])
        record["id"] = str(bson.ObjectId())
        record["start_timestamp"] = isotime.format(
            base + datetime.timedelta(seconds=offset))
        api_obj = ActionExecutionAPI(**record)
        ActionExecution.add_or_update(ActionExecutionAPI.to_model(api_obj))

    dt_range = "2014-12-25T00:00:10Z..2014-12-25T00:00:19Z"
    ascending = ActionExecution.query(start_timestamp=dt_range,
                                      order_by=["start_timestamp"])
    self.assertLess(ascending[0]["start_timestamp"],
                    ascending[9]["start_timestamp"])

    dt_range = "2014-12-25T00:00:19Z..2014-12-25T00:00:10Z"
    descending = ActionExecution.query(start_timestamp=dt_range,
                                       order_by=["-start_timestamp"])
    self.assertLess(descending[9]["start_timestamp"],
                    descending[0]["start_timestamp"])
def test_crud_complete(self):
    """End-to-end CRUD check for a fully-populated execution record."""
    # Create the DB record.
    obj = ActionExecutionAPI(**copy.deepcopy(self.fake_history_workflow))
    ActionExecution.add_or_update(ActionExecutionAPI.to_model(obj))
    model = ActionExecution.get_by_id(obj.id)
    self.assertEqual(str(model.id), obj.id)
    # Every dict-valued field must round-trip through the DB unchanged.
    for field in ("trigger", "trigger_type", "trigger_instance", "rule",
                  "action", "runner"):
        self.assertDictEqual(getattr(model, field),
                             self.fake_history_workflow[field])
    # The original deep-copied 'liveaction' and then re-assigned the
    # timestamp keys to themselves (no-ops); compare directly instead.
    self.assertDictEqual(model.liveaction,
                         self.fake_history_workflow["liveaction"])
    self.assertIsNone(getattr(model, "parent", None))
    self.assertListEqual(model.children, self.fake_history_workflow["children"])

    # Update the DB record.
    children = [str(bson.ObjectId()), str(bson.ObjectId())]
    model.children = children
    ActionExecution.add_or_update(model)
    model = ActionExecution.get_by_id(obj.id)
    self.assertListEqual(model.children, children)

    # Delete the DB record; subsequent lookups must raise.
    ActionExecution.delete(model)
    self.assertRaises(
        StackStormDBObjectNotFoundError, ActionExecution.get_by_id, obj.id
    )
class TestDumper(EventletTestCase):
    """Tests for Dumper batching, file naming, disk writes and markers."""

    fixtures_loader = FixturesLoader()
    loaded_fixtures = fixtures_loader.load_fixtures(
        fixtures_pack=DESCENDANTS_PACK, fixtures_dict=DESCENDANTS_FIXTURES)
    loaded_executions = loaded_fixtures['executions']
    execution_apis = []
    for execution in loaded_executions.values():
        execution_apis.append(ActionExecutionAPI(**execution))

    def get_queue(self):
        """Return a fresh queue pre-loaded with every fixture execution."""
        executions_queue = queue.Queue()
        for execution in self.execution_apis:
            executions_queue.put(execution)
        return executions_queue

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_get_batch_batch_size_greater_than_actual(self):
        executions_queue = self.get_queue()
        qsize = executions_queue.qsize()
        self.assertGreater(qsize, 0)
        # Batch size larger than the queue: the batch is capped at queue size.
        dumper = Dumper(queue=executions_queue, batch_size=2 * qsize,
                        export_dir='/tmp')
        batch = dumper._get_batch()
        self.assertEqual(len(batch), qsize)

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_get_batch_batch_size_lesser_than_actual(self):
        executions_queue = self.get_queue()
        qsize = executions_queue.qsize()
        self.assertGreater(qsize, 0)
        # Batch size smaller than the queue: the batch is exactly batch_size.
        expected_batch_size = qsize // 2
        dumper = Dumper(queue=executions_queue,
                        batch_size=expected_batch_size,
                        export_dir='/tmp')
        batch = dumper._get_batch()
        self.assertEqual(len(batch), expected_batch_size)

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_get_file_name(self):
        dumper = Dumper(queue=self.get_queue(), export_dir='/tmp',
                        file_prefix='st2-stuff-', file_format='json')
        file_name = dumper._get_file_name()
        # Files land in a per-day sub-directory and carry the configured
        # prefix and format suffix.
        export_date = date_utils.get_datetime_utc_now().strftime('%Y-%m-%d')
        self.assertTrue(
            file_name.startswith('/tmp/' + export_date + '/st2-stuff-'))
        self.assertTrue(file_name.endswith('json'))

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_write_to_disk_empty_queue(self):
        dumper = Dumper(queue=queue.Queue(), export_dir='/tmp',
                        file_prefix='st2-stuff-', file_format='json')
        # An empty queue must be a no-op (zero files), not an error.
        ret = dumper._write_to_disk()
        self.assertEqual(ret, 0)

    @mock.patch.object(TextFileWriter, 'write_text',
                       mock.MagicMock(return_value=True))
    @mock.patch.object(Dumper, '_update_marker', mock.MagicMock(return_value=None))
    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    def test_write_to_disk(self):
        executions_queue = self.get_queue()
        max_files_per_sleep = 5
        dumper = Dumper(queue=executions_queue, export_dir='/tmp', batch_size=1,
                        max_files_per_sleep=max_files_per_sleep,
                        file_prefix='st2-stuff-', file_format='json')
        # We just make sure this doesn't blow up.
        ret = dumper._write_to_disk()
        self.assertEqual(ret, max_files_per_sleep)

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    @mock.patch.object(TextFileWriter, 'write_text',
                       mock.MagicMock(return_value=True))
    def test_start_stop_dumper(self):
        executions_queue = self.get_queue()
        sleep_interval = 0.01
        dumper = Dumper(queue=executions_queue, sleep_interval=sleep_interval,
                        export_dir='/tmp', batch_size=1, max_files_per_sleep=5,
                        file_prefix='st2-stuff-', file_format='json')
        dumper.start()
        # Call stop after at least one batch was written to disk.
        eventlet.sleep(10 * sleep_interval)
        dumper.stop()

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    @mock.patch.object(Dumper, '_write_marker_to_db',
                       mock.MagicMock(return_value=True))
    def test_update_marker(self):
        executions_queue = self.get_queue()
        dumper = Dumper(queue=executions_queue, export_dir='/tmp', batch_size=5,
                        max_files_per_sleep=1, file_prefix='st2-stuff-',
                        file_format='json')
        # Batch 1: the marker advances to the newest end_timestamp in the batch.
        batch = self.execution_apis[0:5]
        new_marker = dumper._update_marker(batch)
        self.assertIsNotNone(new_marker)
        timestamps = [isotime.parse(execution.end_timestamp)
                      for execution in batch]
        max_timestamp = max(timestamps)
        self.assertEqual(new_marker, max_timestamp)
        # Batch 2: the same batch again leaves the marker at the same value.
        batch = self.execution_apis[0:5]
        new_marker = dumper._update_marker(batch)
        timestamps = [isotime.parse(execution.end_timestamp)
                      for execution in batch]
        max_timestamp = max(timestamps)
        self.assertEqual(new_marker, max_timestamp)
        dumper._write_marker_to_db.assert_called_with(new_marker)

    @mock.patch.object(os.path, 'exists', mock.MagicMock(return_value=True))
    @mock.patch.object(Dumper, '_write_marker_to_db',
                       mock.MagicMock(return_value=True))
    def test_update_marker_out_of_order_batch(self):
        executions_queue = self.get_queue()
        dumper = Dumper(queue=executions_queue, export_dir='/tmp', batch_size=5,
                        max_files_per_sleep=1, file_prefix='st2-stuff-',
                        file_format='json')
        timestamps = [isotime.parse(execution.end_timestamp)
                      for execution in self.execution_apis]
        max_timestamp = max(timestamps)
        # Pre-set the persisted marker AHEAD of every timestamp in the batch
        # (the original comment claimed "less than min", which was wrong);
        # _update_marker should roll it back to the batch maximum.
        test_timestamp = max_timestamp + datetime.timedelta(hours=1)
        dumper._persisted_marker = test_timestamp
        new_marker = dumper._update_marker(self.execution_apis)
        self.assertLess(new_marker, test_timestamp)
        # Assert we rolled back the marker.
        self.assertEqual(dumper._persisted_marker, max_timestamp)
        self.assertEqual(new_marker, max_timestamp)
        dumper._write_marker_to_db.assert_called_with(new_marker)