def test_executions_to_time_filter(self):
    """Check the 'to_time' argument of the 'executions' expression filter."""
    with db_api.transaction(read_only=True):
        first = db_api.create_workflow_execution(WF_EXECS[0])
        second = db_api.create_workflow_execution(WF_EXECS[1])

        ctx = {'__execution': {'id': 'some'}}

        # A far-future boundary matches both executions.
        self.assertEqual(
            [first, second],
            self._evaluator.evaluate(
                '_|executions(to_time="2020-01-01")',
                ctx
            )
        )

        # Only the first execution exists before this moment.
        self.assertEqual(
            [first],
            self._evaluator.evaluate(
                '_|executions(to_time="2016-12-01 15:01:00")',
                ctx
            )
        )

        # The id filter and the time window are disjoint, so nothing matches.
        self.assertEqual(
            [],
            self._evaluator.evaluate(
                '_|executions(id="two", to_time="2016-12-01 15:01:00")',
                ctx
            )
        )
def test_executions(self):
    """The 'executions' filter without arguments returns all executions."""
    with db_api.transaction(read_only=True):
        all_created = [
            db_api.create_workflow_execution(WF_EXECS[0]),
            db_api.create_workflow_execution(WF_EXECS[1])
        ]

        ctx = {'__execution': {'id': 'some'}}

        self.assertEqual(
            all_created,
            self._evaluator.evaluate('_|executions()', ctx)
        )
def _create_workflow_execution(wf_def, wf_spec, wf_input, desc, params):
    """Create a workflow execution DB record and initialize its context.

    :param wf_def: Workflow definition object (provides name and id).
    :param wf_spec: Workflow specification object.
    :param wf_input: Workflow input dict (may be None).
    :param desc: Execution description string.
    :param params: Execution parameters dict (may be None).
    :return: The created workflow execution object.
    """
    # Normalize once so the .get() calls below don't crash on None.
    # The original only guarded the 'params' field ('params or {}') but
    # still called params.get(...) on the raw argument.
    params = params or {}

    wf_ex = db_api.create_workflow_execution({
        'name': wf_def.name,
        'description': desc,
        'workflow_name': wf_def.name,
        'workflow_id': wf_def.id,
        'spec': wf_spec.to_dict(),
        'params': params,
        'state': states.IDLE,
        'input': wf_input or {},
        'output': {},
        # Deep copy so the execution context never aliases the caller's
        # input dict.
        'context': copy.deepcopy(wf_input) or {},
        'task_execution_id': params.get('task_execution_id'),
        'runtime_context': {
            'with_items_index': params.get('with_items_index', 0)
        },
    })

    data_flow.add_openstack_data_to_context(wf_ex)
    data_flow.add_execution_to_context(wf_ex)
    data_flow.add_environment_to_context(wf_ex)
    data_flow.add_workflow_variables_to_context(wf_ex, wf_spec)

    return wf_ex
def _create_execution(self, wf_def, wf_ex_id, input_dict, desc, params):
    """Create self.wf_ex, populate its context and cache its spec."""
    values = {
        'id': wf_ex_id,
        'name': wf_def.name,
        'description': desc,
        'workflow_name': wf_def.name,
        'workflow_namespace': wf_def.namespace,
        'workflow_id': wf_def.id,
        'spec': self.wf_spec.to_dict(),
        'state': states.IDLE,
        'output': {},
        'task_execution_id': params.get('task_execution_id'),
        'root_execution_id': params.get('root_execution_id'),
        'runtime_context': {'index': params.get('index', 0)},
    }

    self.wf_ex = db_api.create_workflow_execution(values)
    self.wf_ex.input = input_dict or {}

    # Resolve the environment before storing the parameters on the
    # execution object.
    params['env'] = _get_environment(params)
    self.wf_ex.params = params

    data_flow.add_openstack_data_to_context(self.wf_ex)
    data_flow.add_execution_to_context(self.wf_ex)
    data_flow.add_workflow_variables_to_context(self.wf_ex, self.wf_spec)

    spec_parser.cache_workflow_spec_by_execution_id(
        self.wf_ex.id,
        self.wf_spec
    )
def _create_execution(self, input_dict, desc, params):
    """Create self.wf_ex for self.wf_def and initialize its context."""
    self.wf_ex = db_api.create_workflow_execution({
        'name': self.wf_def.name,
        'description': desc,
        'workflow_name': self.wf_def.name,
        'workflow_id': self.wf_def.id,
        'spec': self.wf_spec.to_dict(),
        'state': states.IDLE,
        'output': {},
        'task_execution_id': params.get('task_execution_id'),
        'runtime_context': {'index': params.get('index', 0)},
    })

    self.wf_ex.input = input_dict or {}
    # Deep copy keeps the context independent from the caller's dict.
    self.wf_ex.context = copy.deepcopy(input_dict) or {}

    # Only store an environment when one was actually resolved.
    resolved_env = _get_environment(params)

    if resolved_env:
        params['env'] = resolved_env

    self.wf_ex.params = params

    data_flow.add_openstack_data_to_context(self.wf_ex)
    data_flow.add_execution_to_context(self.wf_ex)
    data_flow.add_environment_to_context(self.wf_ex)
    data_flow.add_workflow_variables_to_context(self.wf_ex, self.wf_spec)
def _load_executions():
    """Insert the fixed set of workflow executions used by these tests.

    NOTE(review): timestamps use the naive local clock
    (datetime.datetime.now()) — presumably matching what the code under
    test compares against; confirm before changing to utcnow().
    """
    time_now = datetime.datetime.now()

    def _ago(**deltas):
        # Timestamp shifted into the past by the given timedelta arguments.
        return time_now - datetime.timedelta(**deltas)

    wf_execs = [
        {
            'id': '123',
            'name': 'success_expired',
            'created_at': _ago(minutes=60),
            'updated_at': _ago(minutes=59),
            'workflow_name': 'test_exec',
            'state': "SUCCESS",
        },
        {
            'id': '456',
            'name': 'error_expired',
            'created_at': _ago(days=3, minutes=10),
            'updated_at': _ago(days=3),
            'workflow_name': 'test_exec',
            'state': "ERROR",
        },
        {
            'id': '789',
            'name': 'running_not_expired',
            'created_at': _ago(days=3, minutes=10),
            'updated_at': _ago(days=3),
            'workflow_name': 'test_exec',
            'state': "RUNNING",
        },
        {
            'id': '987',
            'name': 'success_not_expired',
            'created_at': _ago(minutes=15),
            'updated_at': _ago(minutes=5),
            'workflow_name': 'test_exec',
            'state': "SUCCESS",
        },
        {
            'id': '654',
            'name': 'expired but not a parent',
            'created_at': _ago(days=15),
            'updated_at': _ago(days=10),
            'workflow_name': 'test_exec',
            'state': "SUCCESS",
            'task_execution_id': '789'
        }
    ]

    for wf_exec in wf_execs:
        db_api.create_workflow_execution(wf_exec)
def test_read_only_transactions(self):
    """Writes inside a read_only transaction must not be committed."""
    # A regular transaction persists the created execution.
    with db_api.transaction():
        db_api.create_workflow_execution(WF_EXECS[0])

        self.assertEqual(1, len(db_api.get_workflow_executions()))

    # Still visible after commit.
    self.assertEqual(1, len(db_api.get_workflow_executions()))

    # A read-only transaction sees its own (uncommitted) write...
    with db_api.transaction(read_only=True):
        db_api.create_workflow_execution(WF_EXECS[1])

        self.assertEqual(2, len(db_api.get_workflow_executions()))

    # ...but that write is rolled back on exit.
    self.assertEqual(1, len(db_api.get_workflow_executions()))
def _create_workflow_execution(self, params):
    """Create a RUNNING execution of workflow 'wf' with the given params."""
    # Any stored definition works here; we only need a valid workflow_id.
    wf_def = db_api.get_workflow_definitions()[0]

    spec_dict = self.wb_spec.get_workflows().get('wf').to_dict()

    self.wf_ex = db_api.create_workflow_execution({
        'id': '1-2-3-4',
        'spec': spec_dict,
        'state': states.RUNNING,
        'params': params,
        'workflow_id': wf_def.id
    })
def _load_executions():
    """Populate the DB with executions of various ages and states."""
    time_now = datetime.datetime.now()

    # (id, name, created-at delta, updated-at delta, state, parent task id)
    rows = [
        ('123', 'success_expired',
         dict(minutes=60), dict(minutes=59), "SUCCESS", None),
        ('456', 'error_expired',
         dict(days=3, minutes=10), dict(days=3), "ERROR", None),
        ('789', 'running_not_expired',
         dict(days=3, minutes=10), dict(days=3), "RUNNING", None),
        ('987', 'success_not_expired',
         dict(minutes=15), dict(minutes=5), "SUCCESS", None),
        ('654', 'expired but not a parent',
         dict(days=15), dict(days=10), "SUCCESS", '789'),
    ]

    for id_, name, created, updated, state, task_ex_id in rows:
        values = {
            'id': id_,
            'name': name,
            'created_at': time_now - datetime.timedelta(**created),
            'updated_at': time_now - datetime.timedelta(**updated),
            'workflow_name': 'test_exec',
            'state': state,
        }

        # Only the nested execution carries a parent task id.
        if task_ex_id is not None:
            values['task_execution_id'] = task_ex_id

        db_api.create_workflow_execution(values)
def test_executions_state_filter(self):
    """Check the 'state' argument of the 'executions' expression filter."""
    with db_api.transaction(read_only=True):
        db_api.create_workflow_execution(WF_EXECS[0])
        running_ex = db_api.create_workflow_execution(WF_EXECS[1])

        ctx = {'__execution': {'id': 'some'}}

        # Only the second fixture is in the RUNNING state.
        self.assertEqual(
            [running_ex],
            self._evaluator.evaluate(
                '_|executions(state="RUNNING")',
                ctx
            )
        )

        # Combining id and state filters yields no match.
        self.assertEqual(
            [],
            self._evaluator.evaluate(
                '_|executions(id="one", state="RUNNING")',
                ctx
            )
        )
def test_from_dict(self):
    """Build an Execution resource from a workflow execution dict."""
    wf_ex = db_api.create_workflow_execution(WF_EXEC)

    self.assertIsNotNone(wf_ex)

    # The conversion itself must succeed and produce an object.
    wf_ex_resource = resources.Execution.from_dict(wf_ex.to_dict())

    self.assertIsNotNone(wf_ex_resource)

    expected = copy.copy(WF_EXEC)

    del expected['some_invalid_field']

    utils.datetime_to_str_in_dict(expected, 'created_at')

    # NOTE(review): this compares the DB object's dict, not the resource
    # built above — presumably intentional, but worth confirming.
    self.assertDictEqual(expected, wf_ex.to_dict())
def _create_workflow_execution(wf_def, wf_spec, wf_input, params):
    """Create a RUNNING workflow execution and initialize its context.

    :param wf_def: Workflow definition object (provides the name).
    :param wf_spec: Workflow specification object.
    :param wf_input: Workflow input dict (may be None).
    :param params: Execution parameters dict (may be None).
    :return: The created workflow execution object.
    """
    # Normalize once so the .get() calls below don't crash on None.
    # The original guarded only the 'params' field ('params or {}') but
    # still called params.get(...) on the raw argument.
    params = params or {}

    wf_ex = db_api.create_workflow_execution({
        'name': wf_def.name,
        'workflow_name': wf_def.name,
        'spec': wf_spec.to_dict(),
        'params': params,
        'state': states.RUNNING,
        'input': wf_input or {},
        'output': {},
        # NOTE(review): shallow copy, so nested values still alias the
        # caller's input — kept as-is to preserve behavior.
        'context': copy.copy(wf_input) or {},
        'task_execution_id': params.get('task_execution_id'),
        'runtime_context': {
            'with_items_index': params.get('with_items_index', 0)
        },
    })

    data_flow.add_openstack_data_to_context(wf_ex.context)
    data_flow.add_execution_to_context(wf_ex, wf_ex.context)
    data_flow.add_environment_to_context(wf_ex, wf_ex.context)

    return wf_ex
def _create_workflow_executions():
    """Create expired/non-expired executions plus one nested child."""
    time_now = datetime.datetime.utcnow()

    def _shift(**deltas):
        # UTC timestamp shifted into the past by the given deltas.
        return time_now - datetime.timedelta(**deltas)

    wf_execs = [
        {
            'id': 'success_expired',
            'name': 'success_expired',
            'created_at': _shift(minutes=60),
            'updated_at': _shift(minutes=59),
            'workflow_name': 'test_exec',
            'state': "SUCCESS",
        },
        {
            'id': 'error_expired',
            'name': 'error_expired',
            'created_at': _shift(days=3, minutes=10),
            'updated_at': _shift(days=3),
            'workflow_name': 'test_exec',
            'state': "ERROR",
        },
        {
            'id': 'running_not_expired',
            'name': 'running_not_expired',
            'created_at': _shift(days=3, minutes=10),
            'updated_at': _shift(days=3),
            'workflow_name': 'test_exec',
            'state': "RUNNING",
        },
        {
            'id': 'running_not_expired2',
            'name': 'running_not_expired2',
            'created_at': _shift(days=3, minutes=10),
            'updated_at': _shift(days=4),
            'workflow_name': 'test_exec',
            'state': "RUNNING",
        },
        {
            'id': 'success_not_expired',
            'name': 'success_not_expired',
            'created_at': _shift(minutes=15),
            'updated_at': _shift(minutes=5),
            'workflow_name': 'test_exec',
            'state': "SUCCESS",
        },
        {
            'id': 'abc',
            'name': 'cancelled_expired',
            'created_at': _shift(minutes=60),
            'updated_at': _shift(minutes=59),
            'workflow_name': 'test_exec',
            'state': "CANCELLED",
        },
        {
            'id': 'cancelled_not_expired',
            'name': 'cancelled_not_expired',
            'created_at': _shift(minutes=15),
            'updated_at': _shift(minutes=6),
            'workflow_name': 'test_exec',
            'state': "CANCELLED",
        }
    ]

    for wf_exec in wf_execs:
        db_api.create_workflow_execution(wf_exec)

    # Create a nested workflow execution.
    db_api.create_task_execution({
        'id': 'running_not_expired',
        'workflow_execution_id': 'success_not_expired',
        'name': 'my_task'
    })

    db_api.create_workflow_execution({
        'id': 'expired_but_not_a_parent',
        'name': 'expired_but_not_a_parent',
        'created_at': _shift(days=15),
        'updated_at': _shift(days=10),
        'workflow_name': 'test_exec',
        'state': "SUCCESS",
        'task_execution_id': 'running_not_expired'
    })
def test_cache_workflow_spec_by_execution_id(self):
    """Spec cache keyed by execution id stays valid across wf updates."""
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="Echo"
    """

    wfs = wf_service.create_workflows(wf_text)

    # Both caches start empty.
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

    wf_def = wfs[0]

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wf_def.id, wf_def.updated_at)

    self.assertEqual(1, len(wf_spec.get_tasks()))

    # Fetching by definition id fills only the definition cache.
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    with db_api.transaction():
        wf_ex = db_api.create_workflow_execution({
            'id': '1-2-3-4',
            'name': 'wf',
            'workflow_id': wf_def.id,
            'spec': wf_spec.to_dict(),
            'state': states.RUNNING
        })

        # Check that we can get a valid spec by execution id.
        wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
            wf_ex.id)

        self.assertEqual(1, len(wf_spec_by_exec_id.get_tasks()))

    # Now update workflow definition and check that cache is updated too.
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="1"

        task2:
          action: std.echo output="2"
    """

    wfs = wf_service.update_workflows(wf_text)

    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wfs[0].id, wfs[0].updated_at)

    self.assertEqual(2, len(wf_spec.get_tasks()))

    # The old definition entry plus the new one.
    self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_execution_spec_cache_size())

    # Now finally update execution cache and check that we can
    # get a valid spec by execution id.
    spec_parser.cache_workflow_spec_by_execution_id(wf_ex.id, wf_spec)

    wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
        wf_ex.id)

    self.assertEqual(2, len(wf_spec_by_exec_id.get_tasks()))
def _load_executions():
    """Persist every execution defined in the WF_EXECS fixture."""
    for values in WF_EXECS:
        db_api.create_workflow_execution(values)
def test_cache_workflow_spec_by_execution_id(self):
    """Spec cache keyed by execution id stays valid across wf updates."""
    workflow_yaml = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="Echo"
    """

    wfs = wf_service.create_workflows(workflow_yaml)

    # Both caches start empty.
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

    wf_def = wfs[0]

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wf_def.id,
        wf_def.updated_at
    )

    self.assertEqual(1, len(wf_spec.get_tasks()))

    # Fetching by definition id fills only the definition cache.
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_ex = db_api.create_workflow_execution({
        'id': '1-2-3-4',
        'name': 'wf',
        'workflow_id': wf_def.id,
        'spec': wf_spec.to_dict(),
        'state': states.RUNNING
    })

    # Check that we can get a valid spec by execution id.
    fetched_spec = spec_parser.get_workflow_spec_by_execution_id(
        wf_ex.id
    )

    self.assertEqual(1, len(fetched_spec.get_tasks()))

    # Now update workflow definition and check that cache is updated too.
    workflow_yaml = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="1"

        task2:
          action: std.echo output="2"
    """

    wfs = wf_service.update_workflows(workflow_yaml)

    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wfs[0].id,
        wfs[0].updated_at
    )

    self.assertEqual(2, len(wf_spec.get_tasks()))

    # The old definition entry plus the new one.
    self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_execution_spec_cache_size())

    # Now finally update execution cache and check that we can
    # get a valid spec by execution id.
    spec_parser.cache_workflow_spec_by_execution_id(wf_ex.id, wf_spec)

    fetched_spec = spec_parser.get_workflow_spec_by_execution_id(
        wf_ex.id
    )

    self.assertEqual(2, len(fetched_spec.get_tasks()))