def test_workflow_metrics(self):
    """Running two error workflows and one normal workflow should be reflected in /api/metrics/workflows."""
    error_id = execution_db_help.load_workflow('multiactionError', 'multiactionErrorWorkflow').id
    test_id = execution_db_help.load_workflow('multiactionWorkflowTest', 'multiactionWorkflow').id

    executor = current_app.running_context.executor
    executor.execute_workflow(error_id)
    executor.execute_workflow(error_id)
    executor.execute_workflow(test_id)
    executor.wait_and_reset(3)  # three executions queued above

    response = self.test_client.get('/api/metrics/workflows', headers=self.headers)
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.get_data(as_text=True))
    self.assertDictEqual(payload, _convert_workflow_time_averages())
def test_multi_action_workflow(self):
    """Both actions of the multi-action workflow should run and produce their expected results."""
    workflow = execution_db_help.load_workflow('multiactionWorkflowTest', 'multiactionWorkflow')
    wanted = ['start', '1']
    action_ids = [action.id for action in workflow.actions if action.name in wanted]
    setup_subscriptions_for_action(workflow.id, action_ids)

    multiprocessedexecutor.execute_workflow(workflow.id)
    multiprocessedexecutor.wait_and_reset(1)

    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(action_id, self.start, datetime.utcnow()))
    self.assertEqual(len(actions), 2)

    expected_results = [
        {'result': {"message": "HELLO WORLD"}, 'status': 'Success'},
        {'result': "REPEATING: Hello World", 'status': 'Success'},
    ]
    for result in (action['data'] for action in actions):
        self.assertIn(result, expected_results)
def test_execute_workflow_change_arguments(self):
    """Overriding the 'call' argument via the queue endpoint should change the action's output."""
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    result = {'count': 0}

    @WalkoffEvent.ActionExecutionSuccess.connect
    def y(sender, **kwargs):
        result['count'] += 1
        result['data'] = kwargs['data']['data']

    data = {"workflow_id": str(workflow.id),
            "arguments": [{"name": "call", "value": "CHANGE INPUT"}]}
    self.post_with_status_check('/api/workflowqueue', headers=self.headers, status_code=SUCCESS_ASYNC,
                                content_type="application/json", data=json.dumps(data))

    current_app.running_context.executor.wait_and_reset(1)
    self.assertEqual(result['count'], 1)
    self.assertDictEqual(result['data'],
                         {'status': 'Success', 'result': 'REPEATING: CHANGE INPUT'})
def test_workflow_with_dataflow(self):
    """All three actions of the dataflow workflow should execute and yield the expected values."""
    workflow = execution_db_help.load_workflow('dataflowTest', 'dataflowWorkflow')
    wanted = ['start', '1', '2']
    action_ids = [action.id for action in workflow.actions if action.name in wanted]
    setup_subscriptions_for_action(workflow.id, action_ids)

    multiprocessedexecutor.execute_workflow(workflow.id)
    multiprocessedexecutor.wait_and_reset(1)

    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(action_id, self.start, datetime.utcnow()))
    self.assertEqual(len(actions), 3)

    expected_results = [
        {'result': 6, 'status': 'Success'},
        {'result': 6, 'status': 'Success'},
        {'result': 15, 'status': 'Success'},
    ]
    for result in (action['data'] for action in actions):
        self.assertIn(result, expected_results)
def test_execute_workflow_change_to_invalid_arguments(self):
    """An argument override missing its value should be rejected as invalid input."""
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    payload = {"workflow_id": str(workflow.id), "arguments": [{"name": "call"}]}
    self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                status_code=INVALID_INPUT_ERROR,
                                content_type="application/json", data=json.dumps(payload))
def test_trigger_execute_multiple_workflows(self):
    """Triggering several awaiting workflows should fire each trigger-related event once per workflow."""
    workflow = execution_db_help.load_workflow('triggerActionWorkflow', 'triggerActionWorkflow')
    ids = []
    num_workflows = 2
    expected_events = (WalkoffEvent.TriggerActionAwaitingData,
                       WalkoffEvent.TriggerActionTaken,
                       WalkoffEvent.ActionExecutionSuccess)
    callback_count = {event: 0 for event in expected_events}

    def wait_thread():
        # Repeatedly push trigger data until every queued execution has been triggered
        # or ~5 seconds elapse. Only loops once all executions have been queued.
        time.sleep(0.1)
        executed_ids = set()
        timeout = 0
        threshold = 5
        while len(ids) == num_workflows and len(executed_ids) != len(ids) and timeout < threshold:
            data = {"execution_ids": ids, "data_in": {"data": "1"}}
            trigger_response = self.put_with_status_check('/api/triggers/send_data', headers=self.headers,
                                                          data=json.dumps(data), status_code=SUCCESS,
                                                          content_type='application/json')
            executed_ids.update(set.intersection(set(ids), set(trigger_response)))
            time.sleep(0.1)
            timeout += 0.1

    @WalkoffEvent.TriggerActionAwaitingData.connect
    def send_data(sender, **kwargs):
        callback_count[WalkoffEvent.TriggerActionAwaitingData] += 1
        # Start sending trigger data only once every workflow is awaiting data.
        if callback_count[WalkoffEvent.TriggerActionAwaitingData] == num_workflows:
            threading.Thread(target=wait_thread).start()

    @WalkoffEvent.TriggerActionTaken.connect
    def trigger_taken_callback(sender, **kwargs):
        callback_count[WalkoffEvent.TriggerActionTaken] += 1

    @WalkoffEvent.ActionExecutionSuccess.connect
    def action_success_callback(sender, **kwargs):
        callback_count[WalkoffEvent.ActionExecutionSuccess] += 1

    execute_data = {"workflow_id": str(workflow.id)}
    for _ in range(num_workflows):
        response = self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                               status_code=SUCCESS_ASYNC, content_type="application/json",
                                               data=json.dumps(execute_data))
        ids.append(response['id'])

    current_app.running_context.executor.wait_and_reset(num_workflows)
    for count in callback_count.values():
        self.assertEqual(count, num_workflows)
def test_execute_invalid_workflow(self):
    """Queueing a workflow flagged as invalid should be rejected with INVALID_INPUT_ERROR.

    Fix: the original POST sent no request body at all, so the error it asserted
    came from the missing payload rather than from the workflow's invalidity.
    The request now carries the invalid workflow's id.
    """
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    workflow.is_valid = False
    from walkoff.executiondb import ExecutionDatabase
    ExecutionDatabase.instance.session.add(workflow)
    ExecutionDatabase.instance.session.commit()
    self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                status_code=INVALID_INPUT_ERROR,
                                content_type="application/json",
                                data=json.dumps({"workflow_id": str(workflow.id)}))
def test_trigger_execute_change_input(self):
    """Trigger data that also overrides the 'call' argument should change the executed action's result."""
    workflow = execution_db_help.load_workflow('triggerActionWorkflow', 'triggerActionWorkflow')
    expected_events = (WalkoffEvent.TriggerActionAwaitingData,
                       WalkoffEvent.TriggerActionTaken,
                       WalkoffEvent.ActionExecutionSuccess)
    callback_count = {event: 0 for event in expected_events}

    def wait_thread():
        # Push trigger data (with the argument override) until the execution is
        # triggered or ~5 seconds elapse.
        data = {"execution_ids": ids, "data_in": {"data": "1"},
                "arguments": [{"name": "call", "value": "CHANGE INPUT"}]}
        time.sleep(0.1)
        executed_ids = set()
        timeout = 0
        threshold = 5
        while len(executed_ids) != len(ids) and timeout < threshold:
            resp = self.put_with_status_check('/api/triggers/send_data', headers=self.headers,
                                              data=json.dumps(data), status_code=SUCCESS,
                                              content_type='application/json')
            executed_ids.update(set.intersection(set(ids), set(resp)))
            time.sleep(0.1)
            timeout += 0.1

    @WalkoffEvent.TriggerActionAwaitingData.connect
    def send_data(sender, **kwargs):
        callback_count[WalkoffEvent.TriggerActionAwaitingData] += 1
        threading.Thread(target=wait_thread).start()

    @WalkoffEvent.TriggerActionTaken.connect
    def trigger_taken_callback(sender, **kwargs):
        callback_count[WalkoffEvent.TriggerActionTaken] += 1

    @WalkoffEvent.ActionExecutionSuccess.connect
    def action_success_callback(sender, **kwargs):
        self.assertDictEqual(kwargs['data']['data'],
                             {'result': 'REPEATING: CHANGE INPUT', 'status': 'Success'})
        callback_count[WalkoffEvent.ActionExecutionSuccess] += 1

    execute_data = {"workflow_id": str(workflow.id)}
    response = self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                           status_code=SUCCESS_ASYNC, content_type="application/json",
                                           data=json.dumps(execute_data))
    ids = [response['id']]

    current_app.running_context.executor.wait_and_reset(1)
    for event, count in callback_count.items():
        self.assertEqual(count, 1)
def test_pause_and_resume_workflow(self):
    """Pausing a running workflow and resuming it should still let it finish successfully."""
    execution_id = None
    result = {'paused': False, 'resumed': False}

    def pause_resume_thread():
        multiprocessedexecutor.pause_workflow(execution_id)

    @WalkoffEvent.WorkflowPaused.connect
    def workflow_paused_listener(sender, **kwargs):
        result['paused'] = True
        wf_status = executiondb.execution_db.session.query(WorkflowStatus).filter_by(
            execution_id=sender['execution_id']).first()
        wf_status.paused()
        executiondb.execution_db.session.commit()
        multiprocessedexecutor.resume_workflow(execution_id)

    @WalkoffEvent.WorkflowResumed.connect
    def workflow_resumed_listener(sender, **kwargs):
        result['resumed'] = True

    workflow = execution_db_help.load_workflow('pauseResumeWorkflowFixed', 'pauseResumeWorkflow')
    action_ids = [action.id for action in workflow.actions]
    workflow_events = ['Workflow Paused', 'Workflow Resumed']
    setup_subscriptions_for_action(workflow.id, action_ids, workflow_events=workflow_events)

    execution_id = multiprocessedexecutor.execute_workflow(workflow.id)
    # Poll the status table until the workflow is actually running, then pause it
    # from a separate thread.
    while True:
        executiondb.execution_db.session.expire_all()
        workflow_status = executiondb.execution_db.session.query(WorkflowStatus).filter_by(
            execution_id=execution_id).first()
        if workflow_status and workflow_status.status == WorkflowStatusEnum.running:
            threading.Thread(target=pause_resume_thread).start()
            time.sleep(0)
            break

    multiprocessedexecutor.wait_and_reset(1)
    self.assertTrue(result['paused'])
    self.assertTrue(result['resumed'])
    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(action_id, self.start, datetime.utcnow()))
    self.assertGreaterEqual(len(actions), 1)
    self.assertEqual(actions[-1]['data']['result'], 'success')
def test_trigger_execute(self):
    """A triggered workflow should log awaiting-data, taken, and success events in order."""
    workflow = execution_db_help.load_workflow('triggerActionWorkflow', 'triggerActionWorkflow')
    action_ids = [action.id for action in workflow.actions if action.name == 'start']
    setup_subscriptions_for_action(workflow.id, action_ids, action_events=self.action_events)

    def wait_thread():
        # Repeatedly push trigger data until the execution is triggered or ~5 seconds elapse.
        time.sleep(0.1)
        triggered = set()
        timeout = 0
        threshold = 5
        data = {"execution_ids": ids, "data_in": {"data": "1"}}
        while len(triggered) != len(ids) and timeout < threshold:
            resp = self.put_with_status_check('/api/triggers/send_data', headers=self.headers,
                                              data=json.dumps(data), status_code=SUCCESS,
                                              content_type='application/json')
            triggered.update(set.intersection(set(ids), set(resp)))
            time.sleep(0.1)
            timeout += 0.1

    @WalkoffEvent.TriggerActionAwaitingData.connect
    def send_data(sender, **kwargs):
        threading.Thread(target=wait_thread).start()

    execute_data = {"workflow_id": str(workflow.id)}
    response = self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                           status_code=SUCCESS_ASYNC, content_type="application/json",
                                           data=json.dumps(execute_data))
    ids = [response['id']]

    flask_server.running_context.executor.wait_and_reset(1)

    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(str(action_id), self.start, datetime.utcnow()))
    self.assertEqual(len(actions), 3)
    events = [event['message'] for event in actions]
    expected_events = ['Trigger action awaiting data', 'Trigger action taken',
                       'Action executed successfully']
    self.assertListEqual(expected_events, events)
def assert_execution_event_log(self, playbook, workflow, expected_events):
    """Execute the named workflow and assert its CommonWorkflowSignal events arrive in the expected order."""
    seen_events = []

    @WalkoffEvent.CommonWorkflowSignal.connect
    def log_event(sender, **kwargs):
        self.assertIn('event', kwargs)
        seen_events.append(kwargs['event'])

    workflow = execution_db_help.load_workflow(playbook, workflow)
    self.executor.execute_workflow(workflow.id)
    self.executor.wait_and_reset(1)
    self.assertListEqual(seen_events, expected_events)
def test_pause_and_resume_workflow(self):
    """Pause/resume round trip via the app's running context, with case subscriptions for both events."""
    execution_id = None
    result = {status: False for status in ('paused', 'resumed', 'called')}
    workflow = execution_db_help.load_workflow('pauseResumeWorkflowFixed', 'pauseResumeWorkflow')
    workflow_id = workflow.id

    case = Case(name='name')
    self.app.running_context.case_db.session.add(case)
    self.app.running_context.case_db.session.commit()
    subscriptions = [Subscription(id=str(workflow_id),
                                  events=[WalkoffEvent.WorkflowPaused.signal_name])]
    self.app.running_context.executor.create_case(case.id, subscriptions)
    self.app.running_context.case_logger.add_subscriptions(
        case.id, [Subscription(str(workflow_id), [WalkoffEvent.WorkflowResumed.signal_name])])

    def pause_resume_thread():
        self.app.running_context.executor.pause_workflow(execution_id)

    @WalkoffEvent.WorkflowPaused.connect
    def workflow_paused_listener(sender, **kwargs):
        result['paused'] = True
        wf_status = self.app.running_context.execution_db.session.query(WorkflowStatus).filter_by(
            execution_id=sender['execution_id']).first()
        wf_status.paused()
        self.app.running_context.execution_db.session.commit()
        self.app.running_context.executor.resume_workflow(execution_id)

    @WalkoffEvent.WorkflowResumed.connect
    def workflow_resumed_listener(sender, **kwargs):
        result['resumed'] = True

    @WalkoffEvent.WorkflowExecutionStart.connect
    def workflow_started_listener(sender, **kwargs):
        self.assertEqual(sender['id'], str(workflow_id))
        result['called'] = True

    execution_id = self.app.running_context.executor.execute_workflow(workflow_id)
    # Poll the status table until the workflow is running, then pause it from a thread.
    while True:
        self.app.running_context.execution_db.session.expire_all()
        workflow_status = self.app.running_context.execution_db.session.query(WorkflowStatus).filter_by(
            execution_id=execution_id).first()
        if workflow_status and workflow_status.status == WorkflowStatusEnum.running:
            threading.Thread(target=pause_resume_thread).start()
            time.sleep(0)
            break

    self.app.running_context.executor.wait_and_reset(1)
    for status in ('called', 'paused', 'resumed'):
        self.assertTrue(result[status])
def test_action_metrics(self):
    """Executing the error workflow should populate the per-app action metrics endpoint."""
    workflow = execution_db_help.load_workflow('multiactionError', 'multiactionErrorWorkflow')
    server.running_context.executor.execute_workflow(workflow.id)
    server.running_context.executor.wait_and_reset(1)

    response = self.app.get('/api/metrics/apps', headers=self.headers)
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.get_data(as_text=True))
    self.assertDictEqual(payload, _convert_action_time_averages())
def test_environment_variables_in_workflow(self):
    """A workflow defining its own environment variables should use them in its action result."""
    workflow = execution_db_help.load_workflow('environmentVariables', 'environmentVariables')
    result = {'value': None}

    def action_finished_listener(sender, **kwargs):
        result['value'] = kwargs['data']['data']

    WalkoffEvent.ActionExecutionSuccess.connect(action_finished_listener)

    self.executor.execute_workflow(workflow.id)
    self.executor.wait_and_reset(1)
    self.assertDictEqual(result['value'],
                         {'result': 'REPEATING: CHANGE INPUT', 'status': 'Success'})
def test_execute_workflow_change_to_invalid_arguments(self):
    """An argument override with no value should be rejected as invalid input."""
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    payload = {"workflow_id": str(workflow.id), "arguments": [{"name": "call"}]}
    self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                status_code=INVALID_INPUT_ERROR,
                                content_type="application/json", data=json.dumps(payload))
def test_simple_workflow_execution(self):
    """Executing a workflow should fire WorkflowExecutionStart with the workflow's id."""
    workflow = execution_db_help.load_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    workflow_id = workflow.id
    result = {'called': False}

    @WalkoffEvent.WorkflowExecutionStart.connect
    def started(sender, **data):
        self.assertEqual(sender['id'], str(workflow_id))
        result['called'] = True

    self.app.running_context.executor.execute_workflow(workflow_id)
    self.app.running_context.executor.wait_and_reset(1)
    self.assertTrue(result['called'])
def test_environment_variables_in_execute(self):
    """An environment variable passed at execute time should be resolved into the action's argument."""
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    env_var = EnvironmentVariable(value='CHANGE INPUT', id=uuid4())
    # Point the first action's argument at the env var instead of a literal value.
    workflow.actions[0].arguments[0].value = None
    workflow.actions[0].arguments[0].reference = str(env_var.id)
    result = {'value': None}

    def action_finished_listener(sender, **kwargs):
        result['value'] = kwargs['data']['data']

    WalkoffEvent.ActionExecutionSuccess.connect(action_finished_listener)

    self.executor.execute_workflow(workflow.id, environment_variables=[env_var])
    self.executor.wait_and_reset(1)
    self.assertDictEqual(result['value'],
                         {'result': 'REPEATING: CHANGE INPUT', 'status': 'Success'})
def test_change_action_input(self):
    """Start arguments passed to execute_workflow should override the action's stored input."""
    arguments = [Argument(name='call', value='CHANGE INPUT')]
    result = {'value': None}

    def action_finished_listener(sender, **kwargs):
        result['value'] = kwargs['data']['data']

    WalkoffEvent.ActionExecutionSuccess.connect(action_finished_listener)

    workflow = execution_db_help.load_workflow('simpleDataManipulationWorkflow', 'helloWorldWorkflow')
    self.executor.execute_workflow(workflow.id, start_arguments=arguments)
    self.executor.wait_and_reset(1)
    self.assertDictEqual(result['value'],
                         {'result': 'REPEATING: CHANGE INPUT', 'status': 'Success'})
def test_execute_workflow(self):
    """POSTing a workflow to the queue should return its execution id and fire WorkflowExecutionStart.

    Fix: ``result['called']`` was initialized to True and never asserted, so the
    start-event callback was never actually verified; it is now initialized to
    False and asserted after the executor drains.
    """
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    workflow_id = str(workflow.id)
    data = {"workflow_id": workflow_id}
    result = {'called': False}  # flipped by the start-event callback below

    @WalkoffEvent.WorkflowExecutionStart.connect
    def workflow_started(sender, **data):
        self.assertEqual(sender['id'], workflow_id)
        result['called'] = True

    response = self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                           data=json.dumps(data), status_code=SUCCESS_ASYNC,
                                           content_type="application/json")
    current_app.running_context.executor.wait_and_reset(1)
    self.assertSetEqual(set(response.keys()), {'id'})
    self.assertTrue(result['called'])
def test_execute_multiple_workflows(self):
    """Filling the executor to capacity should start every queued workflow exactly once."""
    workflow = execution_db_help.load_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    workflow_id = workflow.id
    capacity = walkoff.config.Config.NUMBER_PROCESSES * walkoff.config.Config.NUMBER_THREADS_PER_PROCESS
    result = {'workflows_executed': 0}

    @WalkoffEvent.WorkflowExecutionStart.connect
    def started(sender, **data):
        self.assertEqual(sender['id'], str(workflow_id))
        result['workflows_executed'] += 1

    for _ in range(capacity):
        self.app.running_context.executor.execute_workflow(workflow_id)

    self.app.running_context.executor.wait_and_reset(capacity)
    self.assertEqual(result['workflows_executed'], capacity)
def test_execute_workflow(self):
    """Queueing a workflow via the API should execute its start action once with the expected result."""
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    action_ids = [action.id for action in workflow.actions if action.name == 'start']
    setup_subscriptions_for_action(workflow.id, action_ids)
    start = datetime.utcnow()

    payload = {"workflow_id": str(workflow.id)}
    response = self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                           data=json.dumps(payload), status_code=SUCCESS_ASYNC,
                                           content_type="application/json")
    flask_server.running_context.executor.wait_and_reset(1)
    self.assertIn('id', response)

    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(action_id, start, datetime.utcnow()))
    self.assertEqual(len(actions), 1)
    result = actions[0]['data']
    self.assertEqual(result, {'status': 'Success', 'result': 'REPEATING: Hello World'})
def test_execute_multiple_workflows(self):
    """Queueing twice the executor's capacity should still execute every workflow's start action."""
    workflow = execution_db_help.load_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    action_ids = [action.id for action in workflow.actions if action.name == 'start']
    setup_subscriptions_for_action(workflow.id, action_ids)

    capacity = walkoff.config.config.num_processes * walkoff.config.config.num_threads_per_process
    total = capacity * 2
    for _ in range(total):
        multiprocessedexecutor.execute_workflow(workflow.id)
    multiprocessedexecutor.wait_and_reset(total)

    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(action_id, self.start, datetime.utcnow()))
    self.assertEqual(len(actions), total)
def test_workflow_execution_events(self):
    """A subscribed case should record exactly the two workflow-level events it subscribed to."""
    workflow = execution_db_help.load_workflow('multiactionWorkflowTest', 'multiactionWorkflow')
    subs = {'case1': {str(workflow.id): [WalkoffEvent.AppInstanceCreated.signal_name,
                                         WalkoffEvent.WorkflowShutdown.signal_name]}}
    case_subscription.set_subscriptions(subs)

    self.executor.execute_workflow(workflow.id)
    self.executor.wait_and_reset(1)

    execution_events = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'case1').first().events.all()
    self.assertEqual(len(execution_events), 2,
                     'Incorrect length of event history. '
                     'Expected {0}, got {1}'.format(2, len(execution_events)))
def test_execute_workflow_change_arguments(self):
    """Overriding the 'call' argument through the queue endpoint should change the action's result."""
    workflow = execution_db_help.load_workflow('test', 'helloWorldWorkflow')
    result = {'count': 0}

    @WalkoffEvent.ActionExecutionSuccess.connect
    def y(sender, **kwargs):
        result['count'] += 1
        result['data'] = kwargs['data']['data']

    payload = {"workflow_id": str(workflow.id),
               "arguments": [{"name": "call", "value": "CHANGE INPUT"}]}
    self.post_with_status_check('/api/workflowqueue', headers=self.headers,
                                status_code=SUCCESS_ASYNC, content_type="application/json",
                                data=json.dumps(payload))

    current_app.running_context.executor.wait_and_reset(1)
    self.assertEqual(result['count'], 1)
    self.assertDictEqual(result['data'],
                         {'status': 'Success', 'result': 'REPEATING: CHANGE INPUT'})
def test_simple_workflow_execution(self):
    """The basic workflow's start action should run exactly once and produce the expected result."""
    workflow = execution_db_help.load_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    action_ids = [action.id for action in workflow.actions if action.name == 'start']
    setup_subscriptions_for_action(workflow.id, action_ids)

    multiprocessedexecutor.execute_workflow(workflow.id)
    multiprocessedexecutor.wait_and_reset(1)

    actions = []
    for action_id in action_ids:
        actions.extend(executed_actions(action_id, self.start, datetime.utcnow()))
    self.assertEqual(len(actions), 1)
    result = actions[0]['data']
    self.assertDictEqual(result, {'result': "REPEATING: Hello World", 'status': 'Success'})
def test_action_execution_events(self):
    """A case subscribed to start/success action events should record both for a single action run."""
    workflow = execution_db_help.load_workflow('basicWorkflowTest', 'helloWorldWorkflow')
    action_ids = [str(action.id) for action in workflow.actions]
    action_events = [WalkoffEvent.ActionExecutionSuccess.signal_name,
                     WalkoffEvent.ActionStarted.signal_name]
    subs = {'case1': {action_id: action_events for action_id in action_ids}}
    case_subscription.set_subscriptions(subs)

    self.executor.execute_workflow(workflow.id)
    self.executor.wait_and_reset(1)

    execution_events = case_database.case_db.session.query(case_database.Case) \
        .filter(case_database.Case.name == 'case1').first().events.all()
    self.assertEqual(len(execution_events), 2,
                     'Incorrect length of event history. '
                     'Expected {0}, got {1}'.format(2, len(execution_events)))