def test_update_workflows(self):
    db_wfs = wf_service.create_workflows(WORKFLOW_LIST)

    self.assertEqual(2, len(db_wfs))

    # Workflow 1.
    wf1_db = self._assert_single_item(db_wfs, name='wf1')
    wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

    self.assertEqual('wf1', wf1_spec.get_name())
    self.assertEqual('reverse', wf1_spec.get_type())
    self.assertIn('param1', wf1_spec.get_input())
    self.assertIs(wf1_spec.get_input().get('param1'), utils.NotDefined)

    db_wfs = wf_service.update_workflows(UPDATED_WORKFLOW_LIST)

    self.assertEqual(1, len(db_wfs))

    wf1_db = self._assert_single_item(db_wfs, name='wf1')
    wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

    self.assertEqual('wf1', wf1_spec.get_name())
    self.assertListEqual([], wf1_spec.get_tags())
    self.assertEqual('reverse', wf1_spec.get_type())
    self.assertIn('param1', wf1_spec.get_input())
    self.assertIn('param2', wf1_spec.get_input())
    self.assertIs(wf1_spec.get_input().get('param1'), utils.NotDefined)
    self.assertIs(wf1_spec.get_input().get('param2'), utils.NotDefined)
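The WORKFLOW_LIST and UPDATED_WORKFLOW_LIST constants are defined elsewhere in the test module and are not shown in this excerpt. A purely hypothetical pair of definitions, reconstructed only from the assertions above (two workflows created initially; wf1 is a reverse workflow whose input gains a second parameter with no default after the update), could look like this:

# Hypothetical workflow definitions, sketched only from the assertions in
# the test above; the real constants in the test module may differ (the
# 'direct' type of wf2 in particular is an assumption).
WORKFLOW_LIST = """
---
version: '2.0'

wf1:
  type: reverse
  input:
    - param1

  tasks:
    task1:
      action: std.echo output="Hi"

wf2:
  type: direct

  tasks:
    task1:
      action: std.echo output="Hey"
"""

UPDATED_WORKFLOW_LIST = """
---
version: '2.0'

wf1:
  type: reverse
  input:
    - param1
    - param2

  tasks:
    task1:
      action: std.echo output="Hi"
"""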
def put(self, identifier=None):
    """Update one or more workflows.

    :param identifier: Optional. If provided, it's the UUID of a workflow.
        Only one workflow can be updated with the identifier param.

    The text is allowed to have definitions of multiple workflows. In this
    case they will all be updated.
    """
    acl.enforce('workflows:update', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    if scope not in resources.SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (resources.SCOPE_TYPES.values, scope))

    LOG.info("Update workflow(s) [definition=%s]", definition)

    db_wfs = workflows.update_workflows(definition, scope=scope,
                                        identifier=identifier)

    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]

    workflow_list = [
        resources.Workflow.from_dict(wf) for wf in models_dicts
    ]

    return (workflow_list[0].to_json() if identifier
            else resources.Workflows(workflows=workflow_list).to_json())
def put(self, identifier=None):
    """Update one or more workflows.

    :param identifier: Optional. If provided, it's the UUID of a workflow.
        Only one workflow can be updated with the identifier param.

    The text is allowed to have definitions of multiple workflows. In this
    case they will all be updated.
    """
    acl.enforce('workflows:update', context.ctx())

    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    if scope not in SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (SCOPE_TYPES.values, scope)
        )

    LOG.info("Update workflow(s) [definition=%s]" % definition)

    db_wfs = workflows.update_workflows(
        definition,
        scope=scope,
        identifier=identifier
    )

    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]
    workflow_list = [Workflow.from_dict(wf) for wf in models_dicts]

    return (workflow_list[0].to_json() if identifier
            else Workflows(workflows=workflow_list).to_json())
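Either variant of the controller above can be exercised with a plain HTTP PUT. The following is a minimal client-side sketch, assuming the controller is mounted at the standard Mistral v2 path /v2/workflows on the default port 8989; the host, the workflow text and the text/plain content type are assumptions, not taken from the code above.

# Hypothetical client-side usage sketch; not part of the controller code.
import requests

definition = """
version: '2.0'

wf1:
  tasks:
    task1:
      action: std.echo output="Hi"
"""

resp = requests.put(
    'http://localhost:8989/v2/workflows',      # assumed mount point and port
    params={'scope': 'public'},                # validated against SCOPE_TYPES
    data=definition,                           # read server-side via pecan.request.text
    headers={'Content-Type': 'text/plain'}     # assumed content type
)

print(resp.status_code, resp.json())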
def test_workflow_spec_cache_update_via_workflow_service(self):
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="Echo"
    """

    wfs = wf_service.create_workflows(wf_text)

    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wfs[0].id,
        wfs[0].updated_at
    )

    self.assertEqual(1, len(wf_spec.get_tasks()))
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    # Now update workflow definition and check that cache is updated too.
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="1"

        task2:
          action: std.echo output="2"
    """

    wfs = wf_service.update_workflows(wf_text)

    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wfs[0].id,
        wfs[0].updated_at
    )

    self.assertEqual(2, len(wf_spec.get_tasks()))
    self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
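The assertions above rely on the definition spec cache being keyed by both the definition id and its updated_at timestamp, so updating a workflow produces a second cache entry rather than overwriting the first. A deliberately simplified sketch of that idea follows; it is not Mistral's actual spec_parser implementation, and the parse callable is a hypothetical stand-in for the real YAML-to-spec parsing step.

import collections

# Simplified cache keyed by (definition id, updated_at).
_WF_DEF_SPEC_CACHE = collections.OrderedDict()


def get_workflow_spec_by_definition_id(def_id, updated_at, parse):
    """Return the cached spec for this revision, parsing it on a miss."""
    key = (def_id, updated_at)

    if key not in _WF_DEF_SPEC_CACHE:
        _WF_DEF_SPEC_CACHE[key] = parse(def_id)

    return _WF_DEF_SPEC_CACHE[key]


# Updating a workflow changes 'updated_at', so the next lookup misses the
# old key and adds a second entry -- the behaviour the assertions on
# get_wf_definition_spec_cache_size() observe in the test above.
get_workflow_spec_by_definition_id('wf-1', 't0', parse=lambda _id: {'tasks': 1})
get_workflow_spec_by_definition_id('wf-1', 't1', parse=lambda _id: {'tasks': 2})

assert len(_WF_DEF_SPEC_CACHE) == 2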
def put(self):
    """Update one or more workflows.

    NOTE: The text is allowed to have definitions of multiple workflows.
        In this case they will all be updated.
    """
    definition = pecan.request.text

    LOG.info("Update workflow(s) [definition=%s]" % definition)

    db_wfs = workflows.update_workflows(definition)

    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]
    workflow_list = [Workflow.from_dict(wf) for wf in models_dicts]

    return Workflows(workflows=workflow_list).to_string()
def put(self):
    """Update one or more workflows.

    NOTE: The text is allowed to have definitions of multiple workflows.
        In this case they will all be updated.
    """
    definition = pecan.request.text
    scope = pecan.request.GET.get('scope', 'private')

    if scope not in SCOPE_TYPES.values:
        raise exc.InvalidModelException(
            "Scope must be one of the following: %s; actual: "
            "%s" % (SCOPE_TYPES.values, scope)
        )

    LOG.info("Update workflow(s) [definition=%s]" % definition)

    db_wfs = workflows.update_workflows(definition, scope=scope)

    models_dicts = [db_wf.to_dict() for db_wf in db_wfs]
    workflow_list = [Workflow.from_dict(wf) for wf in models_dicts]

    return Workflows(workflows=workflow_list).to_string()
def test_update_workflows(self):
    db_wfs = wf_service.create_workflows(WORKFLOW_LIST)

    self.assertEqual(2, len(db_wfs))

    # Workflow 1.
    wf1_db = self._assert_single_item(db_wfs, name='wf1')
    wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

    self.assertEqual('wf1', wf1_spec.get_name())
    self.assertEqual('reverse', wf1_spec.get_type())
    self.assertIn('param1', wf1_spec.get_input())
    self.assertIs(
        wf1_spec.get_input().get('param1'),
        utils.NotDefined
    )

    db_wfs = wf_service.update_workflows(UPDATED_WORKFLOW_LIST)

    self.assertEqual(1, len(db_wfs))

    wf1_db = self._assert_single_item(db_wfs, name='wf1')
    wf1_spec = spec_parser.get_workflow_spec(wf1_db.spec)

    self.assertEqual('wf1', wf1_spec.get_name())
    self.assertListEqual([], wf1_spec.get_tags())
    self.assertEqual('reverse', wf1_spec.get_type())
    self.assertIn('param1', wf1_spec.get_input())
    self.assertIn('param2', wf1_spec.get_input())
    self.assertIs(
        wf1_spec.get_input().get('param1'),
        utils.NotDefined
    )
    self.assertIs(
        wf1_spec.get_input().get('param2'),
        utils.NotDefined
    )
def test_cache_workflow_spec_by_execution_id(self):
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="Echo"
    """

    wfs = wf_service.create_workflows(wf_text)

    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

    wf_def = wfs[0]

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wf_def.id, wf_def.updated_at)

    self.assertEqual(1, len(wf_spec.get_tasks()))
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    with db_api.transaction():
        wf_ex = db_api.create_workflow_execution({
            'id': '1-2-3-4',
            'name': 'wf',
            'workflow_id': wf_def.id,
            'spec': wf_spec.to_dict(),
            'state': states.RUNNING
        })

        # Check that we can get a valid spec by execution id.
        wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
            wf_ex.id)

    self.assertEqual(1, len(wf_spec_by_exec_id.get_tasks()))

    # Now update workflow definition and check that cache is updated too.
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="1"

        task2:
          action: std.echo output="2"
    """

    wfs = wf_service.update_workflows(wf_text)

    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wfs[0].id, wfs[0].updated_at)

    self.assertEqual(2, len(wf_spec.get_tasks()))
    self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_execution_spec_cache_size())

    # Now finally update execution cache and check that we can
    # get a valid spec by execution id.
    spec_parser.cache_workflow_spec_by_execution_id(wf_ex.id, wf_spec)

    wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
        wf_ex.id)

    self.assertEqual(2, len(wf_spec_by_exec_id.get_tasks()))
def test_cache_workflow_spec_by_execution_id(self):
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="Echo"
    """

    wfs = wf_service.create_workflows(wf_text)

    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(0, spec_parser.get_wf_definition_spec_cache_size())

    wf_def = wfs[0]

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wf_def.id,
        wf_def.updated_at
    )

    self.assertEqual(1, len(wf_spec.get_tasks()))
    self.assertEqual(0, spec_parser.get_wf_execution_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_ex = db_api.create_workflow_execution({
        'id': '1-2-3-4',
        'name': 'wf',
        'workflow_id': wf_def.id,
        'spec': wf_spec.to_dict(),
        'state': states.RUNNING
    })

    # Check that we can get a valid spec by execution id.
    wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
        wf_ex.id
    )

    self.assertEqual(1, len(wf_spec_by_exec_id.get_tasks()))

    # Now update workflow definition and check that cache is updated too.
    wf_text = """
    version: '2.0'

    wf:
      tasks:
        task1:
          action: std.echo output="1"

        task2:
          action: std.echo output="2"
    """

    wfs = wf_service.update_workflows(wf_text)

    self.assertEqual(1, spec_parser.get_wf_definition_spec_cache_size())

    wf_spec = spec_parser.get_workflow_spec_by_definition_id(
        wfs[0].id,
        wfs[0].updated_at
    )

    self.assertEqual(2, len(wf_spec.get_tasks()))
    self.assertEqual(2, spec_parser.get_wf_definition_spec_cache_size())
    self.assertEqual(1, spec_parser.get_wf_execution_spec_cache_size())

    # Now finally update execution cache and check that we can
    # get a valid spec by execution id.
    spec_parser.cache_workflow_spec_by_execution_id(wf_ex.id, wf_spec)

    wf_spec_by_exec_id = spec_parser.get_workflow_spec_by_execution_id(
        wf_ex.id
    )

    self.assertEqual(2, len(wf_spec_by_exec_id.get_tasks()))