def test_get_users_for_project(self):
    """
    Get all members of a project except the current user.
    """
    # Persist five users named test_user1 .. test_user5.
    for idx in range(1, 6):
        dao.store_entity(model.User("test_user" + str(idx), "test_pass",
                                    "*****@*****.**", False, "user"))
    admin = dao.get_user_by_name("test_user1")
    member1 = dao.get_user_by_name("test_user2")
    member2 = dao.get_user_by_name("test_user5")
    project_data = dict(name="test_proj", description="test_desc", users=[member1.id, member2.id])
    project = ProjectService().store_project(admin, True, None, **project_data)

    all_users, members, pag = self.user_service.get_users_for_project(admin.username, project.id)

    self.assertEquals(len(members), 2, "More members than there should be.")
    self.assertEquals(len(all_users), 5, "Admin should not be viewed as member. "
                                         "Neither should users that were not part of the project's users list.")
    self.assertEqual(pag, 1, "Invalid total pages number.")
    for user in all_users:
        self.assertNotEqual(user.username, admin.username, "Admin is in members!")
def test_burst_delete_with_project(self):
    """
    Test that on removal of a project all burst related data is cleared.
    """
    TestFactory.store_burst(self.test_project.id)
    project_service = ProjectService()
    project_service.remove_project(self.test_project.id)
    self._check_burst_removed()
def test_get_users_second_page_del(self):
    """
    Try to get the second page of users for a given project where only one user on last page.
    Then delete that user.
    """
    # Create USERS_PAGE_SIZE + 1 users so the member list spills onto a second
    # page with exactly one entry. Plain lists replace the original exec/eval
    # string juggling, which was unreadable, Python-2-only syntax and a
    # code-injection anti-pattern.
    members = []
    for i in range(USERS_PAGE_SIZE + 1):
        user = model.User("test_user" + str(i), "test_pass", "*****@*****.**", False, "user")
        dao.store_entity(user)
        members.append(dao.get_user_by_name("test_user" + str(i)))
    admin = dao.get_user_by_name("test_user1")
    data = dict(name='test_proj', description='test_desc',
                users=[member.id for member in members])
    project = ProjectService().store_project(admin, True, None, **data)

    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username, project.id, 2)
    self.assertEqual(len(page_users), 1, 'Paging not working properly')
    self.assertEqual(len(all_users), USERS_PAGE_SIZE + 1, 'Not all members returned')
    self.assertEqual(pag, 2, 'Invalid page number returned')

    # Delete the user that was called member2 in the original; the second page
    # should now be empty and the paging should collapse to a single page.
    self.user_service.delete_user(members[2].id)
    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username, project.id, 2)
    self.assertEqual(len(page_users), 0, 'Paging not working properly')
    self.assertEqual(len(all_users), USERS_PAGE_SIZE, 'Not all members returned')
    self.assertEqual(pag, 1, 'Invalid page number returned')

    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username, project.id, 1)
    self.assertEqual(len(page_users), USERS_PAGE_SIZE, 'Paging not working properly')
    self.assertEqual(len(all_users), USERS_PAGE_SIZE, 'Not all members returned')
    self.assertEqual(pag, 1, 'Invalid page number returned')
def setUp(self):
    """
    Reset the database before each test.
    """
    initialize_storage()
    self.test_user = dao.store_entity(
        model.User("test_user", "test_pass", "*****@*****.**", True, "user"))
    project_data = dict(name='test_proj', description='desc', users=[])
    self.test_project = ProjectService().store_project(self.test_user, True, None, **project_data)

    metadata = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "INTERMEDIATE"}
    algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
    self.simulator_adapter = FlowService().build_adapter_instance(algo_group)
    operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id,
                                json.dumps(SIMULATOR_PARAMETERS),
                                meta=json.dumps(metadata),
                                status=model.STATUS_STARTED,
                                method_name=ABCAdapter.LAUNCH_METHOD)
    self.operation = dao.store_entity(operation)
    # Attach a freshly built connectivity to the shared simulator parameters.
    SIMULATOR_PARAMETERS['connectivity'] = self._create_connectivity(self.CONNECTIVITY_NODES)
def index(self, **data):
    """
    Login page (with or without messages).

    GET renders the login form; POST validates the submitted fields through
    LoginForm, checks credentials, stores the user in the session and
    redirects to the profile page on success.
    """
    template_specification = dict(mainContent="user/login", title="Login", data=data)
    if cherrypy.request.method == 'POST':
        form = LoginForm()
        try:
            # May raise formencode.Invalid when the submitted fields fail validation.
            data = form.to_python(data)
            username = data[KEY_USERNAME]
            password = data[KEY_PASSWORD]
            user = self.user_service.check_login(username, password)
            if user is not None:
                common.add2session(common.KEY_USER, user)
                common.set_info_message('Welcome ' + username)
                self.logger.debug("User " + username + " has just logged in!")
                if user.selected_project is not None:
                    # Restore the project the user had selected in a previous session.
                    prj = user.selected_project
                    prj = ProjectService().find_project(prj)
                    self._mark_selected(prj)
                # Raised on purpose inside the try block: HTTPRedirect is not a
                # formencode.Invalid, so it propagates to CherryPy as intended.
                raise cherrypy.HTTPRedirect('/user/profile')
            else:
                common.set_error_message('Wrong username/password, or user not yet validated...')
                self.logger.debug("Wrong username " + username + " !!!")
        except formencode.Invalid as excep:
            # Surface field-level validation errors back into the template.
            template_specification[common.KEY_ERRORS] = excep.unpack_errors()
    return self.fill_default_attributes(template_specification)
def __init__(self, *args, **kwargs):
    """Initialize the controller with its logger and collaborating services."""
    super().__init__(*args, **kwargs)
    self.logger = get_logger(self.__class__.__module__)
    self.files_helper = FilesHelper()
    self.user_service = UserService()
    self.project_service = ProjectService()
    self.operation_service = OperationService()
def test_burst_delete_with_project(self):
    """
    Test that on removal of a project all burst related data is cleared.
    """
    self._prepare_and_launch_sync_burst()
    service = ProjectService()
    service.remove_project(self.test_project.id)
    self._check_burst_removed()
def test_get_users_second_page(self):
    """
    Try to get the second page of users for a given project
    """
    # Persist USERS_PAGE_SIZE + 3 users and re-read them. Plain lists replace
    # the original exec/eval constructs, which were Python-2-only syntax,
    # unreadable and a code-injection anti-pattern.
    members = []
    for i in range(USERS_PAGE_SIZE + 3):
        user = model.User("test_user" + str(i), "test_pass", "*****@*****.**", False, "user")
        dao.store_entity(user)
        members.append(dao.get_user_by_name("test_user" + str(i)))
    admin = dao.get_user_by_name("test_user1")
    data = dict(name='test_proj', description='test_desc',
                users=[member.id for member in members])
    project = ProjectService().store_project(admin, True, None, **data)

    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username, project.id, 2)
    # The second page holds only the overflow beyond the first full page.
    self.assertEqual(len(page_users), (USERS_PAGE_SIZE + 3) % USERS_PAGE_SIZE)
    self.assertEqual(len(all_users), USERS_PAGE_SIZE + 3, 'Not all members returned')
    self.assertEqual(pag, 2, 'Invalid page number returned')
def store_exploration_section(self, val_range, step, dt_group_guid):
    """
    Launching method for further simulations.
    """
    range_values = [float(num) for num in val_range.split(",")]
    step_values = [float(num) for num in step.split(",")]

    datatype_group = ProjectService().get_datatypegroup_by_gid(dt_group_guid)
    operation_group = datatype_group.parent_operation_group
    operation_obj = self.flow_service.load_operation(datatype_group.fk_from_operation)
    parameters = json.loads(operation_obj.parameters)

    # Only the range names are reused; the stored range dictionaries are replaced below.
    range1name, _ = json.loads(operation_group.range1)
    range2name, _ = json.loads(operation_group.range2)
    parameters[RANGE_PARAMETER_1] = range1name
    parameters[RANGE_PARAMETER_2] = range2name

    # Change the existing simulator parameters to be min/max/step type ranges.
    parameters[range1name] = json.dumps({constants.ATT_MINVALUE: range_values[0],
                                         constants.ATT_MAXVALUE: range_values[1],
                                         constants.ATT_STEP: step_values[0]})  # x axis parameter
    parameters[range2name] = json.dumps({constants.ATT_MINVALUE: range_values[2],
                                         constants.ATT_MAXVALUE: range_values[3],
                                         constants.ATT_STEP: step_values[1]})  # y axis parameter

    OperationService().group_operation_launch(common.get_logged_user().id, common.get_current_project().id,
                                              operation_obj.algorithm.id, operation_obj.algorithm.fk_category,
                                              datatype_group, **parameters)
    return [True, 'Stored the exploration material successfully']
def setup_method(self):
    """
    Reset the database before each test.
    """
    self.zip_path = None
    self.import_service = ImportService()
    self.project_service = ProjectService()
def initialize_two_projects(self, simple_datatype_factory, datatype_with_storage_factory):
    """
    Creates a user, an algorithm and 2 projects
    Project src_project will have an operation and 2 datatypes
    Project dest_project will be empty.
    Initializes a flow and a project service
    """
    self.clean_database(delete_folders=True)

    # Source project: owned by Source_User, receives one datatype of each kind.
    source_user = TestFactory.create_user('Source_User')
    self.src_usr_id = source_user.id
    self.src_project = TestFactory.create_project(source_user, "Source_Project")
    self.red_datatype = simple_datatype_factory(subject=self.GEORGE1st)
    self.blue_datatype = datatype_with_storage_factory(subject=self.GEORGE2nd)

    # Destination project: owned by Destination_User, starts empty.
    destination_user = TestFactory.create_user('Destination_User')
    self.dst_usr_id = destination_user.id
    self.dest_project = TestFactory.create_project(destination_user, "Destination_Project")

    self.flow_service = FlowService()
    self.project_service = ProjectService()
def test_delete_dt_free_HDD_space(self):
    """
    Launch two operations and give enough available space for user so that both should finish.
    """
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3",
                                         "TestAdapterHDDRequired")
    data = {"test": 100}
    # Cap the disk quota at exactly one result, so each launch must fit alone.
    TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

    self._assert_no_dt2()
    self.operation_service.initiate_operation(self.test_user, self.test_project.id,
                                              adapter, tmp_folder, **data)
    datatype = self._assert_stored_dt2()

    # Now free some space and relaunch
    ProjectService().remove_datatype(self.test_project.id, datatype.gid)
    self._assert_no_dt2()
    self.operation_service.initiate_operation(self.test_user, self.test_project.id,
                                              adapter, tmp_folder, **data)
    self._assert_stored_dt2()
def generate_users(nr_users, nr_projects):
    """
    The generate_users method will create a clean state db with
    :param nr_users: number of users to be generated (with random roles between
                    CLINICIAN and RESEARCHER and random validated state)
    :param nr_projects: maximum number of projects to be generated for each user
    """
    config.EVENTS_FOLDER = ''
    users = []
    for i in range(nr_users):
        coin_flip = random.randint(0, 1)
        role = 'CLINICIAN' if coin_flip == 1 else 'RESEARCHER'
        # md5() requires bytes on Python 3; a bytes literal is equally valid on
        # Python 2, so this is backward-compatible (md5("test") raises TypeError on 3.x).
        password = md5(b"test").hexdigest()
        new_user = model.User("gen" + str(i), password, "*****@*****.**", True, role)
        dao.store_entity(new_user)
        new_user = dao.get_user_by_name("gen" + str(i))
        ExtremeTestFactory.VALIDATION_DICT[new_user.id] = 0
        users.append(new_user)
    for i in range(nr_users):
        current_user = dao.get_user_by_name("gen" + str(i))
        projects_for_user = random.randint(0, nr_projects)
        for j in range(projects_for_user):
            data = dict(name='GeneratedProject' + str(i) + '_' + str(j),
                        description='test_desc',
                        users=ExtremeTestFactory.get_users_ids(random.randint(0, nr_users - 3),
                                                               nr_users, current_user.id, users))
            ProjectService().store_project(current_user, True, None, **data)
            ExtremeTestFactory.VALIDATION_DICT[current_user.id] += 1
def test_get_users_for_project(self):
    """
    Get all members of a project except the current user.
    """
    stored_ids = []
    for idx in range(5):
        entity = model_project.User("test_user" + str(idx), "test_user_no" + str(idx),
                                    "pass", "*****@*****.**")
        stored_ids.append(dao.store_entity(entity).id)
    admin = dao.get_user_by_name("test_user1")
    member1 = dao.get_user_by_name("test_user2")
    member2 = dao.get_user_by_name("test_user4")
    project = ProjectService().store_project(
        admin, True, None,
        name="test_proj", description="test_desc", users=[member1.id, member2.id])

    all_users, members, pag = self.user_service.get_users_for_project(admin.username, project.id)

    assert len(members) == 3, "More members than there should be."
    assert len(all_users) == 5
    assert pag == 1, "Invalid total pages number."
    # The admin must be listed among the members, but never as an editable entry.
    assert any(u.username == admin.username for u in members), "Admin is expected to be a project member"
    assert not any(u.username == admin.username for u in all_users), "Admin membership should not be editable"
def transactional_setup_method(self):
    """
    Reset the database before each test.
    """
    self.test_user = TestFactory.create_user()
    self.project_service = ProjectService()
    self.storage_interface = StorageInterface()
def _export_linked_datatypes(self, project, zip_file):
    """Copy datatypes linked from other projects into the export archive."""
    linked_paths = ProjectService().get_linked_datatypes_storage_path(project)

    if not linked_paths:
        # do not export an empty operation
        return

    # Make a import operation which will contain links to other projects
    algo = dao.get_algorithm_by_module(TVB_IMPORTER_MODULE, TVB_IMPORTER_CLASS)
    fake_op = model_operation.Operation(None, None, project.id, algo.id)
    fake_op.project = project
    fake_op.algorithm = algo
    fake_op.id = 'links-to-external-projects'
    fake_op.start_now()
    fake_op.mark_complete(model_operation.STATUS_FINISHED)
    operation_folder = self.files_helper.get_operation_folder(fake_op.project.name, fake_op.id)
    operation_folder_name = os.path.basename(operation_folder)

    # add linked datatypes to archive in the import operation
    for linked_path in linked_paths:
        archive_path = operation_folder_name + '/' + os.path.basename(linked_path)
        zip_file.write(linked_path, archive_path)

    # remove these files, since we only want them in export archive
    self.files_helper.remove_folder(operation_folder)
def run_export(project_id, loose_irrelevant=False):
    """Export the project identified by project_id and print the archive path."""
    project_service = ProjectService()
    export_manager = ExportManager()

    project = project_service.find_project(project_id)
    export_file = export_manager.export_project(project, loose_irrelevant)
    print("Check the exported file: %s" % export_file)
def test_delete_dt_free_hdd_space(self, test_adapter_factory, operation_factory):
    """
    Launch two operations and give enough available space for user so that both should finish.
    """
    test_adapter_factory(adapter_class=TestAdapterHDDRequired)
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter3",
                                         "TestAdapterHDDRequired")
    view_model = adapter.get_view_model()()
    # Quota exactly matches one result, so the second launch needs the freed space.
    TvbProfile.current.MAX_DISK_SPACE = float(adapter.get_required_disk_size(view_model))

    self._assert_no_ddti()
    self.operation_service.initiate_operation(self.test_user, self.test_project,
                                              adapter, model_view=view_model)
    datatype = self._assert_stored_ddti()

    # Now free some space and relaunch
    ProjectService().remove_datatype(self.test_project.id, datatype.gid)
    self._assert_no_ddti()
    self.operation_service.initiate_operation(self.test_user, self.test_project,
                                              adapter, model_view=view_model)
    self._assert_stored_ddti()
def __init__(self):
    """Populate the database with a user, a project, an algorithm and one finished operation."""
    micro_postfix = "_%d" % int(time.time() * 1000000)

    # Here create all structures needed later for data types creation
    self.files_helper = FilesHelper()

    # First create user
    self.user = dao.store_entity(model.User("datatype_factory_user" + micro_postfix,
                                            "test_pass", "*****@*****.**" + micro_postfix,
                                            True, "user"))

    # Now create a project
    project_data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
    self.project = ProjectService().store_project(self.user, True, None, **project_data)

    # Create algorithm
    category = model.AlgorithmCategory('one', True)
    dao.store_entity(category)
    self.algorithm = dao.store_entity(model.Algorithm("test_module1", "classname1", category.id))

    # Create an operation
    self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                 DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
    operation = model.Operation(self.user.id, self.project.id, self.algorithm.id,
                                'test parameters', meta=json.dumps(self.meta),
                                status=model.STATUS_FINISHED)
    self.operation = dao.store_entity(operation)
def stop_operation(operation_id, is_group=False, remove_after_stop=False):
    # type: (int, bool, bool) -> bool
    """
    Stop (also named Cancel) the operation given by operation_id,
    and potentially also remove it after (with all linked data).
    In case the Operation has a linked Burst, remove that too.
    :param operation_id: ID for Operation (or OperationGroup) to be canceled/removed
    :param is_group: When true stop all the operations from that group.
    :param remove_after_stop: if True, also remove the operation(s) after stopping
    :returns True if the stop step was successfully
    """
    if is_group:
        # Recurse over every member of the group; succeed if any stop succeeded.
        group = ProjectService.get_operation_group_by_id(operation_id)
        stopped_any = False
        for operation in ProjectService.get_operations_in_group(group):
            stopped_any = OperationService.stop_operation(operation.id, False, remove_after_stop) or stopped_any
        return stopped_any

    if dao.try_get_operation_by_id(operation_id) is None:
        # Unknown operation: nothing was stopped.
        return False

    stopped = BackendClientFactory.stop_operation(operation_id)
    if remove_after_stop:
        # Capture the linked burst before the operation disappears.
        burst_config = dao.get_burst_for_direct_operation_id(operation_id)
        ProjectService().remove_operation(operation_id)
        if burst_config is not None:
            stopped = dao.remove_entity(BurstConfiguration, burst_config.id) or stopped
    return stopped
def transactional_setup_method(self):
    """
    Reset the database before each test.
    """
    self.test_user = TestFactory.create_user()
    self.project_service = ProjectService()
    self.structure_helper = FilesHelper()
def initialize_two_projects(self, dummy_datatype_index_factory, project_factory, user_factory):
    """
    Creates a user, an algorithm and 2 projects
    Project src_project will have an operation and 2 datatypes
    Project dest_project will be empty.
    Initializes a flow and a project service
    """
    self.clean_database(delete_folders=True)
    self.flow_service = FlowService()
    self.project_service = ProjectService()

    # Create the source project with 2 datatypes in it
    source_user = user_factory(username="******")
    self.src_usr_id = source_user.id
    self.src_project = project_factory(source_user, "Src_Project")
    connectivity_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip')
    self.red_datatype = TestFactory.import_zip_connectivity(source_user, self.src_project,
                                                            connectivity_zip, "John")
    sensors_file = os.path.join(os.path.dirname(tvb_data.__file__), 'sensors', 'eeg_unitvector_62.txt.bz2')
    self.blue_datatype = TestFactory.import_sensors(source_user, self.src_project,
                                                    sensors_file, EEG_POLYMORPHIC_IDENTITY)
    assert 1 == self.red_datatypes_in(self.src_project.id)
    assert 1 == self.blue_datatypes_in(self.src_project.id)

    # create the destination project empty
    self.dst_user = user_factory(username='******')
    self.dst_usr_id = self.dst_user.id
    self.dest_project = project_factory(self.dst_user, "Destination_Project")
    assert 0 == self.red_datatypes_in(self.dest_project.id)
    assert 0 == self.blue_datatypes_in(self.dest_project.id)
def setUp(self):
    """
    Reset the database before each test.
    """
    config.EVENTS_FOLDER = ''
    self.test_user = TestFactory.create_user()
    self.project_service = ProjectService()
    self.structure_helper = FilesHelper()
def new_project(name):
    """Create and persist a project named `name`, owned by the first administrator."""
    administrator = UserService.get_administrators()[0]
    return ProjectService().store_project(administrator, True, None,
                                          name=name, description=name, users=[administrator])
def create_project(admin, name="TestProject", description='description', users=None):
    """
    Create persisted Project entity, with no linked DataTypes.
    :returns: Project entity after persistence.
    """
    # None (not a shared mutable default) signals "no members".
    members = [] if users is None else users
    project_data = dict(name=name, description=description, users=members)
    return ProjectService().store_project(admin, True, None, **project_data)
def __init__(self):
    """Build the controller and precompute the static navigation submenus."""
    self.logger = get_logger(self.__class__.__module__)
    self.user_service = UserService()
    self.project_service = ProjectService()
    self.algorithm_service = AlgorithmService()
    self.analyze_category_link = '/flow/step_analyzers'
    # Lazily populated elsewhere; stays None until analyzers are first needed.
    self.analyze_adapters = None

    self.connectivity_tab_link = '/flow/step_connectivity'
    view_category = self.algorithm_service.get_visualisers_category()
    conn_id = self.algorithm_service.get_algorithm_by_module_and_class(IntrospectionRegistry.CONNECTIVITY_MODULE,
                                                                       IntrospectionRegistry.CONNECTIVITY_CLASS).id
    connectivity_link = self.get_url_adapter(view_category.id, conn_id)

    # Static entries of the connectivity submenu; the Allen entry below is optional.
    self.connectivity_submenu = [dict(title="Large Scale Connectivity", link=connectivity_link,
                                      subsection=WebStructure.SUB_SECTION_CONNECTIVITY,
                                      description="View Connectivity Regions. Perform Connectivity lesions"),
                                 dict(title="Local Connectivity", link='/spatial/localconnectivity/step_1/1',
                                      subsection=WebStructure.SUB_SECTION_LOCAL_CONNECTIVITY,
                                      description="Create or view existent Local Connectivity entities.")]

    allen_algo = self.algorithm_service.get_algorithm_by_module_and_class(IntrospectionRegistry.ALLEN_CREATOR_MODULE,
                                                                          IntrospectionRegistry.ALLEN_CREATOR_CLASS)
    if allen_algo and not allen_algo.removed:
        # Only add the Allen Creator if AllenSDK is installed
        allen_link = self.get_url_adapter(allen_algo.fk_category, allen_algo.id)
        self.connectivity_submenu.append(dict(title="Allen Connectome Builder", link=allen_link,
                                              subsection=WebStructure.SUB_SECTION_ALLEN,
                                              description="Download data from Allen dataset and create a mouse connectome"))

    self.burst_submenu = [dict(link='/burst', subsection=WebStructure.SUB_SECTION_BURST,
                               title='Simulation Cockpit', description='Manage simulations'),
                          dict(link='/burst/dynamic', subsection='dynamic',
                               title='Phase plane', description='Configure model dynamics')]
def stop_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the
    operations from that group.
    """
    operation_service = OperationService()

    if int(is_group) == 0:
        # Single operation: stop it and optionally drop it afterwards.
        stopped = operation_service.stop_operation(operation_id)
        if remove_after_stop:
            ProjectService().remove_operation(operation_id)
        return stopped

    # Group: the id identifies an OperationGroup whose members are handled one by one.
    stopped = False
    group = ProjectService.get_operation_group_by_id(operation_id)
    for operation in ProjectService.get_operations_in_group(group):
        current = operation_service.stop_operation(operation.id)
        if remove_after_stop:
            ProjectService().remove_operation(operation.id)
        stopped = stopped or current
    return stopped
def cancel_or_remove_burst(self, burst_id):
    """
    Cancel (if burst is still running) or Remove the burst given by burst_id.
    :returns True when Remove operation was done and False when Cancel
    """
    burst_entity = dao.get_burst_by_id(burst_id)
    if burst_entity.status == burst_entity.BURST_RUNNING:
        # Still running: only cancel, never delete a live burst.
        self.stop_burst(burst_entity)
        return False

    service = ProjectService()
    ## Remove each DataType in current burst.
    ## We can not leave all on cascade, because it won't work on SQLite for mapped dataTypes.
    datatypes = dao.get_all_datatypes_in_burst(burst_id)
    ## Get operations linked to current burst before removing the burst or else
    ## the burst won't be there to identify operations any more.
    remaining_ops = dao.get_operations_in_burst(burst_id)

    # Remove burst first to delete work-flow steps which still hold foreign keys to operations.
    correct = dao.remove_entity(burst_entity.__class__, burst_id)
    if not correct:
        raise RemoveDataTypeException("Could not remove Burst entity!")

    for datatype in datatypes:
        service.remove_datatype(burst_entity.fk_project, datatype.gid, False)

    ## Remove all Operations remained.
    correct = True
    remaining_op_groups = set()
    project = dao.get_project_by_id(burst_entity.fk_project)

    for oper in remaining_ops:
        is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
        if len(is_remaining) == 0:
            ### Operation removed cascaded.
            continue
        # Drop the operation's group once, the first time any member of it is seen.
        if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
            is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
            if len(is_remaining) > 0:
                remaining_op_groups.add(oper.fk_operation_group)
                correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
        correct = correct and dao.remove_entity(oper.__class__, oper.id)
        service.structure_helper.remove_operation_data(project.name, oper.id)

    if not correct:
        raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!!")
    return True
def index(self, **data):
    """
    Login page (with or without messages).

    GET renders the form. POST authenticates either against Keycloak (when
    KEYCLOAK_LOGIN_ENABLED) or against the local user database, stores the
    user in the session and redirects to the profile page on success.
    """
    template_specification = dict(mainContent="user/login", title="Login", data=data)
    self._set_base_url()
    if cherrypy.request.method == 'POST':
        keycloak_login = TvbProfile.current.KEYCLOAK_LOGIN_ENABLED
        # Pick the form matching the configured authentication backend.
        form = LoginForm() if not keycloak_login else KeycloakLoginForm()

        try:
            # May raise formencode.Invalid on validation failure.
            data = form.to_python(data)
            if keycloak_login:
                # Resolve the TVB user from the Keycloak token's user info.
                auth_token = data[KEY_AUTH_TOKEN]
                kc_user_info = AuthorizationManager(
                    TvbProfile.current.KEYCLOAK_WEB_CONFIG).get_keycloak_instance().userinfo(auth_token)
                user = self.user_service.get_external_db_user(kc_user_info)
            else:
                username = data[KEY_USERNAME]
                password = data[KEY_PASSWORD]
                user = self.user_service.check_login(username, password)
            if user is not None:
                common.add2session(common.KEY_USER, user)
                common.set_info_message('Welcome ' + user.display_name)
                self.logger.debug("User " + user.username + " has just logged in!")
                if user.selected_project is not None:
                    # Restore the previously selected project for this user.
                    prj = user.selected_project
                    prj = ProjectService().find_project(prj)
                    self._mark_selected(prj)
                # Raised on purpose inside the try block; HTTPRedirect is not a
                # formencode.Invalid, so it propagates to CherryPy as intended.
                raise cherrypy.HTTPRedirect('/user/profile')
            elif not keycloak_login:
                common.set_error_message('Wrong username/password, or user not yet validated...')
                self.logger.debug("Wrong username " + username + " !!!")
            else:
                # Keycloak authenticated the token, but no matching validated TVB account exists.
                common.set_error_message(
                    'Your account is not validated. Please contact us at [email protected] for more details')
                self.logger.debug("Invalidated account")
                template_specification[common.KEY_ERRORS] = {'invalid_user': True}
        except formencode.Invalid as excep:
            # Surface field-level validation errors back into the template.
            template_specification[common.KEY_ERRORS] = excep.unpack_errors()
    return self.fill_default_attributes(template_specification)
def build(admin, name="TestProject", description='description', users=None):
    """
    Create persisted Project entity, with no linked DataTypes.
    :returns: Project entity after persistence.
    """
    # Reuse an already-persisted project with the same name, if one exists.
    existing = dao.get_generic_entity(Project, name, "name")
    if existing:
        return existing[0]
    members = users if users is not None else []
    return ProjectService().store_project(admin, True, None,
                                          name=name, description=description, users=members)