def setUp(self):
    """
    Reset the database before each test.
    """
    # self.clean_database()
    self.project_service = ProjectService()
    self.test_user = TestFactory.create_user()

def stop_burst_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    For a given operation id that is part of a burst, stop that whole burst.
    """
    operation_id = int(operation_id)
    if int(is_group) == 0:
        operation = self.flow_service.load_operation(operation_id)
    else:
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        first_op = ProjectService.get_operations_in_group(op_group)[0]
        operation = self.flow_service.load_operation(int(first_op.id))
    try:
        burst_service = BurstService()
        burst_service.stop_burst(operation.burst)
        if remove_after_stop:
            current_burst = base.get_from_session(base.KEY_BURST_CONFIG)
            if current_burst and current_burst.id == operation.burst.id:
                base.remove_from_session(base.KEY_BURST_CONFIG)
            burst_service.remove_burst(operation.burst.id)
        return True
    except Exception, ex:
        self.logger.exception(ex)
        return False

def setUp(self):
    """
    Reset the database before each test.
    """
    EVENTS_FOLDER = ''
    # self.clean_database()
    self.project_service = ProjectService()
    self.structure_helper = FilesHelper()
    self.test_user = TestFactory.create_user()

def setUp(self):
    """
    Prepare before each test.
    """
    self.project_service = ProjectService()
    self.flow_service = FlowService()
    self.structure_helper = FilesHelper()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")
    self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
        single_filter=StaticFiltersFactory.RELEVANT_VIEW)
    self.full_filter = StaticFiltersFactory.build_datatype_filters(
        single_filter=StaticFiltersFactory.FULL_VIEW)

def reload_burst_operation(self, operation_id, is_group, **_):
    """
    Find out from which burst this operation was launched. Set that burst
    as the selected one and redirect to the burst page.
    """
    is_group = int(is_group)
    if not is_group:
        operation = self.flow_service.load_operation(int(operation_id))
    else:
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        first_op = ProjectService.get_operations_in_group(op_group)[0]
        operation = self.flow_service.load_operation(int(first_op.id))
    operation.burst.prepare_after_load()
    base.add2session(base.KEY_BURST_CONFIG, operation.burst)
    raise cherrypy.HTTPRedirect("/burst/")

def remove_burst(self, burst_id):
    """
    Remove the burst given by burst_id. If the burst is still running, just stop it.
    """
    burst_entity = dao.get_burst_by_id(burst_id)
    if burst_entity.status == burst_entity.BURST_RUNNING:
        self.stop_burst(burst_entity)
        return False

    service = ProjectService()
    ## Remove each DataType in the current burst.
    ## We cannot leave it all to cascade, because that does not work on SQLite for mapped dataTypes.
    datatypes = dao.get_all_datatypes_in_burst(burst_id)
    ## Get the operations linked to the current burst before removing the burst,
    ## or else the burst won't be there to identify the operations any more.
    remaining_ops = dao.get_operations_in_burst(burst_id)
    # Remove the burst first, to delete workflow steps which still hold foreign keys to operations.
    correct = dao.remove_entity(burst_entity.__class__, burst_id)
    if not correct:
        raise RemoveDataTypeException("Could not remove Burst entity!")

    for datatype in datatypes:
        service.remove_datatype(burst_entity.fk_project, datatype.gid, True)

    ## Remove all remaining operations.
    correct = True
    remaining_op_groups = set()
    for oper in remaining_ops:
        is_remaining = dao.get_generic_entity(oper.__class__, oper.id)
        if len(is_remaining) == 0:
            ### Operation was already removed by cascade.
            continue
        if oper.fk_operation_group is not None and oper.fk_operation_group not in remaining_op_groups:
            is_remaining = dao.get_generic_entity(model.OperationGroup, oper.fk_operation_group)
            if len(is_remaining) > 0:
                remaining_op_groups.add(oper.fk_operation_group)
                correct = correct and dao.remove_entity(model.OperationGroup, oper.fk_operation_group)
        correct = correct and dao.remove_entity(oper.__class__, oper.id)

    if not correct:
        raise RemoveDataTypeException("Could not remove Burst because a linked operation could not be dropped!")
    return True

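A hedged usage sketch for the method above: `remove_burst` returns `False` when the burst was still running (in that case it only stops it), so a caller that really wants the burst gone may need to poll and retry. The retry loop below is an assumption for illustration, not code from the source; it reuses the `BurstService`, `dao` and `time` names that appear elsewhere in this section.

# Sketch (assumed retry pattern; not taken from the source):
burst_service = BurstService()
if not burst_service.remove_burst(burst_id):
    # The burst was running and was only stopped; wait until it leaves
    # the RUNNING status, then remove it for real.
    burst = dao.get_burst_by_id(burst_id)
    while burst.status == burst.BURST_RUNNING:
        time.sleep(0.5)
        burst = dao.get_burst_by_id(burst_id)
    burst_service.remove_burst(burst_id)
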
def generate_users(nr_users, nr_projects):
    """
    The generate_users method will create a clean-state DB with:

    :param nr_users: number of users to be generated (with random roles
        between CLINICIAN and RESEARCHER and a random validated state)
    :param nr_projects: maximum number of projects to be generated for each user
    """
    EVENTS_FOLDER = ''
    users = []
    for i in range(nr_users):
        coin_flip = random.randint(0, 1)
        role = 'CLINICIAN' if coin_flip == 1 else 'RESEARCHER'
        password = md5("test").hexdigest()
        new_user = model.User("gen" + str(i), password, "*****@*****.**", True, role)
        dao.store_entity(new_user)
        new_user = dao.get_user_by_name("gen" + str(i))
        ExtremeTestFactory.VALIDATION_DICT[new_user.id] = 0
        users.append(new_user)
    for i in range(nr_users):
        current_user = dao.get_user_by_name("gen" + str(i))
        projects_for_user = random.randint(0, nr_projects)
        for j in range(projects_for_user):
            data = dict(name='GeneratedProject' + str(i) + '_' + str(j),
                        description='test_desc',
                        users=ExtremeTestFactory.get_users_ids(random.randint(0, nr_users - 3),
                                                               nr_users, current_user.id, users))
            ProjectService().store_project(current_user, True, None, **data)
            ExtremeTestFactory.VALIDATION_DICT[current_user.id] += 1

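A minimal usage sketch for the factory above, in the way `test_retrieve_projects_random` later in this section consumes it (the concrete numbers are illustrative assumptions):

# Populate a clean database with 10 users, each owning at most 3 projects.
ExtremeTestFactory.generate_users(10, 3)
# VALIDATION_DICT now maps every generated user id to its project count,
# which the paging tests below use as the expected value.
for user_id, project_count in ExtremeTestFactory.VALIDATION_DICT.iteritems():
    assert 0 <= project_count <= 3
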
def test_get_users_second_page(self):
    """
    Try to get the second page of users for a given project.
    """
    members = []
    for i in range(USERS_PAGE_SIZE + 3):
        user = model.User("test_user" + str(i), "test_pass", "*****@*****.**", False, "user")
        dao.store_entity(user)
        members.append(dao.get_user_by_name("test_user" + str(i)))
    admin = dao.get_user_by_name("test_user1")
    data = dict(name='test_proj', description='test_desc',
                users=[member.id for member in members])
    project = ProjectService().store_project(admin, True, None, **data)
    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username,
                                                                         project.id, 2)
    self.assertEqual(len(page_users), (USERS_PAGE_SIZE + 3) % USERS_PAGE_SIZE)
    self.assertEqual(len(all_users), USERS_PAGE_SIZE + 3, 'Not all members returned')
    self.assertEqual(pag, 2, 'Invalid page number returned')

def index(self, **data):
    """
    Login page (with or without messages).
    """
    template_specification = dict(mainContent="login", title="Login", data=data)
    if cherrypy.request.method == 'POST':
        form = LoginForm()
        try:
            data = form.to_python(data)
            username = data[KEY_USERNAME]
            password = data[KEY_PASSWORD]
            user = self.user_service.check_login(username, password)
            if user is not None:
                basecontroller.add2session(basecontroller.KEY_USER, user)
                basecontroller.set_info_message('Welcome ' + username)
                self.logger.debug("User " + username + " has just logged in!")
                if user.selected_project is not None:
                    prj = user.selected_project
                    prj = ProjectService().find_project(prj)
                    self._mark_selected(prj)
                raise cherrypy.HTTPRedirect('/user/profile')
            else:
                basecontroller.set_error_message('Wrong username/password, or user not yet validated...')
                self.logger.debug("Wrong username " + username + " !!!")
        except formencode.Invalid, excep:
            template_specification[basecontroller.KEY_ERRORS] = excep.unpack_errors()

def __init__(self):
    now = datetime.now()
    micro_postfix = "_%d" % now.microsecond

    # Here create all the structures needed later for data type creation.
    self.files_helper = FilesHelper()

    # First create a user
    user = model.User("datatype_factory_user" + micro_postfix, "test_pass",
                      "*****@*****.**" + micro_postfix, True, "user")
    self.user = dao.store_entity(user)

    # Now create a project
    project_service = ProjectService()
    data = dict(name='DatatypesFactoryProject' + micro_postfix, description='test_desc', users=[])
    self.project = project_service.store_project(self.user, True, None, **data)

    # Create an algorithm
    alg_category = model.AlgorithmCategory('one', True)
    dao.store_entity(alg_category)
    alg_group = model.AlgorithmGroup("test_module1", "classname1", alg_category.id)
    dao.store_entity(alg_group)
    algorithm = model.Algorithm(alg_group.id, 'id', name='', req_data='', param_name='', output='')
    self.algorithm = dao.store_entity(algorithm)

    # Create an operation
    self.meta = {DataTypeMetaData.KEY_SUBJECT: self.USER_FULL_NAME,
                 DataTypeMetaData.KEY_STATE: self.DATATYPE_STATE}
    operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                meta=json.dumps(self.meta), status="FINISHED",
                                method_name=ABCAdapter.LAUNCH_METHOD)
    self.operation = dao.store_entity(operation)

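For orientation, a hedged sketch of how the factory above is consumed by the tests later in this section (see `test_retrieve_project_full`); the assertion is illustrative:

# The factory builds a user, a project and a FINISHED operation in one go.
dt_factory = datatypes_factory.DatatypesFactory()
assert dt_factory.operation.status == "FINISHED"
# Datatypes are then attached to that operation via the factory's
# _store_datatype helper (not shown in this section).
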
def setUp(self):
    """
    Reset the database before each test.
    """
    self.import_service = ImportService()
    self.flow_service = FlowService()
    self.project_service = ProjectService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject",
                                                   description="test_desc")
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
    TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
    self.zip_path = None

def setUp(self):
    """
    Prepare the database before each test.
    """
    self.import_service = ImportService()
    self.flow_service = FlowService()
    self.project_service = ProjectService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
    result = self.get_all_datatypes()
    self.assertEqual(len(result), 0, "There should be no data type in DB")
    TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)

def create_project(admin, name="TestProject", description='description', users=[]):
    """
    Create a persisted Project entity, with no linked DataTypes.

    :return: Project entity after persistence.
    """
    data = dict(name=name, description=description, users=users)
    return ProjectService().store_project(admin, True, None, **data)

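This helper is what most setUp methods in this section call; a minimal usage sketch, assuming the `TestFactory.create_user` helper used throughout these snippets:

# Create an admin plus one member, and a project shared between them.
admin = TestFactory.create_user('proj_admin')
member = TestFactory.create_user('proj_member')
project = TestFactory.create_project(admin, name="DemoProject",
                                     description="demo", users=[member.id])
assert project.name == "DemoProject"
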
def prepare_group_launch(self, group_gid, step_key, adapter_key, **data):
    """
    Receives as input a group GID and an algorithm given by category and id, along
    with data that gives the name of the required input parameter for the algorithm.
    Having these, generate a range of GIDs for all the datatypes in the group and
    launch a new operation group.
    """
    prj_service = ProjectService()
    dt_group = prj_service.get_datatypegroup_by_gid(group_gid)
    datatypes = prj_service.get_datatypes_from_datatype_group(dt_group.id)
    range_param_name = data['range_param_name']
    del data['range_param_name']
    data[PARAM_RANGE_1] = range_param_name
    data[range_param_name] = ','.join([dt.gid for dt in datatypes])
    OperationService().group_operation_launch(base.get_logged_user().id,
                                              base.get_current_project().id,
                                              int(adapter_key), int(step_key), **data)
    redirect_url = self._compute_back_link('operations', base.get_current_project())
    raise cherrypy.HTTPRedirect(redirect_url)

def stop_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true, stop all
    the operations from that group.
    """
    operation_service = OperationService()
    result = False
    if int(is_group) == 0:
        result = operation_service.stop_operation(operation_id)
        if remove_after_stop:
            ProjectService().remove_operation(operation_id)
    else:
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        operations_in_group = ProjectService.get_operations_in_group(op_group)
        for operation in operations_in_group:
            tmp_res = operation_service.stop_operation(operation.id)
            if remove_after_stop:
                ProjectService().remove_operation(operation.id)
            result = result or tmp_res
    return result

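A usage sketch for the controller method above; the `flow_controller` instance name and the ids are hypothetical, and note that for a group the id passed in is the operation group id:

# Stop a single (non-group) operation and remove it afterwards.
stopped = flow_controller.stop_operation(operation_id=42, is_group=0,
                                         remove_after_stop=True)
# For an operation group, pass the group id and a truthy is_group flag:
stopped_group = flow_controller.stop_operation(operation_id=7, is_group=1,
                                               remove_after_stop=True)
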
class EventHandlerTest(BaseTestCase):
    """
    This class contains tests for the tvb.core.services.eventhandler module.
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        # self.clean_database()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()

    def tearDown(self):
        """
        Clean the environment after testing (database and executors dictionary).
        """
        self.clean_database()
        eventhandler.EXECUTORS_DICT = {}

    def test_handle_event(self):
        """
        Test a defined handler for the store-project method.
        """
        path_to_events = os.path.dirname(__file__)
        eventhandler.read_events([path_to_events])
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        test_project = self.project_service.store_project(self.test_user, True, None, **data)
        # Operations will start asynchronously; give them time.
        time.sleep(1)
        gid = dao.get_last_data_with_uid("test_uid")
        self.assertTrue(gid is not None, "Nothing was stored in database!")
        datatype = dao.get_datatype_by_gid(gid)
        self.assertEqual(datatype.type, "Datatype1", "Wrong data stored!")
        self.project_service._remove_project_node_files(test_project.id, gid)

def test_delete_dt_free_HDD_space(self):
    """
    Launch two operations and give the user enough available space so that both should finish.
    """
    module = "tvb_test.adapters.testadapter3"
    class_name = "TestAdapterHDDRequired"
    group = dao.find_group(module, class_name)
    adapter = FlowService().build_adapter_instance(group)
    output = adapter.get_output()
    output_type = output[0].__name__
    data = {"test": 100}
    TVBSettings.MAX_DISK_SPACE = float(adapter.get_required_disk_size(**data))
    tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")

    dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
    self.assertEqual(len(dts), 0)
    self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                              tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD,
                                              **data)
    dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
    self.assertEqual(len(dts), 1)
    datatype = dao.get_datatype_by_id(dts[0][0])
    self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
    self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    # Now update the maximum disk size to be the size of the previously resulted datatype
    # (transform from kB to MB) plus what is estimated to be required by the next one
    # (transform from B to MB).
    ProjectService().remove_datatype(self.test_project.id, datatype.gid)
    dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
    self.assertEqual(len(dts), 0)

    self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                              tmp_folder, method_name=ABCAdapter.LAUNCH_METHOD,
                                              **data)
    dts = dao.get_values_of_datatype(self.test_project.id, Datatype2)
    self.assertEqual(len(dts), 1)
    datatype = dao.get_datatype_by_id(dts[0][0])
    self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
    self.assertEqual(datatype.type, output_type, "Wrong data stored.")

def stop_operation(self, operation_id, is_group):
    """
    Stop the operation given by operation_id. If is_group is true, stop all
    the operations from that group.
    """
    operation_service = OperationService()
    result = False
    if int(is_group) == 0:
        result = operation_service.stop_operation(operation_id)
    else:
        operation_id = operation_id.split(',')[0]
        operation = self.flow_service.load_operation(operation_id)
        operations_in_group = ProjectService.get_operations_in_group(operation.operation_group)
        for operation in operations_in_group:
            tmp_res = operation_service.stop_operation(operation.id)
            result = result or tmp_res
    return result

def _get_effective_data_type(self, data):
    """
    This method returns the data type for the provided data.
    - If the current data is a simple data type, it is returned as-is.
    - If it is a data type group, the first element of the group is returned;
      one element is enough, since all group elements share the same type.
    """
    # First check whether the current data is a DataTypeGroup
    if self.is_data_a_group(data):
        data_types = ProjectService.get_datatypes_from_datatype_group(data.id)
        if data_types is not None and len(data_types) > 0:
            # Since all objects in a group are of the same type, one is enough.
            return ABCAdapter.load_entity_by_gid(data_types[0].gid)
        else:
            return None
    else:
        return data

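Both this helper and `_get_all_data_types_arr` below branch on `is_data_a_group`, which is not shown in this section. A plausible sketch of that predicate, assuming the `DataTypeGroup` model class used elsewhere in these snippets:

def is_data_a_group(self, data):
    """
    Sketch of the predicate used above (assumption, not from the source):
    an entity counts as a group when it is an instance of the
    DataTypeGroup model.
    """
    return isinstance(data, model.DataTypeGroup)
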
def test_get_users_second_page_del(self):
    """
    Try to get the second page of users for a given project, where only one
    user is on the last page. Then delete that user.
    """
    members = []
    for i in range(USERS_PAGE_SIZE + 1):
        user = model.User("test_user" + str(i), "test_pass", "*****@*****.**", False, "user")
        dao.store_entity(user)
        members.append(dao.get_user_by_name("test_user" + str(i)))
    admin = dao.get_user_by_name("test_user1")
    data = dict(name='test_proj', description='test_desc',
                users=[member.id for member in members])
    project = ProjectService().store_project(admin, True, None, **data)

    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username,
                                                                         project.id, 2)
    self.assertEqual(len(page_users), 1, 'Paging not working properly')
    self.assertEqual(len(all_users), USERS_PAGE_SIZE + 1, 'Not all members returned')
    self.assertEqual(pag, 2, 'Invalid page number returned')

    self.user_service.delete_user(members[2].id)
    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username,
                                                                         project.id, 2)
    self.assertEqual(len(page_users), 0, 'Paging not working properly')
    self.assertEqual(len(all_users), USERS_PAGE_SIZE, 'Not all members returned')
    self.assertEqual(pag, 1, 'Invalid page number returned')

    page_users, all_users, pag = self.user_service.get_users_for_project(admin.username,
                                                                         project.id, 1)
    self.assertEqual(len(page_users), USERS_PAGE_SIZE, 'Paging not working properly')
    self.assertEqual(len(all_users), USERS_PAGE_SIZE, 'Not all members returned')
    self.assertEqual(pag, 1, 'Invalid page number returned')

def _get_all_data_types_arr(self, data):
    """
    This method builds an array with all the data types to be processed later.
    - If the current data is a simple data type, it is added to the array.
    - If it is a data type group, all its children are loaded and added to the array.
    """
    # First check whether the current data is a DataTypeGroup
    if self.is_data_a_group(data):
        data_types = ProjectService.get_datatypes_from_datatype_group(data.id)
        result = []
        if data_types is not None and len(data_types) > 0:
            for data_type in data_types:
                entity = ABCAdapter.load_entity_by_gid(data_type.gid)
                result.append(entity)
        return result
    else:
        return [data]

def setUp(self):
    """
    Reset the database before each test.
    """
    initialize_storage()
    user = model.User("test_user", "test_pass", "*****@*****.**", True, "user")
    self.test_user = dao.store_entity(user)
    data = dict(name='test_proj', description='desc', users=[])
    self.test_project = ProjectService().store_project(self.test_user, True, None, **data)
    meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW"}
    algo_group = dao.find_group(SIMULATOR_MODULE, SIMULATOR_CLASS)
    self.simulator_adapter = FlowService().build_adapter_instance(algo_group)
    self.operation = model.Operation(self.test_user.id, self.test_project.id, algo_group.id,
                                     json.dumps(SIMULATOR_PARAMETERS), meta=json.dumps(meta),
                                     status=model.STATUS_STARTED,
                                     method_name=ABCAdapter.LAUNCH_METHOD)
    self.operation = dao.store_entity(self.operation)
    SIMULATOR_PARAMETERS['connectivity'] = self._create_connectivity(self.CONNECTIVITY_NODES)

def test_get_users_for_project(self):
    """
    Get all members of a project except the current user.
    """
    for i in range(1, 6):
        user = model.User("test_user" + str(i), "test_pass", "*****@*****.**", False, "user")
        dao.store_entity(user)
    admin = dao.get_user_by_name("test_user1")
    member1 = dao.get_user_by_name("test_user2")
    member2 = dao.get_user_by_name("test_user5")
    data = dict(name="test_proj", description="test_desc", users=[member1.id, member2.id])
    project = ProjectService().store_project(admin, True, None, **data)
    all_users, members, pag = self.user_service.get_users_for_project(admin.username, project.id)
    self.assertEquals(len(members), 2, "More members than there should be.")
    self.assertEquals(len(all_users), 5, "Admin should not be viewed as a member. Neither should "
                      "users that were not part of the project's users list.")
    self.assertEqual(pag, 1, "Invalid total pages number.")
    for user in all_users:
        self.assertNotEqual(user.username, admin.username, "Admin is in members!")

class ProjectController(bc.BaseController):
    """
    Displays pages which deal with Project data management.
    """

    PRROJECTS_FOR_LINK_KEY = "projectsforlink"
    PRROJECTS_LINKED_KEY = "projectslinked"
    KEY_OPERATION_FILTERS = "operationfilters"

    def __init__(self):
        super(ProjectController, self).__init__()
        self.project_service = ProjectService()
        self.import_service = ImportService()

    @cherrypy.expose
    @using_template('base_template')
    @bc.settings()
    @logged()
    def index(self):
        """
        Display the project main menu. Choose one project to work with.
        """
        current_project = bc.get_current_project()
        if current_project is None:
            raise cherrypy.HTTPRedirect("/project/viewall")
        template_specification = dict(mainContent="project_submenu", title="TVB Project Menu")
        return self.fill_default_attributes(template_specification)

    @cherrypy.expose
    @using_template('base_template')
    @bc.settings()
    @logged()
    def viewall(self, create=False, page=1, selected_project_id=None, **_):
        """
        Display all existing projects. Choose one project to work with.
        """
        page = int(page)
        if cherrypy.request.method == 'POST' and create:
            raise cherrypy.HTTPRedirect('/project/editone')
        current_user_id = bc.get_logged_user().id

        ## Select a project if the user chose one.
        if selected_project_id is not None:
            try:
                selected_project = self.project_service.find_project(selected_project_id)
                self._mark_selected(selected_project)
            except ProjectServiceException, excep:
                self.logger.error(excep)
                self.logger.warning("Could not select project: " + str(selected_project_id))
                bc.set_error_message("Could not select project: " + str(selected_project_id))

        # Prepare the template response
        prjs, pages_no = self.project_service.retrieve_projects_for_user(current_user_id, page)
        template_specification = dict(mainContent="project/viewall", title="Available TVB Projects",
                                      projectsList=prjs, page_number=page, total_pages=pages_no)
        return self.fill_default_attributes(template_specification, 'list')

class ProjectServiceTest(TransactionalTestCase):
    """
    This class contains tests for the tvb.core.services.projectservice module.
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        EVENTS_FOLDER = ''
        # self.clean_database()
        self.project_service = ProjectService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()

    def tearDown(self):
        """
        Remove project folders and clean up the database.
        """
        created_projects = dao.get_projects_for_user(self.test_user.id)
        for project in created_projects:
            self.structure_helper.remove_project_structure(project.name)
        self.delete_project_folders()
        # self.clean_database()

    def test_create_project_happy_flow(self):
        """
        Standard flow for creating a new project.
        """
        user1 = TestFactory.create_user('test_user1')
        user2 = TestFactory.create_user('test_user2')
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        TestFactory.create_project(self.test_user, 'test_project', users=[user1.id, user2.id])
        resulting_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(resulting_projects), 1, "Project with valid data not inserted!")
        project = resulting_projects[0]
        if project.name == "test_project":
            self.assertEqual(project.description, "description", "Description does not match.")
            users_for_project = dao.get_members_of_project(project.id)
            for user in users_for_project:
                self.assertTrue(user.id in [user1.id, user2.id], "Users not stored properly.")
        self.assertTrue(os.path.exists(os.path.join(cfg.TVB_STORAGE, FilesHelper.PROJECTS_FOLDER,
                                                    "test_project")),
                        "Folder for project was not created")

    def test_create_project_empty_name(self):
        """
        Creating a project with an empty name.
        """
        data = dict(name="", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.assertRaises(ProjectServiceException, self.project_service.store_project,
                          self.test_user, True, None, **data)

    def test_edit_project_happy_flow(self):
        """
        Standard flow for editing an existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        proj_root = self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        edited_data = dict(name="test_project", description="test_description", users=[])
        edited_project = self.project_service.store_project(self.test_user, False,
                                                            selected_project.id, **edited_data)
        self.assertFalse(os.path.exists(proj_root), "Previous folder not deleted")
        proj_root = self.structure_helper.get_project_folder(edited_project)
        self.assertTrue(os.path.exists(proj_root), "New folder not created!")
        self.assertNotEqual(selected_project.name, edited_project.name, "Project was not changed!")

    def test_edit_project_unexisting(self):
        """
        Trying to edit a non-existing project.
        """
        selected_project = TestFactory.create_project(self.test_user, 'test_proj')
        self.structure_helper.get_project_folder(selected_project)
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 1, "Database initialization probably failed!")
        data = dict(name="test_project", description="test_description", users=[])
        self.assertRaises(ProjectServiceException, self.project_service.store_project,
                          self.test_user, False, 99, **data)

    def test_find_project_happy_flow(self):
        """
        Standard flow for finding a project by its id.
        """
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        inserted_project = TestFactory.create_project(self.test_user, 'test_project')
        self.assertTrue(self.project_service.find_project(inserted_project.id) is not None,
                        "Project not found!")
        dao_returned_project = dao.get_project_by_id(inserted_project.id)
        service_returned_project = self.project_service.find_project(inserted_project.id)
        self.assertEqual(dao_returned_project.id, service_returned_project.id,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.name, service_returned_project.name,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.description, service_returned_project.description,
                         "Data returned from service is different from data returned by DAO.")
        self.assertEqual(dao_returned_project.members, service_returned_project.members,
                         "Data returned from service is different from data returned by DAO.")

    def test_find_project_unexisting(self):
        """
        Searching for a non-existing project.
        """
        data = dict(name="test_project", description="test_description", users=[])
        initial_projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(initial_projects), 0, "Database reset probably failed!")
        self.project_service.store_project(self.test_user, True, None, **data)
        self.assertRaises(ProjectServiceException, self.project_service.find_project, 99)

    def test_retrieve_projects_for_user(self):
        """
        Test retrieving the projects of a given user. One page only.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset properly!")
        TestFactory.create_project(self.test_user, 'test_proj')
        TestFactory.create_project(self.test_user, 'test_proj1')
        TestFactory.create_project(self.test_user, 'test_proj2')
        user1 = TestFactory.create_user('another_user')
        TestFactory.create_project(user1, 'test_proj3')
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        for project in projects:
            self.assertNotEquals(project.name, "test_project3",
                                 "This project should not have been retrieved")

    def test_retrieve_1project_3usr(self):
        """
        One user as admin, two users as members; getting the projects for the admin
        and for any of the members should return one project.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        TestFactory.create_project(self.test_user, 'Testproject', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 1, "Projects not retrieved properly!")

    def test_retrieve_3projects_3usr(self):
        """
        Three users, three projects. Structure of the DB:
        proj1: {admin: user1, members: [user2, user3]}
        proj2: {admin: user2, members: [user1]}
        proj3: {admin: user3, members: [user1, user2]}
        Check that valid projects are returned for all the users.
        """
        member1 = TestFactory.create_user("member1")
        member2 = TestFactory.create_user("member2")
        member3 = TestFactory.create_user("member3")
        TestFactory.create_project(member1, 'TestProject1', users=[member2.id, member3.id])
        TestFactory.create_project(member2, 'TestProject2', users=[member1.id])
        TestFactory.create_project(member3, 'TestProject3', users=[member1.id, member2.id])
        projects = self.project_service.retrieve_projects_for_user(member1.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member2.id, 1)[0]
        self.assertEqual(len(projects), 3, "Projects not retrieved properly!")
        projects = self.project_service.retrieve_projects_for_user(member3.id, 1)[0]
        self.assertEqual(len(projects), 2, "Projects not retrieved properly!")

    def test_retrieve_projects_random(self):
        """
        Generate a large number of users/projects, and validate the results.
        """
        ExtremeTestFactory.generate_users(NR_USERS, MAX_PROJ_PER_USER)
        for i in range(NR_USERS):
            current_user = dao.get_user_by_name("gen" + str(i))
            expected_projects = ExtremeTestFactory.VALIDATION_DICT[current_user.id]
            if expected_projects % PROJECTS_PAGE_SIZE == 0:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE
                exp_proj_per_page = PROJECTS_PAGE_SIZE
            else:
                expected_pages = expected_projects / PROJECTS_PAGE_SIZE + 1
                exp_proj_per_page = expected_projects % PROJECTS_PAGE_SIZE
            if expected_projects == 0:
                expected_pages = 0
                exp_proj_per_page = 0
            projects, pages = self.project_service.retrieve_projects_for_user(current_user.id,
                                                                              expected_pages)
            self.assertEqual(len(projects), exp_proj_per_page,
                             "Projects not retrieved properly! Expected: " +
                             str(exp_proj_per_page) + " but got: " + str(len(projects)))
            self.assertEqual(pages, expected_pages, "Pages not retrieved properly!")

        for folder in os.listdir(cfg.TVB_STORAGE):
            full_path = os.path.join(cfg.TVB_STORAGE, folder)
            if os.path.isdir(full_path) and folder.startswith('Generated'):
                shutil.rmtree(full_path)

    def test_retrieve_projects_page2(self):
        """
        Test retrieving the second page of projects for a given user.
        """
        for i in range(PROJECTS_PAGE_SIZE + 3):
            TestFactory.create_project(self.test_user, 'test_proj' + str(i))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 3) % PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages, 2, 'Wrong number of pages retrieved.')

    def test_retrieve_projects_and_del(self):
        """
        Test retrieving the second page of projects for a given user, then delete from it.
        """
        created_projects = []
        for i in range(PROJECTS_PAGE_SIZE + 1):
            created_projects.append(TestFactory.create_project(self.test_user,
                                                               'test_proj' + str(i)))
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), (PROJECTS_PAGE_SIZE + 1) % PROJECTS_PAGE_SIZE,
                         "Pagination improper.")
        self.assertEqual(pages, (PROJECTS_PAGE_SIZE + 1) / PROJECTS_PAGE_SIZE + 1,
                         'Wrong number of pages')
        self.project_service.remove_project(created_projects[1].id)
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 2)
        self.assertEqual(len(projects), 0, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')
        projects, pages = self.project_service.retrieve_projects_for_user(self.test_user.id, 1)
        self.assertEqual(len(projects), PROJECTS_PAGE_SIZE, "Pagination improper.")
        self.assertEqual(pages, 1, 'Wrong number of pages retrieved.')

    def test_get_linkable_projects(self):
        """
        Test retrieving the link-able projects for a given user.
        """
        initial_projects = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(initial_projects), 0, "Database was not reset!")
        test_proj = []
        user1 = TestFactory.create_user("another_user")
        for i in range(4):
            test_proj.append(TestFactory.create_project(self.test_user if i < 3 else user1,
                                                        'test_proj' + str(i)))
        project_storage = self.structure_helper.get_project_folder(test_proj[0])
        result_meta = {DataTypeMetaData.KEY_OPERATION_TYPE: "Upload",
                       DataTypeMetaData.KEY_AUTHOR: "John Doe",
                       DataTypeMetaData.KEY_SUBJECT: "subj1",
                       DataTypeMetaData.KEY_STATE: "test_state",
                       DataTypeMetaData.KEY_NODE_TYPE: "test_data",
                       DataTypeMetaData.KEY_DATE: "test_date",
                       DataTypeMetaData.KEY_GID: generate_guid()}
        entity = dao.store_entity(model.AlgorithmCategory("category"))
        entity = dao.store_entity(model.AlgorithmGroup("module", "classname", entity.id))
        entity = dao.store_entity(model.Algorithm(entity.id, "algo"))
        operation = model.Operation(self.test_user.id, test_proj[0].id, entity.id, "")
        operation = dao.store_entity(operation)
        project_storage = os.path.join(project_storage, str(operation.id))
        os.makedirs(project_storage)
        entity = DataTypeMetaData(result_meta)
        datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1",
                                                   state="test_state", operation_id=operation.id))
        linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id,
                                                                       str(datatype.id))[0]
        self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
        proj_names = [project.name for project in linkable]
        self.assertTrue(test_proj[1].name in proj_names)
        self.assertTrue(test_proj[2].name in proj_names)
        self.assertFalse(test_proj[3].name in proj_names)

    def test_remove_project_happy_flow(self):
        """
        Standard flow for deleting a project.
        """
        inserted_project = TestFactory.create_project(self.test_user, 'test_proj')
        project_root = self.structure_helper.get_project_folder(inserted_project)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!")
        self.assertTrue(os.path.exists(project_root), "Something failed at insert time!")
        self.project_service.remove_project(inserted_project.id)
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 0, "Project was not deleted!")
        self.assertFalse(os.path.exists(project_root), "Root folder not deleted!")

    def test_remove_project_wrong_id(self):
        """
        Flow for deleting a project given a non-existing id.
        """
        TestFactory.create_project(self.test_user, 'test_proj')
        projects = dao.get_projects_for_user(self.test_user.id)
        self.assertEqual(len(projects), 1, "Initializations failed!")
        self.assertRaises(ProjectServiceException, self.project_service.remove_project, 99)

    @staticmethod
    def _create_value_wrapper(test_user, test_project=None):
        """
        Creates a ValueWrapper dataType and the associated parent Operation.
        This is also used in ProjectStructureTest.
        """
        if test_project is None:
            test_project = TestFactory.create_project(test_user, 'test_proj')
        operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
        value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
        value_wrapper.type = "ValueWrapper"
        value_wrapper.module = "tvb.datatypes.mapped_values"
        value_wrapper.subject = "John Doe"
        value_wrapper.state = "RAW_STATE"
        value_wrapper.set_operation_id(operation.id)
        adapter_instance = StoreAdapter([value_wrapper])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        all_value_wrappers = FlowService().get_available_datatypes(
            test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")
        if len(all_value_wrappers) != 1:
            raise Exception("Should be only one value wrapper.")
        result_vw = ABCAdapter.load_entity_by_gid(all_value_wrappers[0][2])
        return test_project, result_vw.gid, operation.gid

    def __check_meta_data(self, expected_meta_data, new_datatype):
        """ Validate Meta-Data. """
        mapp_keys = {DataTypeMetaData.KEY_SUBJECT: "subject",
                     DataTypeMetaData.KEY_STATE: "state"}
        for key, value in expected_meta_data.iteritems():
            if key in mapp_keys:
                self.assertEqual(value, getattr(new_datatype, mapp_keys[key]))
            elif key == DataTypeMetaData.KEY_OPERATION_TAG:
                if DataTypeMetaData.KEY_OP_GROUP_ID in expected_meta_data:
                    ## We have a Group to check
                    op_group = new_datatype.parent_operation.fk_operation_group
                    op_group = dao.get_generic_entity(OperationGroup, op_group)[0]
                    self.assertEqual(value, op_group.name)
                else:
                    self.assertEqual(value, new_datatype.parent_operation.user_group)

    def test_remove_project_node(self):
        """
        Test removing a node from a project.
        """
        inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user)
        project_to_link = model.Project("Link", self.test_user.id, "descript")
        project_to_link = dao.store_entity(project_to_link)
        exact_data = dao.get_datatype_by_gid(gid)
        dao.store_entity(model.Links(exact_data.id, project_to_link.id))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")

        operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
        op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
        self.assertTrue(os.path.exists(op_folder))
        sub_files = os.listdir(op_folder)
        self.assertEqual(2, len(sub_files))
        ### Validate that no more files are created than needed.

        self.project_service._remove_project_node_files(inserted_project.id, gid)
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        ### Validate that the Operation GID-file is still there.

        op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1))
        sub_files = os.listdir(op_folder)
        self.assertEqual(1, len(sub_files))
        self.assertTrue(dao.get_datatype_by_gid(gid) is not None,
                        "Data should still be there because of links")
        self.project_service._remove_project_node_files(project_to_link.id, gid)
        self.assertTrue(dao.get_datatype_by_gid(gid) is None)
        sub_files = os.listdir(op_folder)
        self.assertEqual(0, len(sub_files))

    def test_update_meta_data_simple(self):
        """
        Test updating the metaData of a simple dataType that is not part of a group.
        """
        inserted_project, gid, _ = self._create_value_wrapper(self.test_user)
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "second_state",
                         DataTypeOverlayDetails.CODE_GID: gid,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'new user group'}
        self.project_service.update_metadata(new_meta_data)
        new_datatype = dao.get_datatype_by_gid(gid)
        self.__check_meta_data(new_meta_data, new_datatype)
        op_path = FilesHelper().get_operation_meta_file_path(inserted_project.name,
                                                             new_datatype.parent_operation.id)
        op_meta = XMLReader(op_path).read_metadata()
        self.assertEqual(op_meta['user_group'], 'new user group', 'UserGroup not updated!')

    def test_update_meta_data_group(self):
        """
        Test updating the metaData of a group of dataTypes.
        """
        datatypes, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        new_meta_data = {DataTypeOverlayDetails.DATA_SUBJECT: "new subject",
                         DataTypeOverlayDetails.DATA_STATE: "updated_state",
                         DataTypeOverlayDetails.CODE_OPERATION_GROUP_ID: group_id,
                         DataTypeOverlayDetails.CODE_OPERATION_TAG: 'newGroupName'}
        self.project_service.update_metadata(new_meta_data)
        for datatype in datatypes:
            new_datatype = dao.get_datatype_by_id(datatype.id)
            self.assertEqual(group_id, new_datatype.parent_operation.fk_operation_group)
            new_group = dao.get_generic_entity(model.OperationGroup, group_id)[0]
            self.assertEqual(new_group.name, "newGroupName")
            self.__check_meta_data(new_meta_data, new_datatype)

    def _create_datatypes(self, dt_factory, nr_of_dts):
        for idx in range(nr_of_dts):
            dt = Datatype1()
            dt.row1 = "value%i" % (idx,)
            dt.row2 = "value%i" % (idx + 1,)
            dt_factory._store_datatype(dt)

    def test_retrieve_project_full(self):
        """
        Tests that full project information is retrieved by
        `ProjectService.retrieve_project_full(...)`.
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        _, ops_nr, _, operations, pages_no = self.project_service.retrieve_project_full(
            dt_factory.project.id)
        self.assertEqual(ops_nr, 1,
                         "DataType Factory should only use one operation to store all its datatypes.")
        self.assertEqual(pages_no, 1,
                         "DataType Factory should only use one operation to store all its datatypes.")
        resulted_dts = operations[0]['results']
        self.assertEqual(len(resulted_dts), 3, "3 datatypes should be created.")

    def test_get_project_structure(self):
        """
        Tests that the project structure is as expected and contains all datatypes.
        """
        dt_factory = datatypes_factory.DatatypesFactory()
        self._create_datatypes(dt_factory, 3)
        node_json = self.project_service.get_project_structure(dt_factory.project, None,
                                                               'Data_State', 'Data_Subject', None)
        encoder = JSONEncoder()
        encoder.iterencode(node_json)
        # No exceptions were raised so far.
        project_dts = dao.get_datatypes_for_project(dt_factory.project.id)
        for dt in project_dts:
            self.assertTrue(dt.gid in node_json,
                            "Should have all datatypes present in the resulting json.")

def __init__(self):
    super(ProjectController, self).__init__()
    self.project_service = ProjectService()
    self.import_service = ImportService()

class ProjectStructureTest(TransactionalTestCase):
    """
    Test ProjectService methods.
    """

    def setUp(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()
        self.flow_service = FlowService()
        self.structure_helper = FilesHelper()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, "ProjectStructure")
        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)

    def tearDown(self):
        self.delete_project_folders()

    # def tearDown(self):
    #     """
    #     Remove project folders and clean up database.
    #     """
    #     self.clean_database(True)

    def test_set_operation_visibility(self):
        """
        Check that the visibility of an operation is set correctly.
        """
        self.__init_algorithmn()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op1 = dao.store_entity(op1)
        self.assertTrue(op1.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        self.assertFalse(updated_op.visible, "The operation should not be visible.")

    def test_set_op_and_group_visibility(self):
        """
        When changing the visibility of an operation that belongs to an operation group,
        the visibility of the entire group of operations should change as well.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        self.project_service.set_operation_and_group_visibility(list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")

    def test_set_op_group_visibility(self):
        """
        Tests that the visibility of an operation group is set correctly.
        """
        _, group_id = TestFactory.create_group(self.test_user, subject="test-subject-1")
        list_of_operations = dao.get_operations_in_group(group_id)
        for operation in list_of_operations:
            self.assertTrue(operation.visible, "The operation should be visible.")
        op_group = dao.get_operationgroup_by_id(group_id)
        self.project_service.set_operation_and_group_visibility(op_group.gid, False, True)
        operations = dao.get_operations_in_group(group_id)
        for operation in operations:
            self.assertFalse(operation.visible, "The operation should not be visible.")

    def test_is_upload_operation(self):
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(operations[0].gid)
        self.assertFalse(is_upload_operation, "The operation is not an upload operation.")
        is_upload_operation = self.project_service.is_upload_operation(operations[1].gid)
        self.assertTrue(is_upload_operation, "The operation is an upload operation.")

    def test_get_upload_operations(self):
        """
        Test get_all when the filter is for the Upload category.
        """
        self.__init_algorithmn()
        upload_algo = self._create_algo_for_upload()
        project = model.Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)
        op1 = model.Operation(self.test_user.id, self.test_project.id, self.algo_inst.id, "")
        op2 = model.Operation(self.test_user.id, project.id, upload_algo.id, "",
                              status=model.STATUS_FINISHED)
        op3 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "")
        op4 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "",
                              status=model.STATUS_FINISHED)
        op5 = model.Operation(self.test_user.id, self.test_project.id, upload_algo.id, "",
                              status=model.STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])
        upload_operations = self.project_service.get_all_operations_for_uploaders(
            self.test_project.id)
        self.assertEqual(2, len(upload_operations), "Wrong number of upload operations.")
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            self.assertTrue(operations[i].id in upload_ids,
                            "The operation should be an upload operation.")
        for i in [0, 1, 2]:
            self.assertFalse(operations[i].id in upload_ids,
                             "The operation should not be an upload operation.")

    def test_is_datatype_group(self):
        """
        Tests whether a datatype is a group.
        """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        self.assertTrue(is_dt_group, "The datatype should be a datatype group.")
        is_dt_group = self.project_service.is_datatype_group(first_dt.gid)
        self.assertFalse(is_dt_group, "The datatype should not be a datatype group.")

    def test_count_datatypes_in_group(self):
        """ Test that counting dataTypes is correct. Happy flow. """
        _, dt_group_id, first_dt, _ = self._create_datatype_group()
        count = dao.count_datatypes_in_group(dt_group_id)
        self.assertEqual(count, 2)
        count = dao.count_datatypes_in_group(first_dt.id)
        self.assertEqual(count, 0, "There should be no dataType.")

    def test_set_datatype_visibility(self):
        """
        Check that the visibility of a datatype is set correctly.
        """
        # _create_mapped_arrays returns a list of 3 elements.
        mapped_arrays = self._create_mapped_arrays(self.test_project.id)
        for mapped_array in mapped_arrays:
            is_visible = dao.get_datatype_by_id(mapped_array[0]).visible
            self.assertTrue(is_visible, "The data type should be visible.")

        self.project_service.set_datatype_visibility(mapped_arrays[0][2], False)
        for i in xrange(len(mapped_arrays)):
            is_visible = dao.get_datatype_by_id(mapped_arrays[i][0]).visible
            if not i:
                self.assertFalse(is_visible, "The data type should not be visible.")
            else:
                self.assertTrue(is_visible, "The data type should be visible.")

    def test_set_visibility_for_dt_in_group(self):
        """
        Check that the visibility of a datatype from a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")
        self.project_service.set_datatype_visibility(first_dt.gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        db_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        db_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(db_dt_group.visible, "The data type group should not be visible.")
        self.assertFalse(db_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(db_second_dt.visible, "The data type should not be visible.")

    def test_set_visibility_for_group(self):
        """
        Check that the visibility of a datatype group is set correctly.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        dt_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]
        self.assertTrue(dt_group.visible, "The data type group should be visible.")
        self.assertTrue(first_dt.visible, "The data type should be visible.")
        self.assertTrue(second_dt.visible, "The data type should be visible.")

        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group_id)
        updated_first_dt = self.project_service.get_datatype_by_id(first_dt.id)
        updated_second_dt = self.project_service.get_datatype_by_id(second_dt.id)

        self.assertFalse(updated_dt_group.visible, "The data type group should not be visible.")
        self.assertFalse(updated_first_dt.visible, "The data type should not be visible.")
        self.assertFalse(updated_second_dt.visible, "The data type should not be visible.")

    def test_getdatatypes_from_dtgroup(self):
        """
        Validate that we can retrieve all DTs from a DT_Group.
        """
        _, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatypes = self.project_service.get_datatypes_from_datatype_group(dt_group_id)
        self.assertEqual(len(datatypes), 2, "There should be 2 datatypes in the datatype group.")
        expected_dict = {first_dt.id: first_dt, second_dt.id: second_dt}
        actual_dict = {datatypes[0].id: datatypes[0], datatypes[1].id: datatypes[1]}
        for key in expected_dict.keys():
            expected = expected_dict[key]
            actual = actual_dict[key]
            self.assertEqual(expected.id, actual.id, "Not the same id.")
            self.assertEqual(expected.gid, actual.gid, "Not the same gid.")
            self.assertEqual(expected.type, actual.type, "Not the same type.")
            self.assertEqual(expected.subject, actual.subject, "Not the same subject.")
            self.assertEqual(expected.state, actual.state, "Not the same state.")
            self.assertEqual(expected.visible, actual.visible,
                             "The datatype visibility is not correct.")
            self.assertEqual(expected.module, actual.module, "Not the same module.")
            self.assertEqual(expected.user_tag_1, actual.user_tag_1, "Not the same user_tag_1.")
            self.assertEqual(expected.invalid, actual.invalid,
                             "The invalid field value is not correct.")
            self.assertEqual(expected.is_nan, actual.is_nan,
                             "The is_nan field value is not correct.")

    def test_get_operations_for_dt(self):
        created_ops, datatype_gid = self._create_operations_with_inputs()

        operations = self.project_service.get_operations_for_datatype(datatype_gid,
                                                                      self.relevant_filter)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[0].id in [operations[0].id, operations[1].id],
                        "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [operations[0].id, operations[1].id],
                        "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid,
                                                                      self.full_filter)
        self.assertEqual(len(operations), 4)
        ids = [operations[0].id, operations[1].id, operations[2].id, operations[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid,
                                                                      self.relevant_filter, True)
        self.assertEqual(len(operations), 1)
        self.assertEqual(created_ops[4].id, operations[0].id, "Incorrect number of operations.")

        operations = self.project_service.get_operations_for_datatype(datatype_gid,
                                                                      self.full_filter, True)
        self.assertEqual(len(operations), 2)
        self.assertTrue(created_ops[4].id in [operations[0].id, operations[1].id],
                        "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [operations[0].id, operations[1].id],
                        "Retrieved wrong operations.")

    def test_get_operations_for_dt_group(self):
        created_ops, dt_group_id = self._create_operations_with_inputs(True)

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id,
                                                                     self.relevant_filter)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[0].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[2].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id, self.full_filter)
        self.assertEqual(len(ops), 4, "Incorrect number of operations.")
        ids = [ops[0].id, ops[1].id, ops[2].id, ops[3].id]
        for i in range(4):
            self.assertTrue(created_ops[i].id in ids, "Retrieved wrong operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id,
                                                                     self.relevant_filter, True)
        self.assertEqual(len(ops), 1)
        self.assertEqual(created_ops[4].id, ops[0].id, "Incorrect number of operations.")

        ops = self.project_service.get_operations_for_datatype_group(dt_group_id,
                                                                     self.full_filter, True)
        self.assertEqual(len(ops), 2)
        self.assertTrue(created_ops[4].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")
        self.assertTrue(created_ops[5].id in [ops[0].id, ops[1].id], "Retrieved wrong operations.")

    def test_get_inputs_for_operation(self):
        algo_group = dao.find_group('tvb_test.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        ids = []
        for datatype in array_wrappers:
            ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                                 "param_2": array_wrappers[1][2],
                                 "param_3": array_wrappers[2][2], "param_6": "0"})
        operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
        self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id],
                        "Retrieved wrong dataType.")

        project, dt_group_id, first_dt, _ = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
        operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
        operation = dao.store_entity(operation)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.relevant_filter)
        self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")
        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
        self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")

    def test_get_inputs_for_op_group(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The DataType inputs will be from a DataType group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        first_dt.visible = False
        dao.store_entity(first_dt)
        second_dt.visible = False
        dao.store_entity(second_dt)

        op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
        params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

        algo_group = dao.find_group('tvb_test.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1,
                              op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2,
                              op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 0)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        first_dt.visible = True
        dao.store_entity(first_dt)

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
        self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
        self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

    def test_get_inputs_for_op_group_simple_inputs(self):
        """
        Tests method get_datatypes_inputs_for_operation_group.
        The dataType inputs will not be part of a dataType group.
        """
        # _create_mapped_arrays returns a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        array_wrapper_ids = []
        for datatype in array_wrappers:
            array_wrapper_ids.append(datatype[0])

        datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
        datatype.visible = False
        dao.store_entity(datatype)

        op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
        op_group = dao.store_entity(op_group)
        params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                               "param_2": array_wrappers[1][2], "param_6": "7"})
        params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                               "param_2": array_wrappers[1][2], "param_6": "6"})

        algo_group = dao.find_group('tvb_test.adapters.testadapter3', 'TestAdapter3')
        algo = dao.get_algorithm_by_group(algo_group.id)

        op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1,
                              op_group_id=op_group.id)
        op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2,
                              op_group_id=op_group.id)
        dao.store_entities([op1, op2])

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.relevant_filter)
        self.assertEqual(len(inputs), 2)
        self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id],
                         "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id],
                        "Retrieved wrong dataType.")
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id],
                        "Retrieved wrong dataType.")

        inputs = self.project_service.get_datatypes_inputs_for_operation_group(
            op_group.id, self.full_filter)
        self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
        self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
        self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])

    def test_remove_datatype(self):
        """
        Tests the deletion of a datatype.
        """
        # _create_mapped_arrays returns a list of 3 elements.
        array_wrappers = self._create_mapped_arrays(self.test_project.id)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id, dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])

    def test_remove_datatype_from_group(self):
        """
        Tests the deletion of a datatype that belongs to a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, first_dt.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)

    def test_remove_datatype_group(self):
        """
        Tests the deletion of a datatype group.
        """
        project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
        datatype_group = dao.get_generic_entity(model.DataTypeGroup, dt_group_id)[0]

        self.project_service.remove_datatype(project.id, datatype_group.gid)
        self._check_if_datatype_was_removed(first_dt)
        self._check_if_datatype_was_removed(second_dt)
        self._check_if_datatype_was_removed(datatype_group)
        self._check_datatype_group_removed(dt_group_id, datatype_group.fk_operation_group)

    def _create_mapped_arrays(self, project_id):
        """
        Launch the NDimensionArrayAdapter three times and return the 3 resulting MappedArrays.
        """
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 0)

        algo_group = dao.find_group('tvb_test.adapters.ndimensionarrayadapter',
                                    'NDimensionArrayAdapter')
        group, _ = self.flow_service.prepare_adapter(project_id, algo_group)
        adapter_instance = self.flow_service.build_adapter_instance(group)
        data = {'param_1': 'some value'}
        # Create 3 data types
        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 1)

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 2)

        self.flow_service.fire_operation(adapter_instance, self.test_user, project_id, **data)
        array_wrappers = self.flow_service.get_available_datatypes(
            project_id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(array_wrappers), 3)
        return array_wrappers

    def _create_operation(self, project_id, algorithm_id):
        algorithm = dao.get_algorithm_by_id(algorithm_id)
        meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
                DataTypeMetaData.KEY_STATE: "RAW"}
        operation = model.Operation(self.test_user.id, project_id, algorithm.id, 'test params',
                                    meta=json.dumps(meta), status="FINISHED",
                                    method_name=ABCAdapter.LAUNCH_METHOD)
        return dao.store_entity(operation)

    def _create_datatype_group(self):
        """
        Creates a project and one DataTypeGroup with 2 DataTypes in the new group.
        """
        test_project = TestFactory.create_project(self.test_user, "NewProject")

        all_operations = dao.get_filtered_operations(test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation.")

        datatypes, op_group_id = TestFactory.create_group(self.test_user, test_project)
        dt_group = dao.get_datatypegroup_by_op_group_id(op_group_id)
        return test_project, dt_group.id, datatypes[0], datatypes[1]

    def _create_operations_with_inputs(self, is_group_parent=False):
        """
        Method used for creating a complex tree of operations.
        If 'is_group_parent' is True, then a new group will be created and one of
        its entries will be used as input for the returned operations.
        """
""" group_dts, root_op_group_id = TestFactory.create_group( self.test_user, self.test_project) if is_group_parent: datatype_gid = group_dts[0].gid else: datatype_gid = ProjectServiceTest._create_value_wrapper( self.test_user, self.test_project)[1] parameters = json.dumps({"param_name": datatype_gid}) ops = [] for i in range(4): ops.append( TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)) if i in [1, 3]: ops[i].visible = False ops[i].parameters = parameters ops[i] = dao.store_entity(ops[i]) #groups _, ops_group = TestFactory.create_group(self.test_user, self.test_project) ops_group = dao.get_operations_in_group(ops_group) self.assertEqual(2, len(ops_group)) ops_group[0].parameters = parameters ops_group[0] = dao.store_entity(ops_group[0]) ops_group[1].visible = False ops_group[1].parameters = parameters ops_group[1] = dao.store_entity(ops_group[1]) ops.extend(ops_group) if is_group_parent: dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id) return ops, dt_group.id return ops, datatype_gid def _check_if_datatype_was_removed(self, datatype): """ Check if a certain datatype was removed. """ try: dao.get_datatype_by_id(datatype.id) self.fail("The datatype was not deleted.") except Exception: pass try: dao.get_operation_by_id(datatype.fk_from_operation) self.fail("The operation was not deleted.") except Exception: pass def _check_datatype_group_removed(self, datatype_group_id, operation_groupp_id): """ Checks if the DataTypeGroup and OperationGroup was removed. """ try: dao.get_generic_entity(model.DataTypeGroup, datatype_group_id) self.fail("The DataTypeGroup entity was not removed.") except Exception: pass try: dao.get_operationgroup_by_id(operation_groupp_id) self.fail("The OperationGroup entity was not removed.") except Exception: pass def __init_algorithmn(self): """ Insert some starting data in the database. """ categ1 = model.AlgorithmCategory('one', True) self.categ1 = dao.store_entity(categ1) algo = model.AlgorithmGroup("tvb_test.core.services.flowservice_test", "ValidTestAdapter", categ1.id) adapter = dao.store_entity(algo) algo = model.Algorithm(adapter.id, 'ident', name='', req_data='', param_name='', output='') self.algo_inst = dao.store_entity(algo) @staticmethod def _create_algo_for_upload(): """ Creates a fake algorithm for an upload category. """ category = dao.store_entity( model.AlgorithmCategory("upload_category", rawinput=True)) algo_group = dao.store_entity( model.AlgorithmGroup("module", "classname", category.id)) return dao.store_entity(model.Algorithm(algo_group.id, "algo"))
class ImportServiceTest(TransactionalTestCase):
    """
    This class contains tests for the ImportService.
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.import_service = ImportService()
        self.flow_service = FlowService()
        self.project_service = ProjectService()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user, name="GeneratedProject",
                                                       description="test_desc")
        self.operation = TestFactory.create_operation(test_user=self.test_user, test_project=self.test_project)
        self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.zip_path = None

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        ### Delete TEMP folder
        if os.path.exists(cfg.TVB_TEMP_FOLDER):
            shutil.rmtree(cfg.TVB_TEMP_FOLDER)
        ### Delete folder where data was exported. Guard against tests that never set zip_path.
        if self.zip_path is not None and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])
        self.delete_project_folders()

    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Create an array mapped in DB.
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                             "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted), 2, "Problems when inserting data")

        # Create a value wrapper.
        value_wrapper = self._create_value_wrapper()
        result = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(result), 2,
                         "Should be two operations before export and not " + str(len(result)) + " !")

        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")

        # Remove the original project.
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
        self.assertEqual(0, len(result), "Project Not removed!")
        self.assertEqual(0, lng_, "Project Not removed!")

        # Now import the project back from the exported archive.
        self.import_service.import_project_structure(self.zip_path, self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
        self.assertEqual(len(result), 1, "There should be only one project.")
        self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
        self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
        self.test_project = result[0]

        result = dao.get_filtered_operations(self.test_project.id, None)
        # 1 op. - import project; 1 op. - save the array wrapper.
        self.assertEqual(len(result), 2,
                         "Should be two operations after import and not " + str(len(result)) + " !")
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
            self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')

        # Check the value wrapper.
        new_val = self.flow_service.get_available_datatypes(self.test_project.id,
                                                            "tvb.datatypes.mapped_values.ValueWrapper")
        self.assertEqual(len(new_val), 1, "One !=" + str(len(new_val)))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
        self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
        self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")

    def test_import_export_existing(self):
        """
        Test that importing a project structure fails while the original project
        still exists. The project contains the following data types:
        Connectivity, Surface, MappedArray and ValueWrapper.
        """
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Create an array mapped in DB.
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
        inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                             "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted), 2, "Problems when inserting data")

        # Create a value wrapper.
        self._create_value_wrapper()
        result = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(result), 2,
                         "Should be two operations before export and not " + str(len(result)) + " !")

        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")

        try:
            self.import_service.import_project_structure(self.zip_path, self.test_user.id)
            self.fail("Invalid import as the project already exists!")
        except ProjectImportException:
            # OK, do nothing. The project already exists.
            pass

    def _create_timeseries(self):
        """
        Launch adapter to persist a TimeSeries entity.
        """
        activity_data = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]])
        time_data = numpy.array([1, 2, 3])
        storage_path = FilesHelper().get_project_folder(self.test_project)
        time_series = TimeSeries(time_files=None, activity_files=None, max_chunk=10, maxes=None,
                                 mins=None, data_shape=numpy.shape(activity_data), storage_path=storage_path,
                                 label_y="Time", time_data=time_data, data_name='TestSeries',
                                 activity_data=activity_data, sample_period=10.0)
        self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
        timeseries = self.flow_service.get_available_datatypes(self.test_project.id,
                                                               "tvb.datatypes.time_series.TimeSeries")
        self.assertEqual(len(timeseries), 1, "Should be only one TimeSeries")

    def _create_value_wrapper(self):
        """
        Persist a ValueWrapper and return the stored entity.
        """
        value_ = ValueWrapper(data_value=5.0, data_name="my_value")
        self._store_entity(value_, "ValueWrapper", "tvb.datatypes.mapped_values")
        valuew = self.flow_service.get_available_datatypes(self.test_project.id,
                                                           "tvb.datatypes.mapped_values.ValueWrapper")
        self.assertEqual(len(valuew), 1, "Should be only one value wrapper")
        return ABCAdapter.load_entity_by_gid(valuew[0][2])

    def _store_entity(self, entity, type_, module):
        """
        Launch the StoreAdapter to create a persistent DataType.
        """
        entity.type = type_
        entity.module = module
        entity.subject = "John Doe"
        entity.state = "RAW_STATE"
        entity.set_operation_id(self.operation.id)
        adapter_instance = StoreAdapter([entity])
        OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
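# A minimal sketch of the export/import round trip covered by the tests above.
# `project` and `user_id` are placeholders for an existing project entity and a
# user id; ExportManager, ProjectService and ImportService are used as in this module.
def example_project_round_trip(project, user_id):
    """Illustrative sketch: export a project to a ZIP archive, then import it back."""
    zip_path = ExportManager().export_project(project)
    # The original project must be removed first; importing while it still
    # exists raises ProjectImportException (see test_import_export_existing).
    ProjectService().remove_project(project.id)
    ImportService().import_project_structure(zip_path, user_id)
    return zip_path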
def create_json(self, item_gid, item_type, visibility_filter):
    """
    Method used for creating a JSON representation of a graph.
    """
    selected_filter = StaticFiltersFactory.build_datatype_filters(single_filter=visibility_filter)
    graph_branches = []
    project = bc.get_current_project()
    is_upload_operation = (item_type == graph_structures.NODE_OPERATION_TYPE) and \
                          (self.project_service.is_upload_operation(item_gid) or item_gid == "firstOperation")
    if is_upload_operation:
        uploader_operations = self.project_service.get_all_operations_for_uploaders(project.id)
        for operation in uploader_operations:
            dt_outputs = self.project_service.get_results_for_operation(operation.id, selected_filter)
            dt_outputs = self._create_datatype_nodes(dt_outputs)
            parent_op = self._create_operation_nodes([operation], item_gid)
            branch = graph_structures.GraphBranch([], parent_op, dt_outputs, [])
            graph_branches.append(branch)
        graph = graph_structures.GraphStructure(graph_branches)
        return graph.to_json()

    dt_inputs, parent_op, dt_outputs, op_inputs = [], [], [], []
    if item_type == graph_structures.NODE_OPERATION_TYPE:
        dt_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(item_gid, selected_filter)
        parent_op = self.project_service.load_operation_by_gid(item_gid)
        dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
        # Create graph nodes.
        dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                         dt_outputs, [], item_gid)

    elif item_type == graph_structures.NODE_OPERATION_GROUP_TYPE:
        parent_op_group = self.project_service.get_operation_group_by_gid(item_gid)
        dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                  selected_filter)
        datatype_group = self.project_service.get_datatypegroup_by_op_group_id(parent_op_group.id)
        datatype = self.project_service.get_datatype_by_id(datatype_group.id)

        dt_inputs = self._create_datatype_nodes(dt_inputs)
        parent_op = graph_structures.OperationGroupNodeStructure(parent_op_group.gid)
        parent_op.selected = True
        parent_op = [parent_op]
        if selected_filter.display_name == StaticFiltersFactory.RELEVANT_VIEW and datatype.visible is False:
            dt_outputs = []
        else:
            dt_outputs = self._create_datatype_nodes([datatype])

    elif item_type == graph_structures.NODE_DATATYPE_TYPE:
        selected_dt = ABCAdapter.load_entity_by_gid(item_gid)
        if self.project_service.is_datatype_group(item_gid):
            datatype_group = self.project_service.get_datatypegroup_by_gid(selected_dt.gid)
            parent_op_group = self.project_service.get_operation_group_by_id(datatype_group.fk_operation_group)
            dt_inputs = self.project_service.get_datatypes_inputs_for_operation_group(parent_op_group.id,
                                                                                      selected_filter)
            op_inputs = self.project_service.get_operations_for_datatype_group(selected_dt.id, selected_filter)
            op_inputs_in_groups = self.project_service.get_operations_for_datatype_group(selected_dt.id,
                                                                                         selected_filter,
                                                                                         only_in_groups=True)
            # Create graph nodes.
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [], [selected_dt],
                                                                             op_inputs, item_gid)
            parent_op = [graph_structures.OperationGroupNodeStructure(parent_op_group.gid)]
            op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
            op_inputs.extend(op_inputs_in_groups)
        else:
            parent_op = self.flow_service.load_operation(selected_dt.fk_from_operation)
            dt_inputs = ProjectService.get_datatype_and_datatypegroup_inputs_for_operation(parent_op.gid,
                                                                                           selected_filter)
            op_inputs = self.project_service.get_operations_for_datatype(selected_dt.gid, selected_filter)
            op_inputs_in_groups = self.project_service.get_operations_for_datatype(selected_dt.gid,
                                                                                   selected_filter,
                                                                                   only_in_groups=True)
            dt_outputs = self.project_service.get_results_for_operation(parent_op.id, selected_filter)
            # Create graph nodes.
            dt_inputs, parent_op, dt_outputs, op_inputs = self._create_nodes(dt_inputs, [parent_op],
                                                                             dt_outputs, op_inputs, item_gid)
            op_inputs_in_groups = self._create_operation_group_nodes(op_inputs_in_groups)
            op_inputs.extend(op_inputs_in_groups)
    else:
        self.logger.error("Invalid item type: " + str(item_type))
        raise Exception("Invalid item type.")

    branch = graph_structures.GraphBranch(dt_inputs, parent_op, dt_outputs, op_inputs)
    graph_branches.append(branch)
    graph = graph_structures.GraphStructure(graph_branches)
    return graph.to_json()
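# A minimal sketch of the branch composition performed by create_json above: the
# answer is always a GraphStructure serialized from GraphBranch entries, each holding
# (datatype inputs, parent operations, datatype outputs, operation inputs).
# `datatype_nodes` and `operation_nodes` are placeholders for node lists produced by
# the _create_*_nodes helpers.
def example_single_branch_graph(datatype_nodes, operation_nodes):
    """Illustrative sketch: wrap prepared nodes into a serializable graph."""
    branch = graph_structures.GraphBranch(datatype_nodes, operation_nodes, [], [])
    return graph_structures.GraphStructure([branch]).to_json()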
def import_project_structure(self, uploaded, user_id):
    """
    Execute import operations:
    1. check if ZIP or folder
    2. find all project nodes
    3. for each project node:
        - create project
        - create all operations
        - import all images
        - create all dataTypes
    """
    self.user_id = user_id
    self.created_projects = []

    # Compute the name of the file where the uploaded project will be stored.
    now = datetime.now()
    date_str = "%d-%d-%d_%d-%d-%d_%d" % (now.year, now.month, now.day, now.hour,
                                         now.minute, now.second, now.microsecond)
    uq_name = "%s-ImportProject" % date_str
    uq_file_name = os.path.join(cfg.TVB_TEMP_FOLDER, uq_name + ".zip")
    temp_folder = None
    try:
        if isinstance(uploaded, FieldStorage) or isinstance(uploaded, Part):
            if uploaded.file:
                file_obj = open(uq_file_name, 'wb')
                file_obj.write(uploaded.file.read())
                file_obj.close()
            else:
                raise ProjectImportException("Please select the archive which contains the project structure.")
        else:
            shutil.copyfile(uploaded, uq_file_name)

        # Compute the name of the folder where to explode the uploaded ZIP file.
        temp_folder = os.path.join(cfg.TVB_TEMP_FOLDER, uq_name)
        try:
            self.files_helper.unpack_zip(uq_file_name, temp_folder)
        except FileStructureException, excep:
            self.logger.exception(excep)
            raise ProjectImportException("Bad ZIP archive provided. A TVB exported project is expected!")

        try:
            self._import_project_from_folder(temp_folder)
        except Exception, excep:
            self.logger.exception(excep)
            self.logger.debug("Error encountered during import. "
                              "Deleting projects created during this operation.")
            # Roll back projects created so far.
            project_service = ProjectService()
            for project in self.created_projects:
                project_service.remove_project(project.id)
            raise ProjectImportException(str(excep))
    finally:
        # Clean up the temporary archive and exploded folder. `temp_folder` is
        # initialized to None above, so this guard is safe even on early failures.
        if os.path.exists(uq_file_name):
            os.remove(uq_file_name)
        if temp_folder is not None and os.path.exists(temp_folder):
            shutil.rmtree(temp_folder)
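# A minimal sketch of driving import_project_structure with an archive already on
# disk (the non-upload branch above). `archive_path` and `user_id` are placeholders;
# a failed import rolls back any projects it created and raises ProjectImportException.
def example_import_from_disk(archive_path, user_id):
    """Illustrative sketch: import a previously exported TVB project archive."""
    service = ImportService()
    try:
        service.import_project_structure(archive_path, user_id)
    except ProjectImportException:
        # Bad archive or duplicate project; the service already rolled back.
        raise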