def create_interlinked_projects(self, region_mapping_factory, time_series_region_index_factory): """ Extend the two projects created in setup. Project src will have 3 datatypes, one a connectivity, and a link to the time series from the dest project. Project dest will have 3 links to the datatypes in src and a time series derived from the linked connectivity """ # add a connectivity to src project and link it to dest project zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip') TestFactory.import_zip_connectivity(self.dst_user, self.dest_project, zip_path, "John") conn = TestFactory.get_entity(self.dest_project, ConnectivityIndex) self.flow_service.create_link([conn.id], self.dest_project.id) # in dest derive a time series from the linked conn path = os.path.join(os.path.dirname(tvb_data.surfaceData.__file__), 'cortex_16384.zip') surface = TestFactory.import_surface_zip(self.dst_user, self.dest_project, path, CORTICAL) TXT_FILE = os.path.join(os.path.dirname(demo_data.__file__), 'regionMapping_16k_76.txt') region_mapping = TestFactory.import_region_mapping(self.dst_user, self.dest_project, TXT_FILE, surface.gid, conn.gid) ts = time_series_region_index_factory(connectivity=h5.load_from_index(conn), region_mapping=h5.load_from_index(region_mapping)) # then link the time series in the src project self.flow_service.create_link([ts.id], self.src_project.id) assert 3 == len(dao.get_datatypes_in_project(self.src_project.id)) assert 1 == len(dao.get_linked_datatypes_in_project(self.src_project.id)) assert 1 == len(dao.get_datatypes_in_project(self.dest_project.id)) assert 3 == len(dao.get_linked_datatypes_in_project(self.dest_project.id))
def build():
    """
    Project src will have 3 datatypes, and a link to the VW from the dest project.
    Project dest will have the derived VW and links
    """
    # create a connectivity in the src project, then link it into dest
    zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
    connectivity = TestFactory.import_zip_connectivity(self.dst_user, self.src_project, zip_path, "John")
    self.flow_service.create_link([connectivity.id], self.dest_project.id)
    # in dest, derive a ValueWrapper from the linked connectivity
    value_wrapper_gid = TestFactory.create_value_wrapper(self.dst_user, self.dest_project)[1]
    value_wrapper = dao.get_datatype_by_gid(value_wrapper_gid)
    # finally, link the ValueWrapper back into the src project
    self.flow_service.create_link([value_wrapper.id], self.src_project.id)
    # expected layout: src owns 3 datatypes + 1 link; dest owns 1 datatype + 3 links
    assert len(dao.get_datatypes_in_project(self.src_project.id)) == 3
    assert len(dao.get_linked_datatypes_in_project(self.src_project.id)) == 1
    assert len(dao.get_datatypes_in_project(self.dest_project.id)) == 1
    assert len(dao.get_linked_datatypes_in_project(self.dest_project.id)) == 3
def test_links_recreated_on_import(self):
    """Re-importing an exported project must recreate its links to still-existing datatypes."""
    exported_file = self._export_and_remove(self.dest_project)
    new_project_id = self._import(exported_file, self.dest_usr_id)
    self.assertEqual(1, self.red_datatypes_in(new_project_id))
    self.assertEqual(1, self.blue_datatypes_in(new_project_id))
    # both datatypes must come back as links, not as owned copies
    self.assertEqual(2, len(dao.get_linked_datatypes_in_project(new_project_id)))
def test_linked_datatype_dependencies_restored_on_import_inverse_order(self):
    """Importing dest before src must still restore all datatypes and links correctly."""
    self._create_interlinked_projects()
    # export both projects, then delete them
    src_export = self._export_and_remove(self.src_project)
    self.assertEqual(4, len(dao.get_datatypes_in_project(self.dest_project.id)))
    self.assertEqual(0, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))
    dest_export = self._export_and_remove(self.dest_project)
    # importing dest before src should work
    dest_import_id = self._import(dest_export, self.dest_usr_id)
    self.assertEqual(4, len(dao.get_datatypes_in_project(dest_import_id)))
    self.assertEqual(0, len(dao.get_linked_datatypes_in_project(dest_import_id)))
    src_import_id = self._import(src_export, self.src_usr_id)
    self.assertEqual(0, len(dao.get_datatypes_in_project(src_import_id)))
    self.assertEqual(4, len(dao.get_linked_datatypes_in_project(src_import_id)))
def test_links_recreated_on_import(self, initialize_linked_projects):
    """Re-importing an exported project must recreate its links to still-existing datatypes."""
    exported_file = self._export_and_remove(self.dest_project)
    new_project_id = self._import(exported_file, self.dst_usr_id)
    assert self.red_datatypes_in(new_project_id) == 1
    assert self.blue_datatypes_in(new_project_id) == 1
    # both datatypes must come back as links, not as owned copies
    assert len(dao.get_linked_datatypes_in_project(new_project_id)) == 2
def test_linked_datatype_dependencies_restored_on_import_inverse_order(self, transactional_setup_fixture, create_interlinked_projects):
    """Importing dest before src must still restore all datatypes and links correctly."""
    create_interlinked_projects()
    # export both projects, then delete them
    src_export = self._export_and_remove(self.src_project)
    assert len(dao.get_datatypes_in_project(self.dest_project.id)) == 4
    assert len(dao.get_linked_datatypes_in_project(self.dest_project.id)) == 0
    dest_export = self._export_and_remove(self.dest_project)
    # importing dest before src should work
    dest_import_id = self._import(dest_export, self.dest_usr_id)
    assert len(dao.get_datatypes_in_project(dest_import_id)) == 4
    assert len(dao.get_linked_datatypes_in_project(dest_import_id)) == 0
    src_import_id = self._import(src_export, self.src_usr_id)
    assert len(dao.get_datatypes_in_project(src_import_id)) == 0
    assert len(dao.get_linked_datatypes_in_project(src_import_id)) == 4
def test_links_recreated_on_import(self, transactional_setup_fixture):
    # Purpose: after exporting and deleting the dest project, re-importing it should
    # restore the links to datatypes still owned by the src project.
    export_file = self._export_and_remove(self.dest_project)
    imported_proj_id = self._import(export_file, self.dst_usr_id)
    assert 1 == self.red_datatypes_in(imported_proj_id)
    # NOTE(review): sibling revisions of this same test expect 1 blue datatype here
    # (and 2 links overall, which matches 1 red + 1 blue) — confirm whether 0 is
    # intentional for this fixture or a copy/paste slip.
    assert 0 == self.blue_datatypes_in(imported_proj_id)
    links = dao.get_linked_datatypes_in_project(imported_proj_id)
    assert 2 == len(links)
def test_linked_datatype_dependencies_restored_on_import(self):
    """Exporting then re-importing both projects (src first) must restore datatypes and links."""
    self._create_interlinked_projects()
    # export both projects, then delete them
    src_export = self._export_and_remove(self.src_project)
    assert len(dao.get_datatypes_in_project(self.dest_project.id)) == 4
    assert len(dao.get_linked_datatypes_in_project(self.dest_project.id)) == 0
    dest_export = self._export_and_remove(self.dest_project)
    # importing both projects should work
    src_import_id = self._import(src_export, self.src_usr_id)
    assert len(dao.get_datatypes_in_project(src_import_id)) == 4
    assert len(dao.get_linked_datatypes_in_project(src_import_id)) == 0
    dest_import_id = self._import(dest_export, self.dest_usr_id)
    assert len(dao.get_datatypes_in_project(dest_import_id)) == 0
    assert len(dao.get_linked_datatypes_in_project(dest_import_id)) == 4
def _create_interlinked_projects(self):
    """
    Extend the two projects created in setup.
    Project src ends up with 3 datatypes (one a connectivity) plus a link to the
    time series from project dest; project dest ends up with 3 links to the src
    datatypes plus a time series derived from the linked connectivity.
    """
    # create a connectivity in src and link it into dest
    _, connectivity = self.datatype_factory_src.create_connectivity()
    self.flow_service.create_link([connectivity.id], self.dest_project.id)
    # in dest, derive a time series from the linked connectivity
    time_series = self.datatype_factory_dest.create_timeseries(connectivity)
    # link that time series back into src
    self.flow_service.create_link([time_series.id], self.src_project.id)
    self.assertEqual(3, len(dao.get_datatypes_in_project(self.src_project.id)))
    self.assertEqual(1, len(dao.get_linked_datatypes_in_project(self.src_project.id)))
    self.assertEqual(1, len(dao.get_datatypes_in_project(self.dest_project.id)))
    self.assertEqual(3, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))
def test_datatypes_recreated_on_import(self):
    """When both projects are gone, import must materialize former links as real datatypes."""
    exported_file = self._export_and_remove(self.dest_project)
    self.project_service.remove_project(self.src_project.id)
    # both projects have been deleted; import should recreate the links as datatypes
    new_project_id = self._import(exported_file, self.dest_usr_id)
    self.assertEqual(1, self.red_datatypes_in(new_project_id))
    self.assertEqual(1, self.blue_datatypes_in(new_project_id))
    # no links should remain — everything is now owned by the imported project
    self.assertEqual(0, len(dao.get_linked_datatypes_in_project(new_project_id)))
def test_link_appears_in_project_structure(self):
    """A created link must be visible via the dao and in the UI project-structure json."""
    dest_id = self.dest_project.id
    self.flow_service.create_link([self.red_datatype.id], dest_id)
    # check from the low level methods up to the one used by the UI
    linked = dao.get_linked_datatypes_in_project(dest_id)
    self.assertEqual(1, len(linked))
    self.assertEqual(1, self.red_datatypes_in(dest_id))
    # renamed local from `json` to avoid shadowing the stdlib module name
    structure = self.project_service.get_project_structure(self.dest_project, None, DataTypeMetaData.KEY_STATE,
                                                           DataTypeMetaData.KEY_SUBJECT, None)
    self.assertTrue(self.red_datatype.gid in structure)
def test_datatypes_recreated_on_import(self, initialize_linked_projects):
    """When both projects are gone, import must materialize former links as real datatypes."""
    exported_file = self._export_and_remove(self.dest_project)
    self.project_service.remove_project(self.src_project.id)
    # both projects have been deleted; import should recreate the links as datatypes
    new_project_id = self._import(exported_file, self.dst_usr_id)
    assert self.red_datatypes_in(new_project_id) == 1
    assert self.blue_datatypes_in(new_project_id) == 1
    # no links should remain — everything is now owned by the imported project
    assert len(dao.get_linked_datatypes_in_project(new_project_id)) == 0
def test_link_appears_in_project_structure(self, initialize_two_projects):
    """A created link must be visible via the dao and in the UI project-structure json."""
    dest_id = self.dest_project.id
    self.algorithm_service.create_link([self.red_datatype.id], dest_id)
    # check from the low level methods up to the one used by the UI
    linked = dao.get_linked_datatypes_in_project(dest_id)
    assert len(linked) == 1
    assert self.red_datatypes_in(dest_id) == 1
    # renamed local from `json` to avoid shadowing the stdlib module name
    structure = self.project_service.get_project_structure(self.dest_project, None, DataTypeMetaData.KEY_STATE,
                                                           DataTypeMetaData.KEY_SUBJECT, None)
    assert self.red_datatype.gid in structure
def test_remove_entity_with_links_moves_links(self):
    """Deleting a datatype that has links must promote one of the links to own it."""
    dest_id = self.dest_project.id
    self.flow_service.create_link([self.red_datatype.id], dest_id)
    self.assertEqual(1, self.red_datatypes_in(dest_id))
    # remove the original datatype; it should migrate to one of its links
    self.project_service.remove_datatype(self.src_project.id, self.red_datatype.gid)
    self.assertEqual(1, self.red_datatypes_in(dest_id))
    # project dest now owns the datatype instead of linking to it
    self.assertEqual(0, len(dao.get_linked_datatypes_in_project(dest_id)))
def test_datatypes_and_links_recreated_on_import(self, transactional_setup_fixture):
    """
    After the blue datatype is deleted from src, import must recreate it as a real
    datatype, while the still-present red datatype comes back as a link.
    """
    exported_file = self._export_and_remove(self.dest_project)
    # remove datatype 2 from the source project
    self.project_service.remove_datatype(self.src_project.id, self.blue_datatype.gid)
    new_project_id = self._import(exported_file, self.dst_usr_id)
    # both datatypes should be recreated
    assert self.red_datatypes_in(new_project_id) == 1
    assert self.blue_datatypes_in(new_project_id) == 1
    # only datatype 1 should be a link
    links = dao.get_linked_datatypes_in_project(new_project_id)
    assert len(links) == 1
    assert links[0].gid == self.red_datatype.gid
def test_datatypes_and_links_recreated_on_import(self):
    """
    After the blue datatype is deleted from src, import must recreate it as a real
    datatype, while the still-present red datatype comes back as a link.
    """
    export_file = self._export_and_remove(self.dest_project)
    # remove datatype 2 from source project
    self.project_service.remove_datatype(self.src_project.id, self.blue_datatype.gid)
    imported_proj_id = self._import(export_file, self.dest_usr_id)
    # both datatypes should be recreated
    self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
    self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
    # only datatype 1 should be a link
    links = dao.get_linked_datatypes_in_project(imported_proj_id)
    self.assertEqual(1, len(links))
    # fixed: assertEquals is a deprecated alias removed in Python 3.12
    self.assertEqual(self.red_datatype.gid, links[0].gid)
def _get_linked_datatypes_storage_path(project):
    """
    :return: the file paths to the datatypes that are linked in `project`
    """
    paths = []
    for linked_index in dao.get_linked_datatypes_in_project(project.id):
        # re-fetch by gid so we get the datatype as a mapped type with storage attributes
        mapped_dt = dao.get_datatype_by_gid(linked_index.gid)
        if mapped_dt.storage_path is None:
            LOG.warning("Problem when trying to retrieve path on %s:%s for export!" % (mapped_dt.type, mapped_dt.gid))
        else:
            paths.append(mapped_dt.get_storage_file_path())
    return paths
def test_remove_entity_with_links_moves_links(self, initialize_two_projects):
    """Deleting a datatype that has links must promote one of the links to own it."""
    src_folder = FilesHelper().get_project_folder(self.src_project)
    self.red_datatype.storage_path = src_folder
    dest_id = self.dest_project.id
    self.flow_service.create_link([self.red_datatype.id], dest_id)
    assert self.red_datatypes_in(dest_id) == 1
    # remove the original datatype; it should migrate to one of its links
    self.project_service.remove_datatype(self.src_project.id, self.red_datatype.gid)
    assert self.red_datatypes_in(dest_id) == 1
    # project dest now owns the datatype instead of linking to it
    assert len(dao.get_linked_datatypes_in_project(dest_id)) == 0
def get_linked_datatypes_storage_path(self, project):
    """
    :return: the file paths to the datatypes that are linked in `project`
    """
    paths = []
    for linked_index in dao.get_linked_datatypes_in_project(project.id):
        # resolve the h5 file backing this linked datatype index
        h5_path = h5.path_for_stored_index(linked_index)
        if h5_path is None:
            self.logger.warning("Problem when trying to retrieve path on %s:%s!" % (linked_index.type, linked_index.gid))
        else:
            paths.append(h5_path)
    return paths
def _get_linked_datatypes_storage_path(project):
    """
    :return: the file paths to the datatypes that are linked in `project`
    """
    result = []
    linked_entries = dao.get_linked_datatypes_in_project(project.id)
    for entry in linked_entries:
        # re-fetch by gid to obtain the fully mapped datatype
        datatype = dao.get_datatype_by_gid(entry.gid)
        if datatype.storage_path is not None:
            result.append(datatype.get_storage_file_path())
        else:
            LOG.warning("Problem when trying to retrieve path on %s:%s for export!" % (datatype.type, datatype.gid))
    return result
def test_remove_entity_with_links_moves_links(self, initialize_two_projects):
    """Deleting a datatype that has links must promote one of the links to own it."""
    dest_id = self.dest_project.id
    # NOTE(review): a bare id is passed here, while sibling tests pass a list —
    # confirm the create_link signature expected by this service revision.
    self.algorithm_service.create_link(self.red_datatype.id, dest_id)
    assert self.red_datatypes_in(dest_id) == 1
    # remove the original datatype; it should migrate to one of its links
    self.project_service.remove_datatype(self.src_project.id, self.red_datatype.gid)
    assert self.red_datatypes_in(dest_id) == 1
    # project dest now owns the datatype instead of linking to it
    assert len(dao.get_linked_datatypes_in_project(dest_id)) == 0