def test_store_load_region_mapping(session, connectivity_factory, surface_factory, region_mapping_factory, sensors_factory):
    """
    Persist a graph of related index rows (connectivity, surface, region mapping,
    two sensor sets, a time series and an FCD) and verify they can be queried back.
    """

    def _index(index_class, datatype):
        # Build an index row from a HasTraits datatype and stage it in the session.
        idx = index_class()
        idx.fill_from_has_traits(datatype)
        session.add(idx)
        return idx

    connectivity = connectivity_factory(2)
    conn_idx = _index(ConnectivityIndex, connectivity)

    surface = surface_factory(5)
    surf_idx = _index(SurfaceIndex, surface)

    region_mapping = region_mapping_factory(surface, connectivity)
    rm_idx = _index(RegionMappingIndex, region_mapping)
    rm_idx.connectivity = conn_idx
    rm_idx.surface = surf_idx

    _index(SensorsIndex, sensors_factory("SEEG", 3))
    _index(SensorsIndex, sensors_factory("EEG", 3))

    time_series = TimeSeries(data=numpy.arange(5))
    fcd = Fcd(array_data=numpy.arange(5), source=time_series)
    ts_index = _index(TimeSeriesIndex, time_series)
    fcd_index = _index(FcdIndex, fcd)
    fcd_index.source = ts_index

    session.commit()

    res = session.query(ConnectivityIndex)
    assert res.count() == 1
    assert res[0].number_of_regions == 2
    assert res[0].number_of_connections == 4
    assert res[0].undirected is True
    assert res[0].weights_min == 0

    assert session.query(SurfaceIndex).count() == 1
    assert session.query(RegionMappingIndex).count() == 1
def test_happy_flow_import(self):
    """
    Test that importing a CFF generates at least one DataType in DB.
    """
    zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, subject=TEST_SUBJECT_A)

    subject_field = FilterChain.datatype + '.subject'
    filters = FilterChain('', [subject_field], [TEST_SUBJECT_A], ['=='])
    reference_connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex, filters)

    dt_count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
    self._import_csv_test_connectivity(reference_connectivity_index.gid, TEST_SUBJECT_B)
    dt_count_after = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
    assert dt_count_after == dt_count_before + 1

    filters = FilterChain('', [subject_field], [TEST_SUBJECT_B], ['like'])
    imported_connectivity_index = TestFactory.get_entity(self.test_project, ConnectivityIndex, filters)

    # check relationship between the imported connectivity and the reference
    assert reference_connectivity_index.number_of_regions == imported_connectivity_index.number_of_regions
    assert reference_connectivity_index.number_of_connections != imported_connectivity_index.number_of_connections

    reference_connectivity = h5.load_from_index(reference_connectivity_index)
    imported_connectivity = h5.load_from_index(imported_connectivity_index)

    assert not (reference_connectivity.weights == imported_connectivity.weights).all()
    assert not (reference_connectivity.tract_lengths == imported_connectivity.tract_lengths).all()
    assert (reference_connectivity.centres == imported_connectivity.centres).all()
    assert (reference_connectivity.orientations == imported_connectivity.orientations).all()
    assert (reference_connectivity.region_labels == imported_connectivity.region_labels).all()
def test_happy_flow_import(self):
    """
    Test that importing a CFF generates at least one DataType in DB.
    """
    zip_path = path.join(path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
    count_before = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path, "John")
    count_after = TestFactory.get_entity_count(self.test_project, ConnectivityIndex())
    # Exactly one new connectivity datatype should have appeared.
    assert count_after == count_before + 1
def setup(self, datatype_factory):
    """
    Sets up the environment for running the tests; creates a test user,
    a test project, a connectivity and a surface; imports a CFF data-set.

    NOTE(review): the original signature was ``def setup():`` while the body
    referenced both ``self`` and ``datatype_factory`` — neither was in scope,
    so the method raised NameError when run. Both are now taken as parameters;
    confirm against the fixture/caller that is expected to supply them.
    """
    self.test_project = datatype_factory['project']
    self.test_user = datatype_factory['user']
    zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    # Stray trailing semicolon removed from the next call.
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path)
    self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex())
    assert self.connectivity is not None
def test_server_get_operations_for_datatype(self):
    """Import a connectivity, then list the operations applicable to it via REST."""
    zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path)

    project_datatypes = self.get_data_in_project_resource.get(self.test_project.gid)
    assert type(project_datatypes) is list
    assert len(project_datatypes) == 1
    assert project_datatypes[0].type == ConnectivityIndex().display_type

    operations = self.get_operations_resource.get(project_datatypes[0].gid)
    assert type(operations) is list
    assert len(operations) > 3
def transactional_setup_method(self):
    """
    Sets up the environment for running the tests; creates a test user,
    a test project, a connectivity and a surface; imports a CFF data-set.
    """
    # A project requires an owning user, so create the user first.
    self.test_user = TestFactory.create_user('CrossCoherence_User')
    self.test_project = TestFactory.create_project(self.test_user, "CrossCoherence_Project")

    connectivity_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, connectivity_zip)

    self.connectivity = TestFactory.get_entity(self.test_project, ConnectivityIndex())
    assert self.connectivity is not None
def test_server_retrieve_datatype(self, mocker):
    """Import a connectivity, then download it through the REST retrieve resource."""
    zip_path = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_96.zip')
    TestFactory.import_zip_connectivity(self.test_user, self.test_project, zip_path)

    project_datatypes = self.get_data_in_project_resource.get(self.test_project.gid)
    assert type(project_datatypes) is list
    assert len(project_datatypes) == 1
    assert project_datatypes[0].type == ConnectivityIndex().display_type

    def _fake_send_file(path, as_attachment, attachment_filename):
        # Echo the arguments so the test can inspect what flask.send_file received.
        return path, as_attachment, attachment_filename

    # Mock flask.send_file to behave like _fake_send_file
    mocker.patch('flask.send_file', _fake_send_file)

    result = self.retrieve_resource.get(project_datatypes[0].gid)
    assert type(result) is tuple
    assert result[1] is True
    assert result[0] == result[2]
def fire_simulation_example(tvb_client_instance):
    """
    End-to-end REST client demo: pick the logged user's first project, run a
    simulation on a connectivity found in it, launch a Fourier analysis on the
    resulting time series, and download/inspect the produced files.

    :param tvb_client_instance: an authenticated TVB REST client
    :returns: (project_gid, time_series_gid) when a connectivity was found;
              implicitly None otherwise.
    """
    logger.info("Requesting projects for logged user")
    projects_of_user = tvb_client_instance.get_project_list()
    assert len(projects_of_user) > 0
    logger.info("TVB has {} projects for this user".format(len(projects_of_user)))

    project_gid = projects_of_user[0].gid
    logger.info("Requesting datatypes from project {}...".format(project_gid))
    data_in_project = tvb_client_instance.get_data_in_project(project_gid)
    logger.info("We have {} datatypes".format(len(data_in_project)))

    logger.info("Requesting operations from project {}...".format(project_gid))
    ops_in_project, _ = tvb_client_instance.get_operations_in_project(project_gid, 1)
    logger.info("Displayname of the first operation is: {}".format(ops_in_project[0].displayname))

    # Look for a connectivity datatype to drive the simulation.
    connectivity_gid = None
    datatypes_type = []
    for datatype in data_in_project:
        datatypes_type.append(datatype.type)
        if datatype.type == ConnectivityIndex().display_type:
            connectivity_gid = datatype.gid
    logger.info("The datatypes in project are: {}".format(datatypes_type))

    if connectivity_gid:
        logger.info("Preparing the simulator...")
        simulator = SimulatorAdapterModel()
        simulator.connectivity = connectivity_gid
        simulator.simulation_length = 100

        logger.info("Starting the simulation...")
        operation_gid = tvb_client_instance.fire_simulation(project_gid, simulator)

        logger.info("Monitoring the simulation operation...")
        monitor_operation(tvb_client_instance, operation_gid)

        logger.info("Requesting the results of the simulation...")
        simulation_results = tvb_client_instance.get_operation_results(operation_gid)
        datatype_names = []
        for datatype in simulation_results:
            datatype_names.append(datatype.name)
        logger.info("The resulted datatype are: {}".format(datatype_names))

        time_series_gid = simulation_results[1].gid
        logger.info("Download the time series file...")
        time_series_path = tvb_client_instance.retrieve_datatype(time_series_gid, tvb_client_instance.temp_folder)
        # FIX: this format string was broken across a raw newline in the source,
        # which is a syntax error; reassembled onto one line.
        logger.info("The time series file location is: {}".format(time_series_path))

        logger.info("Requesting algorithms to run on time series...")
        algos = tvb_client_instance.get_operations_for_datatype(time_series_gid)
        algo_names = [algo.displayname for algo in algos]
        logger.info("Possible algorithms are {}".format(algo_names))

        logger.info("Launch Fourier Analyzer...")
        fourier_model = FFTAdapterModel()
        fourier_model.time_series = time_series_gid
        fourier_model.window_function = 'hamming'
        operation_gid = tvb_client_instance.launch_operation(project_gid, FourierAdapter, fourier_model)
        logger.info("Fourier Analyzer operation has launched with gid {}".format(operation_gid))

        logger.info("Download the connectivity file...")
        connectivity_path = tvb_client_instance.retrieve_datatype(connectivity_gid, tvb_client_instance.temp_folder)
        logger.info("The connectivity file location is: {}".format(connectivity_path))

        logger.info("Loading an entire Connectivity datatype in memory...")
        connectivity = tvb_client_instance.load_datatype_from_file(connectivity_path)
        logger.info("Info on current Connectivity: {}".format(connectivity.summary_info()))

        # FIX: "chuck" -> "chunk" typo in the log message.
        logger.info("Loading a chunk from the time series H5 file, as this can be very large...")
        with TimeSeriesH5(time_series_path) as time_series_h5:
            data_shape = time_series_h5.read_data_shape()
            # Read only the first 20 time points, keeping the other dims whole.
            chunk = time_series_h5.read_data_slice(
                tuple([slice(20), slice(data_shape[1]), slice(data_shape[2]), slice(data_shape[3])]))

        assert chunk.shape[0] == 20
        assert chunk.shape[1] == data_shape[1]
        assert chunk.shape[2] == data_shape[2]
        assert chunk.shape[3] == data_shape[3]

        return project_gid, time_series_gid
logger.info("Requesting projects for user {}...".format(username)) projects_of_user = tvb_client.get_project_list(username) assert len(projects_of_user) > 0 logger.info("TVB has {} projects for this user".format( len(projects_of_user))) project_gid = projects_of_user[0].gid logger.info("Requesting datatypes from project {}...".format(project_gid)) data_in_project = tvb_client.get_data_in_project(project_gid) logger.info("We have {} datatypes".format(len(data_in_project))) connectivity_gid = None datatypes_type = [] for datatype in data_in_project: datatypes_type.append(datatype.type) if datatype.type == ConnectivityIndex().display_type: connectivity_gid = datatype.gid logger.info("The datatypes in projecct are: {}".format(datatypes_type)) if connectivity_gid: logger.info("Preparing the simulator...") simulator = SimulatorAdapterModel() simulator.connectivity = uuid.UUID(connectivity_gid) simulator.simulation_length = 100 logger.info("Starting the simulation...") operation_gid = tvb_client.fire_simulation(project_gid, simulator) logger.info("Monitoring the simulation operation...") while True: status = tvb_client.get_operation_status(operation_gid)