def create_timeseries(self, connectivity, ts_type=None, sensors=None):
    """
    Build and persist a TimeSeries entity filled with random data.
    EEG series are created when ts_type == "EEG"; otherwise a region
    series linked to `connectivity` (and its RegionMapping, if any).
    """
    operation, _, storage_path = self.__create_operation()
    if ts_type == "EEG":
        result_ts = TimeSeriesEEG(storage_path=storage_path, sensors=sensors)
    else:
        # Look up a RegionMapping referencing this connectivity, if one exists.
        mappings = dao.get_generic_entity(RegionMapping, connectivity.gid, '_connectivity')
        region_mapping = mappings[0] if mappings else None
        result_ts = TimeSeriesRegion(storage_path=storage_path,
                                     connectivity=connectivity,
                                     region_mapping=region_mapping)
    result_ts.write_data_slice(numpy.random.random((10, 10, 10, 10)))
    result_ts.write_time_slice(numpy.arange(10))
    store_adapter = StoreAdapter([result_ts])
    OperationService().initiate_prelaunch(operation, store_adapter, {})
    # Re-read from the DB so the caller gets the persisted entity.
    return dao.get_datatype_by_gid(result_ts.gid)
def _burst_create_connectivity(self):
    """
    Persist a 74-node Connectivity used by "non-dummy" burst launches
    (with the actual simulator).
    TODO: This is duplicate code from burstservice_test. Should go into
    the 'generic' DataType factory once that is done.
    """
    meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"}
    algorithm, algo_group = FlowService().get_algorithm_by_module_and_class(
        SIMULATOR_MODULE, SIMULATOR_CLASS)
    # Parent operation tied to the simulator algorithm group.
    self.operation = model.Operation(self.test_user.id, self.test_project.id,
                                     algo_group.id, json.dumps(''),
                                     meta=json.dumps(meta),
                                     status=model.STATUS_STARTED)
    self.operation = dao.store_entity(self.operation)
    storage_path = FilesHelper().get_project_folder(self.test_project,
                                                    str(self.operation.id))
    conn = Connectivity(storage_path=storage_path)
    conn.weights = numpy.ones((74, 74))
    conn.centres = numpy.ones((74, 3))
    OperationService().initiate_prelaunch(self.operation, StoreAdapter([conn]), {})
    return conn
def _burst_create_connectivity(self):
    """
    Persist a 74-node Connectivity used by "non-dummy" burst launches
    (with the actual simulator).
    """
    meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"}
    # Parent operation tied to the pre-resolved simulator algorithm.
    self.operation = model.Operation(self.test_user.id, self.test_project.id,
                                     self.sim_algorithm.id, json.dumps(''),
                                     meta=json.dumps(meta),
                                     status=model.STATUS_STARTED)
    self.operation = dao.store_entity(self.operation)
    storage_path = FilesHelper().get_project_folder(self.test_project,
                                                    str(self.operation.id))
    conn = Connectivity(storage_path=storage_path)
    conn.weights = numpy.ones((74, 74))
    conn.centres = numpy.ones((74, 3))
    self.operation_service.initiate_prelaunch(self.operation, StoreAdapter([conn]), {})
    return conn
def _store_entity(self, entity, type_, module):
    """
    Launch the StoreAdapter to persist `entity` as a DataType.

    :param entity: in-memory DataType instance to persist
    :param type_: class name recorded on the entity
    :param module: module path recorded on the entity
    """
    entity.type = type_
    entity.module = module
    entity.subject = "John Doe"
    entity.state = "RAW_STATE"
    # Attach to the test operation created during setup.
    entity.set_operation_id(self.operation.id)
    adapter_instance = StoreAdapter([entity])
    OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
def create_covariance(self, time_series):
    """
    :returns: a stored DataType Covariance.
    """
    operation, _, storage_path = self.__create_operation()
    result = Covariance(storage_path=storage_path, source=time_series)
    result.write_data_slice(numpy.random.random((10, 10, 10)))
    OperationService().initiate_prelaunch(operation, StoreAdapter([result]), {})
    return result
def create_connectivity_measure(self, connectivity):
    """
    :returns: persisted entity ConnectivityMeasure
    """
    operation, _, storage_path = self.__create_operation()
    measure = ConnectivityMeasure(storage_path=storage_path)
    measure.connectivity = connectivity
    OperationService().initiate_prelaunch(operation, StoreAdapter([measure]), {})
    return measure
def test_adapter_launch(self):
    """
    Launch TimeseriesMetricsAdapter on a freshly stored TimeSeriesRegion
    and check that a DatatypeMeasure with a numeric value per available
    metric is produced.
    """
    meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"}
    algo_group = FlowService().get_algorithm_by_module_and_class(
        SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
    # Parent operation under which the dummy time-series is stored.
    self.operation = model.Operation(self.test_user.id, self.test_project.id,
                                     algo_group.id, json.dumps(''),
                                     meta=json.dumps(meta),
                                     status=model.STATUS_STARTED,
                                     method_name=ABCAdapter.LAUNCH_METHOD)
    self.operation = dao.store_entity(self.operation)
    storage_path = FilesHelper().get_project_folder(self.test_project,
                                                    str(self.operation.id))
    dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
    dummy_time = numpy.arange(1, 11)

    # Get connectivity — the test fixture is expected to hold exactly two.
    connectivities = FlowService().get_available_datatypes(
        self.test_project.id, "tvb.datatypes.connectivity.Connectivity")[0]
    self.assertEqual(2, len(connectivities))
    connectivity_gid = connectivities[0][2]

    dummy_time_series = TimeSeriesRegion()
    dummy_time_series.storage_path = storage_path
    dummy_time_series.write_data_slice(dummy_input)
    dummy_time_series.write_time_slice(dummy_time)
    # Flush to disk before handing the entity to the store adapter.
    dummy_time_series.close_file()
    dummy_time_series.start_time = 0.0
    dummy_time_series.sample_period = 1.0
    dummy_time_series.connectivity = connectivity_gid

    adapter_instance = StoreAdapter([dummy_time_series])
    OperationService().initiate_prelaunch(self.operation, adapter_instance, {})
    # Re-read the persisted entity so the analyzer works on stored data.
    dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                               dummy_time_series.gid, 'gid')[0]

    ts_metric_adapter = TimeseriesMetricsAdapter()
    resulted_metric = ts_metric_adapter.launch(dummy_time_series)
    self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                    "Result should be a datatype measure.")
    self.assertTrue(len(resulted_metric.metrics) >= len(
        ts_metric_adapter.available_algorithms.keys()),
        "At least a result should have been generated for every metric.")
    for metric_value in resulted_metric.metrics.values():
        self.assertTrue(isinstance(metric_value, (float, int)))
def create_connectivity(self, nodes=74):
    """
    Build and persist a Connectivity with `nodes` regions, suitable for
    "non-dummy" burst launches (with the actual simulator).
    :returns: (Algorithm Identifier, stored Connectivity)
    """
    operation, algo_id, storage_path = self.__create_operation()
    conn = Connectivity(storage_path=storage_path)
    conn.weights = numpy.ones((nodes, nodes))
    conn.centres = numpy.ones((nodes, 3))
    OperationService().initiate_prelaunch(operation, StoreAdapter([conn]), {})
    return algo_id, conn
def create_datatype_measure(self, analyzed_entity, operation=None, storage_path=None):
    """
    :return: persisted DatatypeMeasure
    """
    # Build a fresh parent operation (and storage path) unless one is supplied.
    if operation is None:
        operation, _, storage_path = self.__create_operation()
    measure = DatatypeMeasure(storage_path=storage_path,
                              metrics=self.DATATYPE_MEASURE_METRIC)
    measure.analyzed_datatype = analyzed_entity
    OperationService().initiate_prelaunch(operation, StoreAdapter([measure]), {})
    return measure
def create_crosscoherence(self, time_series):
    """
    :returns: a stored entity of type CoherenceSpectrum
    """
    operation, _, storage_path = self.__create_operation()
    # Non-stored spectrum used only as the payload for the write below.
    partial = CoherenceSpectrum(array_data=numpy.random.random((10, 10, 10, 10)),
                                use_storage=False)
    coherence = CoherenceSpectrum(source=time_series, storage_path=storage_path,
                                  frequency=0.1, nfft=256)
    coherence.write_data_slice(partial)
    coherence.close_file()
    OperationService().initiate_prelaunch(operation, StoreAdapter([coherence]), {})
    return coherence
def create_crosscorrelation(self, time_series):
    """
    :returns: `CrossCorrelation` stored entity.
    """
    operation, _, storage_path = self.__create_operation()
    # Non-stored correlation used only as the payload for the write below.
    partial = CrossCorrelation(array_data=numpy.random.random((10, 10, 10, 10, 10)),
                               use_storage=False)
    crossc = CrossCorrelation(source=time_series, storage_path=storage_path,
                              time=range(10))
    crossc.write_data_slice(partial)
    crossc.close_file()
    OperationService().initiate_prelaunch(operation, StoreAdapter([crossc]), {})
    return crossc
def _create_connectivity(self, nodes_number):
    """
    Persist a Connectivity entity with `nodes_number` regions and
    return its GID.
    """
    storage_path = FilesHelper().get_project_folder(self.test_project,
                                                    str(self.operation.id))
    conn = Connectivity(storage_path=storage_path)
    conn.weights = numpy.ones((nodes_number, nodes_number))
    conn.centres = numpy.ones((nodes_number, 3))
    OperationService().initiate_prelaunch(self.operation, StoreAdapter([conn]), {})
    # Re-read by id so the returned GID comes from the stored datatype.
    return dao.get_datatype_by_id(conn.id).gid
def create_stimulus(self, connectivity):
    """
    :param connectivity: Connectivity to create stimuli for its regions
    :return: persisted region Stimuli instance
    """
    operation, _, storage_path = self.__create_operation()
    stimulus = StimuliRegion(storage_path=storage_path)
    stimulus.connectivity = connectivity
    # One random weight per region.
    stimulus.weight = numpy.random.random((connectivity.number_of_regions, 1)).tolist()
    stimulus.temporal = PulseTrain()
    OperationService().initiate_prelaunch(operation, StoreAdapter([stimulus]), {})
    return stimulus
def _store_datatype(self, data_type, operation_id=None):
    """
    Launch the StoreAdapter to persist `data_type` as a DataType.

    :param data_type: in-memory DataType instance to persist
    :param operation_id: parent operation id; defaults to `self.operation.id`
    :return: the same `data_type`, now linked to the parent operation
    """
    operation_id = operation_id or self.operation.id
    data_type.type = data_type.__class__.__name__
    data_type.module = data_type.__class__.__module__
    data_type.subject = self.USER_FULL_NAME
    data_type.state = self.DATATYPE_STATE
    data_type.set_operation_id(operation_id)
    adapter_instance = StoreAdapter([data_type])
    operation = dao.get_operation_by_id(operation_id)
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return data_type
def create_ICA(self, timeseries):
    """
    :returns: persisted entity IndependentComponents
    """
    operation, _, storage_path = self.__create_operation()
    # Non-stored partials serve only as data sources for the write below.
    partial_ts = TimeSeries(use_storage=False)
    partial_ts.data = numpy.random.random((10, 10, 10, 10))
    partial_ica = IndependentComponents(
        source=partial_ts,
        component_time_series=numpy.random.random((10, 10, 10, 10)),
        prewhitening_matrix=numpy.random.random((10, 10, 10, 10)),
        unmixing_matrix=numpy.random.random((10, 10, 10, 10)),
        n_components=10,
        use_storage=False)
    ica = IndependentComponents(source=timeseries, n_components=10,
                                storage_path=storage_path)
    ica.write_data_slice(partial_ica)
    OperationService().initiate_prelaunch(operation, StoreAdapter([ica]), {})
    return ica
def create_surface(self):
    """
    Build and persist a minimal 4-vertex / 4-triangle CorticalSurface.
    :returns: (Algorithm Identifier, stored Surface entity)
    """
    operation, algo_id, storage_path = self.__create_operation()
    surface = CorticalSurface(storage_path=storage_path)
    surface.vertices = numpy.array([[-10, 0, 0],
                                    [0, 0, -10],
                                    [10, 0, 0],
                                    [0, 10, 0]], dtype=float)
    surface.triangles = numpy.array([[0, 1, 2],
                                     [0, 1, 3],
                                     [1, 2, 3],
                                     [0, 2, 3]], dtype=int)
    surface.number_of_triangles = 4
    surface.number_of_vertices = 4
    surface.triangle_normals = numpy.ones((4, 3))
    surface.vertex_normals = numpy.ones((4, 3))
    surface.zero_based_triangles = True
    surface.validate()
    OperationService().initiate_prelaunch(operation, StoreAdapter([surface]), {})
    return algo_id, surface
def _create_value_wrapper(test_user, test_project=None):
    """
    Persist a ValueWrapper dataType together with its parent Operation.
    This is also used in ProjectStructureTest.
    :returns: (project, value-wrapper GID, operation GID)
    """
    if test_project is None:
        test_project = TestFactory.create_project(test_user, 'test_proj')
    operation = TestFactory.create_operation(test_user=test_user,
                                             test_project=test_project)
    wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
    wrapper.type = "ValueWrapper"
    wrapper.module = "tvb.datatypes.mapped_values"
    wrapper.subject = "John Doe"
    wrapper.state = "RAW_STATE"
    wrapper.set_operation_id(operation.id)
    OperationService().initiate_prelaunch(operation, StoreAdapter([wrapper]), {})
    # Sanity check: exactly one wrapper should exist in the project now.
    stored = FlowService().get_available_datatypes(
        test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")[0]
    if len(stored) != 1:
        raise Exception("Should be only one value wrapper.")
    result_vw = ABCAdapter.load_entity_by_gid(stored[0][2])
    return test_project, result_vw.gid, operation.gid