Example #1
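# NOTE: these snippets are methods lifted from larger TVB test classes; the
# imports below are a best-effort reconstruction. The stdlib/numpy imports are
# certain, while the TVB-specific names used (dao, h5, model, FlowService,
# TimeSeries, TimeSeriesRegion, TimeSeriesRegionH5, TimeSeriesIndex,
# GenericAttributes, DEFAULTDATASTATE_RAW_DATA, ...) are assumed to come from
# the TVB framework packages.
import csv
import json
import os
import uuid

import numpy
import numpy as np  # the helper below uses the "np" alias, the tests use "numpy"
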
    def __import_time_series_csv_datatype(self, hrf_folder, connectivity_gid,
                                          patient, user_tag):
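        # Read the CSV: one row per time point, one column per region.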
        path = os.path.join(hrf_folder, self.TIME_SERIES_CSV_FILE)
        with open(path) as csv_file:
            csv_reader = csv.reader(
                csv_file, delimiter=CSVDelimiterOptionsEnum.COMMA.value)
            ts = list(csv_reader)

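        # Reshape into TVB's canonical 4D layout (time, state variables, space,
        # modes); the time vector is a random placeholder, fine for dummy data.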
        ts_data = np.array(ts, dtype=np.float64).reshape(
            (len(ts), 1, len(ts[0]), 1))
        ts_time = np.random.rand(ts_data.shape[0])

        project = dao.get_project_by_id(self.current_project_id)

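        # Generate a fresh GID and place the H5 file in this operation's folder.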
        ts_gid = uuid.uuid4()
        h5_path = "TimeSeries_{}.h5".format(ts_gid.hex)
        operation_folder = self.storage_interface.get_project_folder(
            project.name, str(self.operation_id))
        h5_path = os.path.join(operation_folder, h5_path)

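        # Load the connectivity and assemble the in-memory TimeSeriesRegion.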
        conn = h5.load_from_gid(connectivity_gid)
        ts = TimeSeriesRegion()
        ts.data = ts_data
        ts.time = ts_time
        ts.gid = ts_gid
        ts.connectivity = conn
        generic_attributes = GenericAttributes()
        generic_attributes.user_tag_1 = user_tag
        generic_attributes.state = DEFAULTDATASTATE_RAW_DATA

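        # Persist the datatype and its metadata to the H5 file.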
        with TimeSeriesRegionH5(h5_path) as ts_h5:
            ts_h5.store(ts)
            ts_h5.nr_dimensions.store(4)
            ts_h5.subject.store(patient)
            ts_h5.store_generic_attributes(generic_attributes)

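        # Create the DB index entry that makes the datatype queryable.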
        ts_index = TimeSeriesIndex()
        ts_index.gid = ts_gid.hex
        ts_index.fk_from_operation = self.operation_id
        ts_index.time_series_type = "TimeSeriesRegion"
        ts_index.data_length_1d = ts_data.shape[0]
        ts_index.data_length_2d = ts_data.shape[1]
        ts_index.data_length_3d = ts_data.shape[2]
        ts_index.data_length_4d = ts_data.shape[3]
        ts_index.data_ndim = len(ts_data.shape)
        ts_index.sample_period_unit = 'ms'
        ts_index.sample_period = TimeSeries.sample_period.default
        ts_index.sample_rate = 1024.0
        ts_index.subject = patient
        ts_index.state = DEFAULTDATASTATE_RAW_DATA
        ts_index.labels_ordering = json.dumps(
            list(TimeSeries.labels_ordering.default))
        ts_index.labels_dimensions = json.dumps(
            TimeSeries.labels_dimensions.default)
        ts_index.visible = False  # hidden from the UI: this is only a dummy TimeSeries
        dao.store_entity(ts_index)

        return ts_gid

    def test_adapter_launch(self):
        """
        Test that the adapter launches and successfully generates a datatype measure entry.
        """
        meta = {
            DataTypeMetaData.KEY_SUBJECT: "John Doe",
            DataTypeMetaData.KEY_STATE: "RAW_DATA"
        }
        algo_group = FlowService().get_algorithm_by_module_and_class(
            SIMULATOR_MODULE, SIMULATOR_CLASS)[1]
        self.operation = model.Operation(self.test_user.id,
                                         self.test_project.id,
                                         algo_group.id,
                                         json.dumps(''),
                                         meta=json.dumps(meta),
                                         status=model.STATUS_STARTED,
                                         method_name=ABCAdapter.LAUNCH_METHOD)
        self.operation = dao.store_entity(self.operation)
        storage_path = FilesHelper().get_project_folder(
            self.test_project, str(self.operation.id))
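        # Build a 10x10x10x10 dummy data block and a matching 10-point time vector.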
        dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
        dummy_time = numpy.arange(1, 11)

        # Get connectivity
        connectivities = FlowService().get_available_datatypes(
            self.test_project.id, "tvb.datatypes.connectivity.Connectivity")[0]
        self.assertEqual(2, len(connectivities))
        connectivity_gid = connectivities[0][2]

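        # Write the dummy data to disk as a TimeSeriesRegion and attach the
        # connectivity.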
        dummy_time_series = TimeSeriesRegion()
        dummy_time_series.storage_path = storage_path
        dummy_time_series.write_data_slice(dummy_input)
        dummy_time_series.write_time_slice(dummy_time)
        dummy_time_series.close_file()
        dummy_time_series.start_time = 0.0
        dummy_time_series.sample_period = 1.0
        dummy_time_series.connectivity = connectivity_gid

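        # Store the datatype through a regular operation so it gets a DB entry.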
        adapter_instance = StoreAdapter([dummy_time_series])
        OperationService().initiate_prelaunch(self.operation, adapter_instance,
                                              {})

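        # Reload the stored entity and compute all available time series metrics.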
        dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                                   dummy_time_series.gid,
                                                   'gid')[0]
        ts_metric_adapter = TimeseriesMetricsAdapter()
        resulted_metric = ts_metric_adapter.launch(dummy_time_series)
        self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                        "Result should be a datatype measure.")
        self.assertTrue(
            len(resulted_metric.metrics) >= len(
                ts_metric_adapter.available_algorithms),
            "At least one result should have been generated for every metric.")
        for metric_value in resulted_metric.metrics.values():
            self.assertTrue(isinstance(metric_value, (float, int)))