def launch(self, time_series, dt=None, bold_model=None, RBM=None, neural_input_transformation=None):
    """
    Launch algorithm and build results.

    :param time_series: the input time-series used as neural activation in the Balloon Model
    :returns: the simulated BOLD signal
    :rtype: `TimeSeries`

    NOTE(review): dt, bold_model, RBM and neural_input_transformation are accepted but not
    used in this body — presumably consumed by the adapter framework/UI form; confirm.
    """
    # Full time vector for the input series; also reused as the output time axis.
    time_line = time_series.read_time_page(0, self.input_shape[0])
    bold_signal = TimeSeriesRegion(storage_path=self.storage_path,
                                   sample_period=time_series.sample_period,
                                   start_time=time_series.start_time,
                                   connectivity=time_series.connectivity)

    ##---------- Iterate over slices and compose final result ------------##
    # Slice template over (time, state-var, node, mode); index 2 (node) is filled per
    # iteration so only one region is held in memory at a time.
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]
    # In-memory scratch series fed to the Balloon Model for each single region.
    small_ts = TimeSeries(use_storage=False, sample_period=time_series.sample_period, time=time_line)
    for node in range(self.input_shape[2]):
        node_slice[2] = slice(node, node + 1)
        small_ts.data = time_series.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_bold = self.algorithm.evaluate()
        # Append this region's BOLD along dimension 2 of the stored result.
        bold_signal.write_data_slice(partial_bold.data, grow_dimension=2)
    bold_signal.write_time_slice(time_line)
    bold_signal.close_file()
    return bold_signal
def compute_fcd(con, bold_data, parameter_monitor, wind_len=180e3, wind_sp=4e3):
    """
    Compute the functional connectivity dynamics (FCD) matrix of a BOLD recording.

    :param con: connectivity the BOLD data was simulated on
    :param bold_data: 4D BOLD array (time, state-var, region, mode)
    :param parameter_monitor: dict holding the BOLD monitor settings
    :param wind_len: sliding-window length, in ms
    :param wind_sp: sliding-window step, in ms
    :returns: 2D FCD matrix (window x window)
    """
    sampling_period = parameter_monitor['parameter_Bold']['period']
    # Wrap the raw array in a region time series so the analyser can consume it.
    region_ts = TimeSeriesRegion(connectivity=con, data=bold_data, sample_period=sampling_period)
    region_ts.configure()
    # Sliding-window FCD analysis (sw = window length, sp = window step).
    analyser = fcd.FcdCalculator(time_series=region_ts, sw=wind_len, sp=wind_sp)
    result = analyser.evaluate()
    # First returned array, with the trailing singleton dimensions dropped.
    return result[0][:, :, 0, 0]
def create_timeseries(self, connectivity, ts_type=None, sensors=None):
    """
    Create a stored TimeSeries entity.

    :param connectivity: connectivity to attach when building a region time series
    :param ts_type: "EEG" builds a TimeSeriesEEG; anything else a TimeSeriesRegion
    :param sensors: sensors used only for the EEG variant
    :returns: the persisted datatype, re-fetched from the database
    """
    operation, _, storage_path = self.__create_operation()
    if ts_type == "EEG":
        time_series = TimeSeriesEEG(storage_path=storage_path, sensors=sensors)
    else:
        # Look up a region mapping linked to this connectivity; fall back to None.
        rm = dao.get_generic_entity(RegionMapping, connectivity.gid, '_connectivity')
        if len(rm) < 1:
            rm = None
        else:
            rm = rm[0]
        time_series = TimeSeriesRegion(storage_path=storage_path, connectivity=connectivity,
                                       region_mapping=rm)
    # Fill with random 4D data so the entity has a valid shape on disk.
    data = numpy.random.random((10, 10, 10, 10))
    time_series.write_data_slice(data)
    time_series.write_time_slice(numpy.arange(10))
    # Persist through the standard operation/prelaunch machinery.
    adapter_instance = StoreAdapter([time_series])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    time_series = dao.get_datatype_by_gid(time_series.gid)
    return time_series
def build(connectivity, region_mapping, test_user=None, test_project=None):
    """
    Build and persist a small synthetic TimeSeriesRegion index for tests.

    Three regions carry known sinusoids (40 Hz, 200 Hz, and a 100+300 Hz mixture)
    so spectral analyses have predictable content.

    :returns: the stored TimeSeriesRegionIndex entity
    """
    time = numpy.linspace(0, 1000, 4000)
    data = numpy.zeros((time.size, 1, 3, 1))
    # Common angular phase; per-node frequencies applied below.
    phase = 2 * numpy.pi * time / 1000.0
    data[:, 0, 0, 0] = numpy.sin(phase * 40)
    data[:, 0, 1, 0] = numpy.sin(phase * 200)
    data[:, 0, 2, 0] = numpy.sin(phase * 100) + numpy.sin(phase * 300)
    ts = TimeSeriesRegion(time=time, data=data, sample_period=1.0 / 4000,
                          connectivity=connectivity, region_mapping=region_mapping)

    op = operation_factory(test_user=test_user, test_project=test_project)

    # Build the DB index, then write the HDF5 payload alongside it.
    index = TimeSeriesRegionIndex()
    index.fk_from_operation = op.id
    index.fill_from_has_traits(ts)

    h5_file_path = h5.path_for_stored_index(index)
    with TimeSeriesRegionH5(h5_file_path) as h5_file:
        h5_file.store(ts)
        h5_file.sample_rate.store(ts.sample_rate)
        h5_file.nr_dimensions.store(ts.data.ndim)

    return dao.store_entity(index)
def test_import_bold(self):
    """Import a MAT BOLD time course and check the resulting region series shape."""
    ### Retrieve Adapter instance
    importer = TestFactory.create_adapter('tvb.adapters.uploaders.mat_timeseries_importer',
                                          'MatTimeSeriesImporter')
    args = dict(data_file=self.bold_path,
                dataset_name='QL_20120824_DK_BOLD_timecourse',
                structure_path='',
                transpose=False,
                slice=None,
                sampling_rate=1000,
                start_time=0,
                tstype='region',
                tstype_parameters_option_region_connectivity=self.connectivity.gid,
                Data_Subject="QL")
    ### Launch import Operation
    FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)
    # Fetch the imported entity back and verify the 4D shape
    # (time, state-var, region, mode) matches the dataset.
    tsr = TestFactory.get_entity(self.test_project, TimeSeriesRegion())
    self.assertEqual((661, 1, 68, 1), tsr.read_data_shape())
def create_time_series(self, storage_path, connectivity=None, surface=None, region_map=None,
                       region_volume_map=None):
    """
    Create a time series instance that will be populated by this monitor
    :param surface: if present a TimeSeriesSurface is returned
    :param connectivity: if present a TimeSeriesRegion is returned
    Otherwise a plain TimeSeries will be returned
    """
    # Keyword arguments shared by every variant.
    common = dict(storage_path=storage_path, sample_period=self.period)
    common.update(self._transform_user_tags())
    monitor_name = self.__class__.__name__

    if surface is not None:
        return TimeSeriesSurface(surface=surface,
                                 title='Surface ' + monitor_name,
                                 **common)
    if connectivity is not None:
        return TimeSeriesRegion(connectivity=connectivity,
                                region_mapping=region_map,
                                region_mapping_volume=region_volume_map,
                                title='Regions ' + monitor_name,
                                **common)
    return TimeSeries(title=' ' + monitor_name, **common)
def create_region_ts(self, data, connectivity):
    """
    Build an empty TimeSeriesRegion bound to *connectivity*.

    :raises LaunchException: when the data's channel count does not match the
        connectivity's number of regions.
    """
    n_regions = connectivity.number_of_regions
    if n_regions != data.shape[1]:
        raise LaunchException("Data has %d channels but the connectivity has %d nodes"
                              % (data.shape[1], n_regions))
    return TimeSeriesRegion(storage_path=self.storage_path, connectivity=connectivity)
def create_time_series(self, connectivity=None, surface=None, region_map=None, region_volume_map=None):
    """Return a TimeSeriesRegion configured with this monitor's sampling period."""
    title = 'Regions ' + self.__class__.__name__
    return TimeSeriesRegion(connectivity=connectivity,
                            region_mapping=region_map,
                            region_mapping_volume=region_volume_map,
                            sample_period=self.period,
                            title=title)
def compute_fc(con, bold_data, parameter_monitor):
    """
    Compute the static functional connectivity (FC) matrix of a BOLD recording.

    :param con: connectivity the BOLD data was simulated on
    :param bold_data: BOLD array with time as the first dimension
    :param parameter_monitor: dict holding the BOLD monitor settings
    :returns: 2D region-by-region correlation matrix
    """
    import tvb.analyzers.correlation_coefficient as corr_coeff
    from tvb.datatypes.time_series import TimeSeriesRegion

    sampling_period = parameter_monitor['parameter_Bold']['period']
    # Drop the first 10 samples to discard the initial transient.
    trimmed = bold_data[10:, :, :]
    region_ts = TimeSeriesRegion(connectivity=con, data=trimmed, sample_period=sampling_period)
    region_ts.configure()
    # Pearson correlation across regions.
    analyser = corr_coeff.CorrelationCoefficient(time_series=region_ts)
    result = analyser.evaluate()
    result.configure()
    return result.array_data[..., 0, 0]
def build(connectivity, region_mapping):
    """
    Build an in-memory synthetic TimeSeriesRegion for tests.

    Three regions carry known sinusoids (40 Hz, 200 Hz, and a 100+300 Hz mixture)
    so spectral analyses have predictable content.
    """
    time = numpy.linspace(0, 1000, 4000)
    data = numpy.zeros((time.size, 1, 3, 1))
    # Common angular phase; per-node frequencies applied below.
    phase = 2 * numpy.pi * time / 1000.0
    data[:, 0, 0, 0] = numpy.sin(phase * 40)
    data[:, 0, 1, 0] = numpy.sin(phase * 200)
    data[:, 0, 2, 0] = numpy.sin(phase * 100) + numpy.sin(phase * 300)
    return TimeSeriesRegion(time=time, data=data, sample_period=1.0 / 4000,
                            connectivity=connectivity, region_mapping=region_mapping)
def create_time_series(self, connectivity=None, surface=None, region_map=None, region_volume_map=None):
    """
    Create the time series this SpatialAverage monitor will populate: a
    TimeSeriesRegion when the averaging mask is the default region mask,
    otherwise a plain TimeSeries built by the parent monitor.
    """
    if self.is_default_special_mask:
        return TimeSeriesRegion(sample_period=self.period,
                                region_mapping=region_map,
                                region_mapping_volume=region_volume_map,
                                title='Regions ' + self.__class__.__name__,
                                connectivity=connectivity)
    else:
        # mask does not correspond to the number of regions
        # let the parent create a plain TimeSeries
        # NOTE(review): the super() call forwards none of the arguments, so the
        # parent builds the series from its defaults/self only — confirm the parent
        # signature accepts a no-argument call.
        return super(SpatialAverage, self).create_time_series()
def create_region_ts(self, data_shape, connectivity):
    """
    Prepare a TimeSeriesRegion trait, its DB index, and its (open) H5 file.

    :raises LaunchException: when the channel count does not match the
        connectivity's number of regions.
    :returns: (TimeSeriesRegion, TimeSeriesRegionIndex, TimeSeriesRegionH5) tuple
    """
    expected = connectivity.number_of_regions
    if expected != data_shape[1]:
        raise LaunchException("Data has %d channels but the connectivity has %d nodes"
                              % (data_shape[1], expected))

    index = TimeSeriesRegionIndex()
    index.fk_connectivity_gid = connectivity.gid
    index.has_surface_mapping = True

    h5_file = TimeSeriesRegionH5(h5.path_for(self.storage_path, TimeSeriesRegionH5, index.gid))
    h5_file.connectivity.store(uuid.UUID(connectivity.gid))
    return TimeSeriesRegion(), index, h5_file
def __import_time_series_csv_datatype(self, hrf_folder, connectivity_gid, patient, user_tag):
    """
    Import a CSV time series as a (hidden) TimeSeriesRegion datatype.

    :param hrf_folder: folder containing the CSV file
    :param connectivity_gid: GID of the connectivity to attach
    :param patient: subject label stored on the entity
    :param user_tag: stored as user_tag_1 on the generic attributes
    :returns: the UUID of the created time series
    """
    path = os.path.join(hrf_folder, self.TIME_SERIES_CSV_FILE)
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=CSVDelimiterOptionsEnum.COMMA.value)
        ts = list(csv_reader)

    # Reshape rows into the canonical 4D layout (time, 1, channel, 1).
    ts_data = np.array(ts, dtype=np.float64).reshape((len(ts), 1, len(ts[0]), 1))
    # Dummy time vector — these series are placeholders (see `visible = False` below).
    ts_time = np.random.rand(ts_data.shape[0], )

    project = dao.get_project_by_id(self.current_project_id)

    ts_gid = uuid.uuid4()
    h5_path = "TimeSeries_{}.h5".format(ts_gid.hex)
    operation_folder = self.storage_interface.get_project_folder(project.name,
                                                                 str(self.operation_id))
    h5_path = os.path.join(operation_folder, h5_path)

    conn = h5.load_from_gid(connectivity_gid)
    ts = TimeSeriesRegion()
    ts.data = ts_data
    ts.time = ts_time
    ts.gid = ts_gid
    ts.connectivity = conn
    generic_attributes = GenericAttributes()
    generic_attributes.user_tag_1 = user_tag
    generic_attributes.state = DEFAULTDATASTATE_RAW_DATA

    # Persist the H5 payload.
    with TimeSeriesRegionH5(h5_path) as ts_h5:
        ts_h5.store(ts)
        ts_h5.nr_dimensions.store(4)
        ts_h5.subject.store(patient)
        ts_h5.store_generic_attributes(generic_attributes)

    # Build the matching DB index by hand.
    ts_index = TimeSeriesIndex()
    ts_index.gid = ts_gid.hex
    ts_index.fk_from_operation = self.operation_id
    ts_index.time_series_type = "TimeSeriesRegion"
    ts_index.data_length_1d = ts_data.shape[0]
    ts_index.data_length_2d = ts_data.shape[1]
    ts_index.data_length_3d = ts_data.shape[2]
    ts_index.data_length_4d = ts_data.shape[3]
    ts_index.data_ndim = len(ts_data.shape)
    ts_index.sample_period_unit = 'ms'
    ts_index.sample_period = TimeSeries.sample_period.default
    # NOTE(review): sample_rate is hard-coded to 1024.0 rather than derived from
    # sample_period — presumably acceptable for these dummy series; confirm.
    ts_index.sample_rate = 1024.0
    ts_index.subject = patient
    ts_index.state = DEFAULTDATASTATE_RAW_DATA
    ts_index.labels_ordering = json.dumps(list(TimeSeries.labels_ordering.default))
    ts_index.labels_dimensions = json.dumps(TimeSeries.labels_dimensions.default)
    ts_index.visible = False  # we don't want to show these TimeSeries because they are dummy
    dao.store_entity(ts_index)

    return ts_gid
def create_region_ts(self, data_shape, connectivity):
    """
    Prepare a TimeSeriesRegion trait, its DB index, and its (open) H5 file,
    attaching an existing region mapping for the connectivity when one exists.

    :raises LaunchException: when the channel count does not match the
        connectivity's number of regions.
    :returns: (TimeSeriesRegion, TimeSeriesRegionIndex, TimeSeriesRegionH5) tuple
    """
    n_nodes = connectivity.number_of_regions
    if n_nodes != data_shape[1]:
        raise LaunchException("Data has %d channels but the connectivity has %d nodes"
                              % (data_shape[1], n_nodes))

    index = TimeSeriesRegionIndex()
    index.fk_connectivity_gid = connectivity.gid

    # Link the first region mapping registered for this connectivity, if any.
    mappings = dao.get_generic_entity(RegionMappingIndex, connectivity.gid, 'fk_connectivity_gid')
    index.has_surface_mapping = False
    if len(mappings) > 0:
        index.fk_region_mapping_gid = mappings[0].gid
        index.has_surface_mapping = True

    ts_h5 = TimeSeriesRegionH5(self.path_for(TimeSeriesRegionH5, index.gid))
    ts_h5.connectivity.store(uuid.UUID(connectivity.gid))
    return TimeSeriesRegion(), index, ts_h5
LOG = get_logger(__name__) #Load the demo region timeseries dataset try: data = numpy.load("demo_data_region_16s_2048Hz.npy") except IOError: LOG.error("Can't load demo data. Run demos/generate_region_demo_data.py") raise period = 0.00048828125 # s #Put the data into a TimeSeriesRegion datatype white_matter = connectivity.Connectivity() tsr = TimeSeriesRegion(connectivity=white_matter, data=data, sample_period=period) tsr.configure() #Create and run the analyser corrcoeff_analyser = corr_coeff.CorrelationCoefficient(time_series=tsr) corrcoeff_data = corrcoeff_analyser.evaluate() #Generate derived data, if any... corrcoeff_data.configure() # Plot matrix with numbers # For visualization purposes, the diagonal is set to zero. FC = corrcoeff_data.array_data[:, :, 0, 0] numpy.fill_diagonal(FC, 0.) pyplot.matshow(FC, cmap='RdBu', vmin=-0.5, vmax=0.5, interpolation='nearest')
def create_time_series_region_object():
    """
    Run a short TVB-NEST co-simulation and wrap its output in a TimeSeriesRegion.

    Regions whose label contains "hippo" are simulated in NEST; the rest in TVB.
    :returns: TimeSeriesRegion holding 100 ms of simulated activity
    """
    config = CONFIGURED
    connectivity = Connectivity.from_file(config.DEFAULT_CONNECTIVITY_ZIP)
    connectivity.configure()

    simulator = Simulator()
    simulator.model = ReducedWongWangExcIOInhI()

    def boundary_fun(state):
        # Clamp the synaptic gating variables to their valid range [0, 1].
        state[state < 0] = 0.0
        state[state > 1] = 1.0
        return state

    simulator.boundary_fun = boundary_fun
    simulator.connectivity = connectivity
    # Round dt to an integer multiple of NEST's minimum step so both simulators
    # stay aligned on the same time grid.
    simulator.integrator.dt = \
        float(int(numpy.round(simulator.integrator.dt / config.nest.NEST_MIN_DT))) * config.nest.NEST_MIN_DT
    mon_raw = Raw(period=simulator.integrator.dt)
    simulator.monitors = (mon_raw, )

    # Select the regions to hand over to NEST (labels containing "hippo").
    # NOTE(review): `.find("hippo") > 0` skips labels *starting* with "hippo"
    # (find == 0) — presumably intentional for these label names; confirm.
    number_of_regions = simulator.connectivity.region_labels.shape[0]
    nest_nodes_ids = []
    for id in range(number_of_regions):
        if simulator.connectivity.region_labels[id].find("hippo") > 0:
            nest_nodes_ids.append(id)

    nest_model_builder = RedWWExcIOInhIBuilder(simulator, nest_nodes_ids, config=config)
    nest_network = nest_model_builder.build_nest_network()

    # Couple the two simulators: TVB drives NEST currents, NEST spike rates feed TVB.
    tvb_nest_builder = RedWWexcIOinhIBuilder(simulator, nest_network, nest_nodes_ids, config=config)
    tvb_nest_builder.tvb_to_nest_interfaces = \
        [{"model": "current", "parameter": "I_e", "sign": 1,
          "connections": {"S_e": ["E", "I"]}}]
    connections = OrderedDict()
    connections["R_e"] = "E"
    connections["R_i"] = "I"
    tvb_nest_builder.nest_to_tvb_interfaces = \
        [{"model": "spike_detector", "params": {}, "connections": connections}]
    tvb_nest_model = tvb_nest_builder.build_interface()

    simulator.configure(tvb_nest_interface=tvb_nest_model)
    results = simulator.run(simulation_length=100.0)
    time = results[0][0]
    source = results[0][1]

    source_ts = TimeSeriesRegion(
        data=source, time=time,
        connectivity=simulator.connectivity,
        labels_ordering=["Time", "Synaptic Gating Variable", "Region", "Neurons"],
        labels_dimensions={"Synaptic Gating Variable": ["S_e", "S_i"],
                           "Region": simulator.connectivity.region_labels.tolist()},
        sample_period=simulator.integrator.dt)

    return source_ts
def test_adapter_launch(self):
    """
    Test that the adapters launches and successfully generates a datatype measure entry.
    """
    meta = {DataTypeMetaData.KEY_SUBJECT: "John Doe", DataTypeMetaData.KEY_STATE: "RAW_DATA"}
    algo = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    self.operation = model.Operation(self.test_user.id, self.test_project.id, algo.id,
                                     json.dumps(''), meta=json.dumps(meta),
                                     status=model.STATUS_STARTED)
    self.operation = dao.store_entity(self.operation)
    storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
    # 4D dummy payload (10x10x10x10) with a matching 10-point time vector.
    dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
    dummy_time = numpy.arange(1, 11)

    # Get connectivity
    connectivities = FlowService().get_available_datatypes(self.test_project.id,
                                                           "tvb.datatypes.connectivity.Connectivity")[0]
    self.assertEqual(2, len(connectivities))
    connectivity_gid = connectivities[0][2]

    # Build and persist a region time series referencing that connectivity.
    dummy_time_series = TimeSeriesRegion()
    dummy_time_series.storage_path = storage_path
    dummy_time_series.write_data_slice(dummy_input)
    dummy_time_series.write_time_slice(dummy_time)
    dummy_time_series.close_file()
    dummy_time_series.start_time = 0.0
    dummy_time_series.sample_period = 1.0
    dummy_time_series.connectivity = connectivity_gid

    adapter_instance = StoreAdapter([dummy_time_series])
    OperationService().initiate_prelaunch(self.operation, adapter_instance, {})

    dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                               dummy_time_series.gid, 'gid')[0]
    # Run the metrics adapter over the stored series and check the results.
    ts_metric_adapter = TimeseriesMetricsAdapter()
    resulted_metric = ts_metric_adapter.launch(dummy_time_series)
    self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                    "Result should be a datatype measure.")
    self.assertTrue(len(resulted_metric.metrics) >= len(ts_metric_adapter.available_algorithms.keys()),
                    "At least a result should have been generated for every metric.")
    for metric_value in resulted_metric.metrics.values():
        self.assertTrue(isinstance(metric_value, (float, int)))
def test_adapter_launch(self):
    """
    Test that the adapters launches and successfully generates a datatype measure entry.
    """
    meta = {
        DataTypeMetaData.KEY_SUBJECT: "John Doe",
        DataTypeMetaData.KEY_STATE: "RAW_DATA"
    }
    algo = FlowService().get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
    self.operation = model.Operation(self.test_user.id,
                                     self.test_project.id,
                                     algo.id,
                                     json.dumps(''),
                                     meta=json.dumps(meta),
                                     status=model.STATUS_STARTED)
    self.operation = dao.store_entity(self.operation)
    storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))
    # 4D dummy payload (10x10x10x10) with a matching 10-point time vector.
    dummy_input = numpy.arange(1, 10001).reshape(10, 10, 10, 10)
    dummy_time = numpy.arange(1, 11)

    # Get connectivity
    connectivities = FlowService().get_available_datatypes(
        self.test_project.id, "tvb.datatypes.connectivity.Connectivity")[0]
    self.assertEqual(2, len(connectivities))
    connectivity_gid = connectivities[0][2]

    # Build and persist a region time series referencing that connectivity.
    dummy_time_series = TimeSeriesRegion()
    dummy_time_series.storage_path = storage_path
    dummy_time_series.write_data_slice(dummy_input)
    dummy_time_series.write_time_slice(dummy_time)
    dummy_time_series.close_file()
    dummy_time_series.start_time = 0.0
    dummy_time_series.sample_period = 1.0
    dummy_time_series.connectivity = connectivity_gid

    adapter_instance = StoreAdapter([dummy_time_series])
    OperationService().initiate_prelaunch(self.operation, adapter_instance, {})

    dummy_time_series = dao.get_generic_entity(dummy_time_series.__class__,
                                               dummy_time_series.gid, 'gid')[0]
    # Run the metrics adapter over the stored series and check the results.
    ts_metric_adapter = TimeseriesMetricsAdapter()
    resulted_metric = ts_metric_adapter.launch(dummy_time_series)
    self.assertTrue(isinstance(resulted_metric, DatatypeMeasure),
                    "Result should be a datatype measure.")
    self.assertTrue(
        len(resulted_metric.metrics) >= len(ts_metric_adapter.available_algorithms.keys()),
        "At least a result should have been generated for every metric.")
    for metric_value in resulted_metric.metrics.values():
        self.assertTrue(isinstance(metric_value, (float, int)))
# Demo: compute the functional connectivity matrix of a pre-generated region
# time series and display it as a triangular matrix plot.
import tvb.analyzers.correlation_coefficient as corr_coeff
from tvb.datatypes.time_series import TimeSeriesRegion

#Load the demo region timeseries dataset
try:
    data = numpy.load("demo_data_region_16s_2048Hz.npy")
except IOError:
    LOG.error("Can't load demo data. Run demos/generate_region_demo_data.py")
    raise

period = 0.00048828125  # s  (1/2048 Hz, matching the demo dataset's name)

#Put the data into a TimeSeriesRegion datatype
white_matter = connectivity.Connectivity(load_default=True)
tsr = TimeSeriesRegion(connectivity=white_matter, data=data, sample_period=period)
tsr.configure()

#Create and run the analyser
corrcoeff_analyser = corr_coeff.CorrelationCoefficient(time_series=tsr)
corrcoeff_data = corrcoeff_analyser.evaluate()

#Generate derived data
corrcoeff_data.configure()

# For visualization purposes, the diagonal is set to zero.
FC = corrcoeff_data.array_data[:, :, 0, 0]

#Display the correlation matrix
# NOTE(review): this call is truncated in the visible source — the remaining
# arguments/closing parenthesis lie outside this chunk.
fig01 = plot_tri_matrix(white_matter.tract_lengths, cmap=pyplot.cm.RdYlBu_r,
LOG.info("Finished simulation.") ##----------------------------------------------------------------------------## ##- Plot pretty pictures of what we just did -## ##----------------------------------------------------------------------------## #Make the lists numpy.arrays for easier use. LOG.info("Converting result to array...") TAVG_TIME = numpy.array(tavg_time) BOLD_TIME = numpy.array(bold_time) BOLD = numpy.array(bold_data) TAVG = numpy.array(tavg_data) #Create TimeSeries instance tsr = TimeSeriesRegion(data=TAVG, time=TAVG_TIME, sample_period=2.) tsr.configure() #Create and run the monitor/analyser bold_model = bold.BalloonModel(time_series=tsr) bold_data = bold_model.evaluate() bold_tsr = TimeSeriesRegion(connectivity=white_matter, data=bold_data.data, time=bold_data.time) #Prutty puctures... tsi = timeseries_interactive.TimeSeriesInteractive(time_series=bold_tsr) tsi.configure() tsi.show()
def __init__(self, input=numpy.array([[], []]), **kwargs):
    """
    Build a Timeseries wrapper either from an existing (wrapped or TVB)
    TimeSeries instance, or from a raw numpy array plus keyword metadata.
    The concrete TVB subclass is chosen from sensors/surface kwargs or the
    explicit ``ts_type`` keyword.
    """
    if isinstance(input, (Timeseries, TimeSeries)):
        # --- Case 1: copy-construct from an existing (wrapped) time series ---
        if isinstance(input, Timeseries):
            self._tvb = deepcopy(input._tvb)
            self.ts_type = str(input.ts_type)
        elif isinstance(input, TimeSeries):
            self._tvb = deepcopy(input)
            # Derive ts_type from the concrete TVB subclass.
            if isinstance(input, TimeSeriesRegion):
                self.ts_type = "Region"
            if isinstance(input, TimeSeriesSEEG):
                self.ts_type = "SEEG"
            elif isinstance(input, TimeSeriesEEG):
                self.ts_type = "EEG"
            elif isinstance(input, TimeSeriesMEG):
                self.ts_type = "MEG"
            # NOTE(review): duplicated, unreachable branch — the EEG case above
            # already captures TimeSeriesEEG instances.
            elif isinstance(input, TimeSeriesEEG):
                self.ts_type = "EEG"
            elif isinstance(input, TimeSeriesVolume):
                self.ts_type = "Volume"
            elif isinstance(input, TimeSeriesSurface):
                self.ts_type = "Surface"
            else:
                self.ts_type = ""
                warning("Input TimeSeries %s is not one of the known TVB TimeSeries classes!"
                        % str(input))
        # Apply extra keyword attributes; fall back to the wrapped instance
        # when the wrapper itself rejects the attribute.
        for attr, value in kwargs.items():
            try:
                setattr(self, attr, value)
            except:
                setattr(self._tvb, attr, value)
    elif isinstance(input, numpy.ndarray):
        # --- Case 2: build from raw data ---
        input = prepare_4D(input, self.logger)
        time = kwargs.pop("time", None)
        if time is not None:
            # Derive start_time / sample_period from the time vector unless
            # explicitly provided.
            start_time = float(
                kwargs.pop("start_time", kwargs.pop("start_time", time[0])))
            sample_period = float(
                kwargs.pop("sample_period",
                           kwargs.pop("sample_period", numpy.mean(numpy.diff(time)))))
            kwargs.update({"start_time": start_time,
                           "sample_period": sample_period})

        # Initialize
        self.ts_type = kwargs.pop("ts_type", "Region")
        labels_ordering = kwargs.get("labels_ordering", None)

        # Get input sensors if any
        input_sensors = None
        if isinstance(kwargs.get("sensors", None), (TVBSensors, Sensors)):
            if isinstance(kwargs["sensors"], Sensors):
                input_sensors = kwargs["sensors"]._tvb
                self.ts_type = "%s sensor" % input_sensors.sensors_type
                kwargs.update({"sensors": input_sensors})
            else:
                input_sensors = kwargs["sensors"]

        # Create Timeseries
        if isinstance(input_sensors, TVBSensors) or \
                self.ts_type in ["SEEG sensor", "Internal sensor", "EEG sensor", "MEG sensor"]:
            # ...for Sensor Timeseries
            if labels_ordering is None:
                labels_ordering = LABELS_ORDERING
                labels_ordering[2] = "%s sensor" % self.ts_type
                kwargs.update({"labels_ordering": labels_ordering})
            if isinstance(input_sensors, TVBSensorsInternal) or isequal_string(self.ts_type, "Internal sensor")\
                    or isequal_string(self.ts_type, "SEEG sensor"):
                self._tvb = TimeSeriesSEEG(data=input, **kwargs)
                self.ts_type = "SEEG sensor"
            elif isinstance(input_sensors, TVBSensorsEEG) or isequal_string(self.ts_type, "EEG sensor"):
                self._tvb = TimeSeriesEEG(data=input, **kwargs)
                self.ts_type = "EEG sensor"
            elif isinstance(input_sensors, TVBSensorsMEG) or isequal_string(self.ts_type, "MEG sensor"):
                self._tvb = TimeSeriesMEG(data=input, **kwargs)
                self.ts_type = "MEG sensor"
            else:
                raise_value_error("Not recognizing sensors of type %s:\n%s"
                                  % (self.ts_type, str(input_sensors)))
        else:
            input_surface = kwargs.pop("surface", None)
            if isinstance(input_surface, (Surface, TVBSurface)) or self.ts_type == "Surface":
                # Surface time series: unwrap a wrapped Surface if needed.
                self.ts_type = "Surface"
                if isinstance(input_surface, Surface):
                    kwargs.update({"surface": input_surface._tvb})
                else:
                    kwargs.update({"surface": input_surface})
                if labels_ordering is None:
                    labels_ordering = LABELS_ORDERING
                    labels_ordering[2] = "Vertex"
                    kwargs.update({"labels_ordering": labels_ordering})
                self._tvb = TimeSeriesSurface(data=input, **kwargs)
            elif isequal_string(self.ts_type, "Region"):
                if labels_ordering is None:
                    labels_ordering = LABELS_ORDERING
                    labels_ordering[2] = "Region"
                    kwargs.update({"labels_ordering": labels_ordering})
                self._tvb = TimeSeriesRegion(data=input, **kwargs)  # , **kwargs
            elif isequal_string(self.ts_type, "Volume"):
                if labels_ordering is None:
                    labels_ordering = ["Time", "X", "Y", "Z"]
                    kwargs.update({"labels_ordering": labels_ordering})
                self._tvb = TimeSeriesVolume(data=input, **kwargs)
            else:
                # Fall back to a plain TimeSeries for unknown ts_type values.
                self._tvb = TimeSeries(data=input, **kwargs)

    # Warn when the dimension labels don't cover the declared ordering.
    if not numpy.all([dim_label in self._tvb.labels_dimensions.keys()
                      for dim_label in self._tvb.labels_ordering]):
        warning("Lack of correspondance between timeseries labels_ordering %s\n"
                "and labels_dimensions!: %s"
                % (self._tvb.labels_ordering, self._tvb.labels_dimensions.keys()))

    self._tvb.configure()
    self.configure_time()
    self.configure_sample_rate()
    if len(self.title) == 0:
        self._tvb.title = "%s Time Series" % self.ts_type
# Demo: run PCA over a pre-generated region time series and wrap the component
# time series in a new TimeSeriesRegion.
from tvb.simulator.plot import timeseries_interactive as timeseries_interactive
from tvb.simulator.plot.tools import *

#Load the demo region timeseries dataset
try:
    data = numpy.load("demo_data_region_16s_2048Hz.npy")
except IOError:
    LOG.error("Can't load demo data. Run demos/generate_region_demo_data.py")
    raise

period = 0.00048828125  #s  (1/2048 Hz, matching the demo dataset's name)

#Put the data into a TimeSeriesRegion datatype
white_matter = connectivity.Connectivity()
tsr = TimeSeriesRegion(connectivity=white_matter,
                       data=data,
                       sample_period=period)
tsr.configure()

#Create and run the analyser
pca_analyser = pca.PCA(time_series=tsr)
pca_data = pca_analyser.evaluate()

#Generate derived data, such as, compnent time series, etc.
pca_data.configure()

#Put the data into a TimeSeriesSurface datatype
component_tsr = TimeSeriesRegion(connectivity=white_matter,
                                 data=pca_data.component_time_series,
                                 sample_period=period)
component_tsr.configure()
# Demo: run PCA over a pre-generated region time series and wrap the component
# time series in a new TimeSeriesRegion.
from tvb.simulator.plot import timeseries_interactive as timeseries_interactive
from tvb.simulator.plot.tools import *

#Load the demo region timeseries dataset
try:
    data = numpy.load("demo_data_region_16s_2048Hz.npy")
except IOError:
    LOG.error("Can't load demo data. Run demos/generate_region_demo_data.py")
    raise

period = 0.00048828125  #s  (1/2048 Hz, matching the demo dataset's name)

#Put the data into a TimeSeriesRegion datatype
white_matter = connectivity.Connectivity()
tsr = TimeSeriesRegion(connectivity=white_matter, data=data, sample_period=period)
tsr.configure()

#Create and run the analyser
pca_analyser = pca.PCA(time_series=tsr)
pca_data = pca_analyser.evaluate()

#Generate derived data, such as, compnent time series, etc.
pca_data.configure()

#Put the data into a TimeSeriesSurface datatype
component_tsr = TimeSeriesRegion(connectivity=white_matter,
                                 data=pca_data.component_time_series,
                                 sample_period=period)
component_tsr.configure()