Example No. 1
    def create_time_series(self,
                           storage_path,
                           connectivity=None,
                           surface=None,
                           region_map=None,
                           region_volume_map=None):
        """
        Create a time series instance that will be populated by this monitor
        :param surface: if present a TimeSeriesSurface is returned
        :param connectivity: if present a TimeSeriesRegion is returned
        Otherwise a plain TimeSeries will be returned
        """
        if surface is not None:
            return TimeSeriesSurface(storage_path=storage_path,
                                     surface=surface,
                                     sample_period=self.period,
                                     title='Surface ' + self.__class__.__name__,
                                     **self._transform_user_tags())
        if connectivity is not None:
            return TimeSeriesRegion(storage_path=storage_path,
                                    connectivity=connectivity,
                                    region_mapping=region_map,
                                    region_mapping_volume=region_volume_map,
                                    sample_period=self.period,
                                    title='Regions ' + self.__class__.__name__,
                                    **self._transform_user_tags())

        return TimeSeries(storage_path=storage_path,
                          sample_period=self.period,
                          title=' ' + self.__class__.__name__,
                          **self._transform_user_tags())
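As context, a minimal sketch of how such a factory might be driven from user code, assuming the legacy TVB simulator API; the monitor class, import paths, storage path and connectivity source below are illustrative and not taken from the example itself:

# Hedged usage sketch (hypothetical setup, legacy TVB API assumed)
from tvb.simulator.monitors import TemporalAverage
from tvb.datatypes.connectivity import Connectivity

monitor = TemporalAverage(period=0.9765625)       # any configured monitor
conn = Connectivity.from_file()                   # default connectivity shipped with TVB
ts_region = monitor.create_time_series("/tmp/ts_storage", connectivity=conn)
ts_plain = monitor.create_time_series("/tmp/ts_storage")  # falls back to a plain TimeSeries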
Example No. 2
    def _parse_timeseries(self, data_arrays):
        # Create TVB time series to be filled
        time_series = TimeSeriesSurface()
        time_series.start_time = 0.0
        time_series.sample_period = 1.0

        # First process the first data_array and extract important data from its metadata
        meta_dict = self._get_meta_dict(data_arrays[0])
        sample_period = meta_dict.get(self.TIME_STEP_ATTR)
        time_series.subject = meta_dict.get(self.SUBJECT_ATTR)
        time_series.title = meta_dict.get(self.NAME_ATTR)

        if sample_period:
            time_series.sample_period = float(sample_period)
            time_series.sample_rate = 1 / time_series.sample_period

        return time_series, data_arrays
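For orientation, this is the kind of mapping _get_meta_dict is assumed to return for the first data array; the keys mirror the metadata attribute constants used above (and defined in the full parser of Example No. 8), while the values here are purely hypothetical:

# Assumed shape of the metadata dictionary (hypothetical values)
meta_dict = {
    "TimeStep": "0.001",                      # TIME_STEP_ATTR -> sample_period
    "SubjectID": "subject_01",                # SUBJECT_ATTR   -> subject
    "Name": "Imported surface time series",   # NAME_ATTR      -> title
}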
Example No. 3
    def _parse_timeseries(self, data_arrays):
        # Create TVB time series to be filled
        time_series = TimeSeriesSurface()
        time_series.storage_path = self.storage_path
        time_series.set_operation_id(self.operation_id)
        time_series.start_time = 0.0
        time_series.sample_period = 1.0

        # First process the first data_array and extract important data from its metadata
        meta_dict = self._get_meta_dict(data_arrays[0])
        gid = meta_dict.get(self.UNIQUE_ID_ATTR)
        sample_period = meta_dict.get(self.TIME_STEP_ATTR)
        time_series.subject = meta_dict.get(self.SUBJECT_ATTR)
        time_series.title = meta_dict.get(self.NAME_ATTR)

        if gid:
            time_series.gid = gid.replace("{", "").replace("}", "")
        if sample_period:
            time_series.sample_period = float(sample_period)
        # todo : make sure that write_time_slice is not required here
        # Now read time series data
        for data_array in data_arrays:
            time_series.write_data_slice([data_array.data])

        # Close file after writing data
        time_series.close_file()

        return time_series
Example No. 4
#Load the demo surface timeseries dataset
try:
    data = numpy.load("demo_data_surface_8s_2048Hz.npy")
except IOError:
    LOG.error("Can't load demo data. Run demos/generate_surface_demo_data.py")
    raise

period = 0.00048828125  # s

#Initialise a default surface
default_cortex = surfaces.Cortex.from_file()

#Put the data into a TimeSeriesSurface datatype
tsr = TimeSeriesSurface(surface=default_cortex,
                        data=data,
                        sample_period=period)
tsr.configure()

#Create and run the analyser
pca_analyser = pca.PCA(time_series=tsr)
pca_data = pca_analyser.evaluate()

#Generate derived data, such as component time series, etc.
pca_data.configure()

#Put the data into a TimeSeriesSurface datatype
component_tsr = TimeSeriesSurface(surface=default_cortex,
                                  data=pca_data.component_time_series,
                                  sample_period=period)
component_tsr.configure()
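The hard-coded period matches the 2048 Hz sampling rate implied by the dataset's file name; a quick sanity check:

# The sample period is just the inverse of the 2048 Hz sampling rate
assert abs(1.0 / 2048.0 - 0.00048828125) < 1e-15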
Example No. 5
    def _parse_timeseries(self, data_arrays):
        # Create TVB time series to be filled
        time_series = TimeSeriesSurface()
        time_series.storage_path = self.storage_path
        time_series.set_operation_id(self.operation_id)
        time_series.start_time = 0.0
        time_series.sample_period = 1.0

        # First process the first data_array and extract important data from its metadata
        meta_dict = self._get_meta_dict(data_arrays[0])
        gid = meta_dict.get(self.UNIQUE_ID_ATTR)
        sample_period = meta_dict.get(self.TIME_STEP_ATTR)
        time_series.subject = meta_dict.get(self.SUBJECT_ATTR)
        time_series.title = meta_dict.get(self.NAME_ATTR)

        if gid:
            time_series.gid = gid.replace("{", "").replace("}", "")
        if sample_period:
            time_series.sample_period = float(sample_period)
        # todo : make sure that write_time_slice is not required here
        # Now read time series data
        for data_array in data_arrays:
            time_series.write_data_slice([data_array.data])

        # Close file after writing data
        time_series.close_file()

        return time_series
Example No. 6
#Load the demo surface timeseries dataset
try:
    data = numpy.load("demo_data_surface_8s_2048Hz.npy")
except IOError:
    LOG.error("Can't load demo data. Run demos/generate_surface_demo_data.py")
    raise

period = 0.00048828125 #s

#Initialise a default surface
default_cortex = surfaces_datatypes.Cortex()

#Put the data into a TimeSeriesSurface datatype
tsr = TimeSeriesSurface(surface=default_cortex,
                        data=data,
                        sample_period=period)
tsr.configure()

#Create and run the analyser
pca_analyser = pca.PCA(time_series=tsr)
pca_data = pca_analyser.evaluate()

#Generate derived data, such as component time series, etc.
pca_data.configure()

#Put the data into a TimeSeriesSurface datatype
component_tsr = TimeSeriesSurface(surface=default_cortex,
                                  data=pca_data.component_time_series,
                                  sample_period=period)
component_tsr.configure()
Example No. 7
    def __init__(self, input=numpy.array([[], []]), **kwargs):
        if isinstance(input, (Timeseries, TimeSeries)):

            if isinstance(input, Timeseries):
                self._tvb = deepcopy(input._tvb)
                self.ts_type = str(input.ts_type)

            elif isinstance(input, TimeSeries):
                self._tvb = deepcopy(input)
                # Classify the wrapped TVB time series by its concrete type
                if isinstance(input, TimeSeriesRegion):
                    self.ts_type = "Region"
                elif isinstance(input, TimeSeriesSEEG):
                    self.ts_type = "SEEG"
                elif isinstance(input, TimeSeriesEEG):
                    self.ts_type = "EEG"
                elif isinstance(input, TimeSeriesMEG):
                    self.ts_type = "MEG"
                elif isinstance(input, TimeSeriesVolume):
                    self.ts_type = "Volume"
                elif isinstance(input, TimeSeriesSurface):
                    self.ts_type = "Surface"
                else:
                    self.ts_type = ""
                    warning(
                        "Input TimeSeries %s is not one of the known TVB TimeSeries classes!"
                        % str(input))
            for attr, value in kwargs.items():
                try:
                    # Prefer setting the attribute on the wrapper itself
                    setattr(self, attr, value)
                except Exception:
                    # Fall back to the wrapped TVB time series
                    setattr(self._tvb, attr, value)

        elif isinstance(input, numpy.ndarray):
            input = prepare_4D(input, self.logger)
            time = kwargs.pop("time", None)
            if time is not None:
                # "start_time" and "sample_period" default to values derived from the time vector
                start_time = float(kwargs.pop("start_time", time[0]))
                sample_period = float(
                    kwargs.pop("sample_period", numpy.mean(numpy.diff(time))))
                kwargs.update({
                    "start_time": start_time,
                    "sample_period": sample_period
                })

            # Initialize
            self.ts_type = kwargs.pop("ts_type", "Region")
            labels_ordering = kwargs.get("labels_ordering", None)

            # Get input sensors if any
            input_sensors = None
            if isinstance(kwargs.get("sensors", None), (TVBSensors, Sensors)):
                if isinstance(kwargs["sensors"], Sensors):
                    input_sensors = kwargs["sensors"]._tvb
                    self.ts_type = "%s sensor" % input_sensors.sensors_type
                    kwargs.update({"sensors": input_sensors})
                else:
                    input_sensors = kwargs["sensors"]

            # Create Timeseries
            if isinstance(input_sensors, TVBSensors) or \
                    self.ts_type in ["SEEG sensor", "Internal sensor", "EEG sensor", "MEG sensor"]:
                # ...for Sensor Timeseries
                if labels_ordering is None:
                    # Copy so the module-level default LABELS_ORDERING is not mutated
                    labels_ordering = list(LABELS_ORDERING)
                    labels_ordering[2] = "%s sensor" % self.ts_type
                    kwargs.update({"labels_ordering": labels_ordering})
                if isinstance(input_sensors, TVBSensorsInternal) or isequal_string(self.ts_type, "Internal sensor")\
                        or isequal_string(self.ts_type, "SEEG sensor"):
                    self._tvb = TimeSeriesSEEG(data=input, **kwargs)
                    self.ts_type = "SEEG sensor"
                elif isinstance(input_sensors,
                                TVBSensorsEEG) or isequal_string(
                                    self.ts_type, "EEG sensor"):
                    self._tvb = TimeSeriesEEG(data=input, **kwargs)
                    self.ts_type = "EEG sensor"
                elif isinstance(input_sensors,
                                TVBSensorsMEG) or isequal_string(
                                    self.ts_type, "MEG sensor"):
                    self._tvb = TimeSeriesMEG(data=input, **kwargs)
                    self.ts_type = "MEG sensor"
                else:
                    raise_value_error(
                        "Not recognizing sensors of type %s:\n%s" %
                        (self.ts_type, str(input_sensors)))
            else:
                input_surface = kwargs.pop("surface", None)
                if isinstance(
                        input_surface,
                    (Surface, TVBSurface)) or self.ts_type == "Surface":
                    self.ts_type = "Surface"
                    if isinstance(input_surface, Surface):
                        kwargs.update({"surface": input_surface._tvb})
                    else:
                        kwargs.update({"surface": input_surface})
                    if labels_ordering is None:
                        # Copy so the module-level default is not mutated
                        labels_ordering = list(LABELS_ORDERING)
                        labels_ordering[2] = "Vertex"
                        kwargs.update({"labels_ordering": labels_ordering})
                    self._tvb = TimeSeriesSurface(data=input, **kwargs)
                elif isequal_string(self.ts_type, "Region"):
                    if labels_ordering is None:
                        # Copy so the module-level default is not mutated
                        labels_ordering = list(LABELS_ORDERING)
                        labels_ordering[2] = "Region"
                        kwargs.update({"labels_ordering": labels_ordering})
                    self._tvb = TimeSeriesRegion(data=input, **kwargs)
                elif isequal_string(self.ts_type, "Volume"):
                    if labels_ordering is None:
                        labels_ordering = ["Time", "X", "Y", "Z"]
                        kwargs.update({"labels_ordering": labels_ordering})
                    self._tvb = TimeSeriesVolume(data=input, **kwargs)
                else:
                    self._tvb = TimeSeries(data=input, **kwargs)

            if not numpy.all([
                    dim_label in self._tvb.labels_dimensions.keys()
                    for dim_label in self._tvb.labels_ordering
            ]):
                warning(
                    "Lack of correspondence between the time series labels_ordering %s\n"
                    "and labels_dimensions keys %s!" %
                    (self._tvb.labels_ordering,
                     self._tvb.labels_dimensions.keys()))

        self._tvb.configure()
        self.configure_time()
        self.configure_sample_rate()
        if len(self.title) == 0:
            self._tvb.title = "%s Time Series" % self.ts_type
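A minimal usage sketch of the numpy-array branch of this constructor; the wrapper class name Timeseries is taken from the isinstance checks above, while the array shape, time vector and labels are hypothetical:

# Hedged sketch: 4D data ordered as (time, state variables, regions, modes)
import numpy

data = numpy.zeros((100, 1, 76, 1))
time = numpy.arange(100) * 0.1
ts = Timeseries(data, time=time, ts_type="Region",
                labels_ordering=["Time", "State Variable", "Region", "Mode"])
# sample_period is derived from numpy.mean(numpy.diff(time)), i.e. 0.1 here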
Example No. 8
class GIFTIParser():
    """
        This class reads content of a GIFTI file and builds / returns a Surface instance 
        filled with details.
    """
    UNIQUE_ID_ATTR = "UniqueID"
    SUBJECT_ATTR = "SubjectID"
    ASP_ATTR = "AnatomicalStructurePrimary"
    DATE_ATTR = "Date"
    DESCRIPTION_ATTR = "Description"
    NAME_ATTR = "Name"
    TIME_STEP_ATTR = "TimeStep"
    
    def __init__(self, storage_path, operation_id):
        self.logger = get_logger(__name__)
        self.storage_path = storage_path
        self.operation_id = operation_id
        
    def parse(self, data_file):
        """
            Parse NIFTI file and returns TimeSeries for it. 
        """
        self.logger.debug("Start to parse GIFTI file: %s"%data_file)
        if data_file is None:
            raise ParseException ("Please select GIFTI file which contains data to import")
        if not os.path.exists(data_file):
            raise ParseException ("Provided file %s does not exists"%data_file)
        try:
            gifti_image = giftiio.read(data_file)
            self.logger.debug("File parsed successfully")
        except Exception as excep:
            self.logger.exception(excep)
            msg = "File: %s does not have a valid GIFTI format." % data_file
            raise ParseException(msg)
        
        
        # Now try to determine what data is stored in GIFTI file
        data_arrays = gifti_image.darrays
        
        self.logger.debug("Determine data type stored in GIFTI file")
        
        # First check if it's a surface
        if (len(data_arrays) == 2 
            and intent_codes.code["NIFTI_INTENT_POINTSET"] == data_arrays[0].intent
            and data_type_codes.code["NIFTI_TYPE_FLOAT32"] == data_arrays[0].datatype
            and intent_codes.code["NIFTI_INTENT_TRIANGLE"] == data_arrays[1].intent
            and data_type_codes.code["NIFTI_TYPE_INT32"] == data_arrays[1].datatype):
            
            # Now try to determine what type of surface we have
            data_array_meta = data_arrays[0].meta
            surface = None
            gid = None
            subject = None
            title = None
            
            if (data_array_meta is not None and data_array_meta.data is not None 
                   and len(data_array_meta.data) > 0):
                anatomical_structure_primary = None
                
                for meta_pair in data_array_meta.data:
                    if meta_pair.name == self.ASP_ATTR:
                        anatomical_structure_primary = meta_pair.value
                    elif meta_pair.name == self.UNIQUE_ID_ATTR:
                        gid = meta_pair.value.replace("{", "").replace("}", "")
                    elif meta_pair.name == self.SUBJECT_ATTR:
                        subject = meta_pair.value
                    elif meta_pair.name == self.NAME_ATTR:
                        title = meta_pair.value
                        
                # Based on the info found in meta, create the correct surface type
                if anatomical_structure_primary == "Head":
                    surface = SkinAir()
                elif (anatomical_structure_primary is not None
                      and anatomical_structure_primary.startswith("Cortex")):
                    surface = CorticalSurface()
                
            
            if surface is None:
                raise ParseException("Could not determine type of the surface")
            
            # Now fill TVB data type with info
            if gid is not None:
                surface.gid = gid
            
            surface.storage_path = self.storage_path
            surface.set_operation_id(self.operation_id)
            
            surface.zero_based_triangles = True
            surface.vertices = data_arrays[0].data
            surface.triangles = data_arrays[1].data
            
            if subject is not None:
                surface.subject = subject
            if title is not None:
                surface.title = title
            
            return surface
        
        elif (len(data_arrays) > 1
            and intent_codes.code["NIFTI_INTENT_TIME_SERIES"] == data_arrays[0].intent
            and data_type_codes.code["NIFTI_TYPE_FLOAT32"] == data_arrays[0].datatype):
            
            # Create TVB time series to be filled
            time_series = TimeSeriesSurface()
            time_series.storage_path = self.storage_path
            time_series.set_operation_id(self.operation_id)
            time_series.start_time = 0.0
            time_series.sample_period = 1.0

            # First process the first data_array and extract important data from its metadata
            data_array_meta = data_arrays[0].meta
            if (data_array_meta is not None and data_array_meta.data is not None 
                   and len(data_array_meta.data) > 0):
                for meta_pair in data_array_meta.data:
                    if meta_pair.name == self.UNIQUE_ID_ATTR:
                        gid = meta_pair.value.replace("{", "").replace("}", "")
                        if gid is not None and len(gid) > 0:
                            time_series.gid = gid
                    elif meta_pair.name == self.SUBJECT_ATTR:
                        time_series.subject = meta_pair.value
                    elif meta_pair.name == self.NAME_ATTR:
                        time_series.title = meta_pair.value
                    elif meta_pair.name == self.TIME_STEP_ATTR:
                        time_series.sample_period = float(meta_pair.value)
            
            
            # Now read time series data
            for data_array in data_arrays:
                time_series.write_data_slice([data_array.data])
            
            # Close file after writing data
            time_series.close_file()
            
            return time_series
        else:
            raise ParseException("Could not map data from GIFTI file to a TVB data type")