Example 1
class RegionMatTimeSeriesImporterModel(UploaderViewModel):
    data_file = Str(label='Please select file to import')

    dataset_name = Str(label='Matlab dataset name',
                       doc='Name of the MATLAB dataset where data is stored')

    structure_path = Str(
        required=False,
        default='',
        label='For nested structures enter the field path (separated by .)')

    transpose = Attr(
        field_type=bool,
        required=False,
        default=False,
        label='Transpose the array. Expected shape is (time, channel)')

    slice = Str(
        required=False,
        default='',
        label='Slice of the array in numpy syntax. Expected shape is (time, channel)')

    sampling_rate = Int(required=False,
                        default=100,
                        label='Sampling rate (Hz)')

    start_time = Int(default=0, label='Starting time (ms)')

    datatype = DataTypeGidAttr(linked_datatype=Connectivity,
                               label='Connectivity')
Example 2
class AllenConnectModel(ViewModel):

    resolution = Int(label="Spatial resolution (micron)",
                     default=list(RESOLUTION_OPTIONS.values())[2],
                     choices=RESOLUTION_OPTIONS.values(),
                     required=True,
                     doc="""Spatial resolution (micron) of the Allen data used to build the connectivity.""")

    weighting = Int(label="Definition of the weights of the connectivity :",
                    default=list(WEIGHTS_OPTIONS.values())[0],
                    choices=WEIGHTS_OPTIONS.values(),
                    required=True)

    inj_f_thresh = Float(
        label="Injected percentage of voxels in the inj site",
        default=80.0,
        required=True,
        doc="""To build the connectivity select only the experiments where the percentage of infected voxels
        in the injection structure is greater than: """)

    vol_thresh = Float(
        label="Min volume",
        default=1000000000.0,
        required=True,
        doc="""To build the volume and the connectivity select only the areas that have a volume
        greater than (micron^3): """)
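For context, RESOLUTION_OPTIONS and WEIGHTS_OPTIONS are label-to-value dictionaries defined elsewhere in the Allen creator module. A minimal sketch with hypothetical contents, just to make the defaults above concrete:

# Hypothetical stand-ins; the real dictionaries live in the Allen creator module.
RESOLUTION_OPTIONS = {'25': 25, '50': 50, '100': 100}  # micron
WEIGHTS_OPTIONS = {'projection density': 1, 'projection energy': 2, 'ratio': 3}

# With these, default=list(RESOLUTION_OPTIONS.values())[2] picks 100 micron,
# and default=list(WEIGHTS_OPTIONS.values())[0] picks 'projection density'.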
Example 3
    def __init__(self, path):
        super(SurfaceH5, self).__init__(path)
        self.vertices = DataSet(Surface.vertices, self)
        self.triangles = DataSet(Surface.triangles, self)
        self.vertex_normals = DataSet(Surface.vertex_normals, self)
        self.triangle_normals = DataSet(Surface.triangle_normals, self)
        self.number_of_vertices = Scalar(Surface.number_of_vertices, self)
        self.number_of_triangles = Scalar(Surface.number_of_triangles, self)
        self.edge_mean_length = Scalar(Surface.edge_mean_length, self)
        self.edge_min_length = Scalar(Surface.edge_min_length, self)
        self.edge_max_length = Scalar(Surface.edge_max_length, self)
        self.zero_based_triangles = Scalar(Surface.zero_based_triangles, self)

        self.split_triangles = DataSet(NArray(dtype=int), self, name="split_triangles")
        self.number_of_split_slices = Scalar(Int(), self, name="number_of_split_slices")
        self.split_slices = Json(Attr(field_type=dict), self, name="split_slices")

        self.bi_hemispheric = Scalar(Surface.bi_hemispheric, self)
        self.surface_type = Scalar(Surface.surface_type, self)
        self.valid_for_simulations = Scalar(Surface.valid_for_simulations, self)

        # cached header like information, needed to interpret the rest of the file
        # Load the data that is required in order to interpret the file format
        # number_of_vertices and split_slices are needed for the get_vertices_slice read call

        if not self.is_new_file:
            self._split_slices = self.split_slices.load()
            self._split_triangles = self.split_triangles.load()
            self._number_of_vertices = self.number_of_vertices.load()
            self._number_of_triangles = self.number_of_triangles.load()
            self._number_of_split_slices = self.number_of_split_slices.load()
            self._bi_hemispheric = self.bi_hemispheric.load()
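The is_new_file guard implements a lazy-header pattern: metadata needed to interpret the rest of the file is read once at open time, but only when the file already exists on disk. A minimal sketch of the same idea in plain h5py (illustrative, not TVB's storage API):

import os
import h5py

class LazyHeaderFile:
    def __init__(self, path):
        self.path = path
        self.is_new_file = not os.path.exists(path)
        if not self.is_new_file:
            # Cache the header attributes needed to interpret later reads.
            with h5py.File(path, 'r') as f:
                self._number_of_vertices = int(f.attrs['number_of_vertices'])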
Example 4
class TimeseriesMetricsAdapterModel(ViewModel):
    time_series = DataTypeGidAttr(
        linked_datatype=TimeSeries,
        label="Time Series",
        required=True,
        doc="The TimeSeries for which the metric(s) will be computed.")

    algorithms = List(
        of=str,
        choices=tuple(ALGORITHMS.keys()),
        label='Selected metrics to be applied',
        doc='The selected algorithms will all be applied on the input TimeSeries')

    start_point = Float(
        label="Start point (ms)",
        default=500.0,
        required=False,
        doc=""" The start point determines how many points of the TimeSeries will
        be discarded before computing the metric. By default it drops the
        first 500 ms.""")

    segment = Int(
        label="Segmentation factor",
        default=4,
        required=False,
        doc="""Divide the input time-series into discrete, equally sized sequences and
        use the last segment to compute the metric. It is only used when
        the start point is larger than the time-series length.""")
Example 5
class PropsDataType(HasTraits):
    n_node = Int()

    def __init__(self, **kwargs):
        super(PropsDataType, self).__init__(**kwargs)
        self._weights = None

    @trait_property(NArray(shape=(Dim.any, Dim.any)))
    def weights(self):
        return self._weights

    @weights.setter
    def weights(self, val):
        if val.shape != (self.n_node, self.n_node):
            raise TraitValueError("weights must have shape (n_node, n_node)")
        self._weights = val

    @trait_property(Attr(bool))
    def is_directed(self):
        isit = (self.weights == self.weights.T).all()
        # The strict typing is fighting against python conventions
        # numpy.bool_ is not bool ...
        return bool(isit)

    @cached_trait_property(NArray())
    def once(self):
        return self.weights * 22.44
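A quick usage sketch for the traited properties above; the shapes and the exception type follow the declarations, while the concrete values are illustrative:

import numpy

props = PropsDataType(n_node=2)
props.weights = numpy.array([[0.0, 1.0], [1.0, 0.0]])  # passes the (n_node, n_node) shape check
props.is_directed    # bool((w == w.T).all()) -> True for this symmetric matrix
cached = props.once  # computed on first access, then cached

try:
    props.weights = numpy.zeros((3, 3))  # wrong shape for n_node=2
except TraitValueError:
    pass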
Example 6
class CoherenceSpectrum(HasTraits):
    """
    Result of a NodeCoherence Analysis.
    """
    # Overwrite attribute from superclass
    array_data = NArray()

    source = Attr(
        field_type=time_series.TimeSeries,
        label="Source time-series",
        doc="""Links to the time-series on which the node_coherence is
            applied.""")

    nfft = Int(
        label="Data-points per block",
        default=256,
        doc="""NOTE: must be a power of 2""")

    frequency = NArray(label="Frequency")

    def summary_info(self):
        """
        Gather scientifically interesting summary information from an instance of this datatype.
        """
        return {
            "Spectral type": self.__class__.__name__,
            "Source": self.source.title,
            "Number of frequencies": self.frequency.shape[0],
            "Minimum frequency": self.frequency[0],
            "Maximum frequency": self.frequency[-1],
            "FFT length (time-points)": self.nfft
        }
Example 7
    def __init__(self, path):
        super(TimeSeriesH5, self).__init__(path)
        self.title = Scalar(TimeSeries.title, self)
        self.data = DataSet(TimeSeries.data, self, expand_dimension=0)
        self.nr_dimensions = Scalar(Int(), self, name="nr_dimensions")

        # omitted length_nd; these are indexing props, to be removed from the datatype too
        self.labels_ordering = Json(TimeSeries.labels_ordering, self)
        self.labels_dimensions = Json(TimeSeries.labels_dimensions, self)

        self.time = DataSet(TimeSeries.time, self, expand_dimension=0)
        self.start_time = Scalar(TimeSeries.start_time, self)
        self.sample_period = Scalar(TimeSeries.sample_period, self)
        self.sample_period_unit = Scalar(TimeSeries.sample_period_unit, self)
        self.sample_rate = Scalar(Float(), self, name="sample_rate")

        # omitted has_surface_mapping, has_volume_mapping; indexing props, to be removed from the datatype too

        # experiment: load header data eagerly; see surface for a lazy approach.
        # As we do not explicitly distinguish between opening for read or write,
        # the file might not yet exist, so loading headers for a new file makes no sense.

        if not self.is_new_file:
            self._sample_period = self.sample_period.load()
            self._start_time = self.start_time.load()
Example 8
    def test_int_field_optional(self):
        int_attr = Int(label='Dummy Int', default=0, required=False)
        int_field = IntField(int_attr, self.name)

        post_data = {'dummy_name': ''}
        int_field.fill_from_post(post_data)
        assert int_field.data is None, "Empty data was not set correctly on IntField!"
        assert int_field.value == ''
Example 9
    def test_int_field_required_empty(self):
        int_attr = Int(label='Dummy Int', default=0)
        int_field = IntField(int_attr, self.name)

        post_data = {'dummy_name': ''}
        int_field.fill_from_post(post_data)
        assert int_field.validate() is False, "Validation should have failed on IntField!"
        assert int_field.value == ''
Example 10
    def test_int_field_required(self):
        int_attr = Int(label='Dummy Int', default=0)
        int_field = IntField(int_attr, self.name)

        post_data = {'dummy_name': '10'}
        int_field.fill_from_post(post_data)
        assert int_field.data == 10, "Int data was not set correctly on IntField!"
        assert int_field.value == int_field.data, "Int data was not set correctly on IntField!"
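Together, these three tests pin down IntField's POST-handling contract: empty input yields data None (passing validation only when the attribute is optional), while non-empty input is parsed to an int. A standalone sketch of that contract (illustrative, not TVB's implementation):

def parse_int_post(post_data, required=True, name='dummy_name'):
    # Returns (data, is_valid) mirroring the behaviour the tests assert.
    raw = post_data.get(name, '')
    if raw == '':
        return None, not required
    return int(raw), True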
Example 11
class NodeCoherence(HasTraits):
    "Adapter for cross-coherence algorithm(s)"

    time_series = Attr(
        field_type=time_series.TimeSeries,
        label="Time Series",
        required=True,
        doc="""The timeseries to which the FFT is to be applied.""")

    nfft = Int(label="Data-points per block",
               default=256,
               doc="""Should be a power of 2...""")

    def evaluate(self):
        "Evaluate coherence on time series."
        cls_attr_name = self.__class__.__name__ + ".time_series"
        # self.time_series.trait["data"].log_debug(owner=cls_attr_name)
        srate = self.time_series.sample_rate
        coh, freq = coherence(self.time_series.data, srate, nfft=self.nfft)
        self.log.debug("coherence")
        self.log.debug(narray_describe(coh))
        self.log.debug("freq")
        self.log.debug(narray_describe(freq))

        spec = spectral.CoherenceSpectrum(source=self.time_series,
                                          nfft=self.nfft,
                                          array_data=coh.astype(numpy.float64),
                                          frequency=freq)
        return spec

    def result_shape(self, input_shape):
        """Returns the shape of the main result of NodeCoherence."""
        freq_len = self.nfft // 2 + 1
        freq_shape = (freq_len,)
        result_shape = (freq_len, input_shape[2], input_shape[2],
                        input_shape[1], input_shape[3])
        return [result_shape, freq_shape]

    def result_size(self, input_shape):
        """
        Returns the storage size in Bytes of the main result of NodeCoherence.
        """
        # TODO This depends on input array dtype!
        result_size = numpy.sum(
            list(map(numpy.prod,
                     self.result_shape(input_shape)))) * 8.0  #Bytes
        return result_size

    def extended_result_size(self, input_shape):
        """
        Returns the storage size in Bytes of the extended result of the FFT.
        That is, it includes storage of the evaluated FourierSpectrum attributes
        such as power, phase, amplitude, etc.
        """
        extend_size = self.result_size(
            input_shape)  #Currently no derived attributes.
        return extend_size
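A worked example of the bookkeeping above, assuming TVB's usual 4D time-series shape of (time, state-variable, space, mode) and float64 storage; the concrete numbers are illustrative:

import numpy

nfft = 256
freq_len = nfft // 2 + 1                 # 129 frequency bins
input_shape = (4000, 2, 76, 1)
result_shape = (freq_len, 76, 76, 2, 1)  # (freq, space, space, sv, mode)
size_bytes = (numpy.prod(result_shape) + freq_len) * 8.0
print(size_bytes)                        # 11922696.0, roughly 11.9 MB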
Example 12
    def __init__(self, pse_param1, pse_param2):
        # type: (RangeParameter, RangeParameter) -> None
        super(SimulatorPSERangeFragment, self).__init__()
        self._add_pse_field(pse_param1)
        if pse_param2:
            self._add_pse_field(pse_param2, self.KEY_PARAM2)

        self.max_pse_number = HiddenField(
            Int(default=TvbProfile.current.MAX_RANGE_NUMBER, required=False),
            "max_range_number")
Example 13
class ICAAdapterModel(ViewModel):
    time_series = DataTypeGidAttr(
        linked_datatype=TimeSeries,
        label="Time Series",
        required=True,
        doc="The timeseries to which the ICA is to be applied.")

    n_components = Int(label="Number of principal components to unmix.",
                       required=False,
                       default=None,
                       doc="Number of principal components to unmix.")
Example 14
class NodeCoherenceModel(ViewModel):
    time_series = DataTypeGidAttr(
        linked_datatype=TimeSeries,
        label="Time Series",
        required=True,
        doc="""The timeseries to which the Cross Coherence is to be applied."""
    )

    nfft = Int(label="Data-points per block",
               default=256,
               doc="""Should be a power of 2...""")
Example 15
    def __init__(self, path):
        super(SimulationStateH5, self).__init__(path)
        self.history = DataSet(NArray(), self, name='history')
        self.current_state = DataSet(NArray(), self, name='current_state')
        self.current_step = Scalar(Int(), self, name='current_step')

        for i in range(1, 16):
            setattr(self, 'monitor_stock_%i' % i,
                    DataSet(NArray(), self, name='monitor_stock_%i' % i))

        self.integrator_noise_rng_state_algo = Scalar(
            Attr(str), self, name='integrator_noise_rng_state_algo')
        self.integrator_noise_rng_state_keys = DataSet(
            NArray(dtype='uint32'),
            self,
            name='integrator_noise_rng_state_keys')
        self.integrator_noise_rng_state_pos = Scalar(
            Int(), self, name='integrator_noise_rng_state_pos')
        self.integrator_noise_rng_state_has_gauss = Scalar(
            Int(), self, name='integrator_noise_rng_state_has_gauss')
        self.integrator_noise_rng_state_cached_gauss = Scalar(
            Float(), self, name='integrator_noise_rng_state_cached_gauss')
Example 16
    class A(HasTraits):
        n_node = Int()

        def __init__(self, **kwargs):
            super(A, self).__init__(**kwargs)
            self._weights = None

        @trait_property(NArray(shape=(Dim.any, Dim.any)))
        def weights(self):
            return self._weights

        @weights.setter
        def weights(self, val):
            if val.shape != (self.n_node, self.n_node):
                raise TraitValueError("weights must have shape (n_node, n_node)")
            self._weights = val
Example 17
class BaseTimeseriesMetricAlgorithm(HasTraits):
    """
    This is a base class for all metrics on TimeSeries datatypes.
    A metric is an algorithm that computes a single value for an entire TimeSeries.

    """

    time_series = Attr(
        field_type=time_series_module.TimeSeries,
        label="Time Series",
        required=True,
        doc="The TimeSeries for which the metric(s) will be computed.")

    start_point = Float(
        label="Start point (ms)",
        default=500.0,
        required=False,
        doc=""" The start point determines how many points of the TimeSeries will
        be discarded before computing the metric. By default it drops the
        first 500 ms.""")

    segment = Int(
        label="Segmentation factor",
        default=4,
        required=False,
        doc="""Divide the input time-series into discrete, equally sized sequences and
        use the last segment to compute the metric. It is only used when
        the start point is larger than the time-series length.""")

    def evaluate(self):
        """
        This method must be implemented by each subclass to compute the metric.

        :return: a single numeric value or a dictionary (displayLabel: numeric value) to be persisted.
        """
        raise NotImplementedError(
            "Every metric algorithm should implement an 'evaluate' method that returns the metric result.")
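A minimal subclass sketch; the variance metric here is illustrative, not one of TVB's shipped algorithms:

class VarianceMetric(BaseTimeseriesMetricAlgorithm):
    """Illustrative metric: global variance of the data array."""

    def evaluate(self):
        return float(self.time_series.data.var())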
Example 18
class BaBaze(HasTraits):
    s = Attr(str, label='the s', doc='the mighty string')
    sign = Int(label='sign', choices=(-1, 0, 1), default=0)
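Declared choices are enforced on assignment. A quick sketch of the intended behaviour (assuming, as in the earlier examples, that violations raise TraitValueError):

baze = BaBaze(s='hello', sign=1)  # 1 is among choices=(-1, 0, 1)
try:
    baze.sign = 2                 # not a declared choice
except TraitValueError:
    pass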
Example 19
class A(HasTraits):
    foo = Int(default=1)
Example 20
class B(HasTraits):
    a = Int(default=1, choices=(3, 4))  # invalid declaration: default is not among the choices
Example 21
class B(HasTraits):
    a = Int(default=1, choices=(1, 3, 4))
    odd_nullable_int = Int(required=False)
Example 22
class B(HasTraits):
    a = Int(default=1.0)  # invalid declaration: a float default for an Int attribute
Example 23
class B(HasTraits):
    a = Int(field_type=float)  # invalid declaration: Int's field_type is fixed to int
Example 24
class Bar(BaBaze):
    airplane_meal = Attr(str, choices=('cheese', 'chicken'), required=False)
    portions = Int(default=1, label='portions')
    is_fancy = Attr(bool, default=True)
Example 25
class Sensors(HasTraits):
    """
    Base Sensors class.
    All sensors have locations.
    Some will have orientations, e.g. MEG.
    """
    sensors_type = Attr(str, required=False)

    labels = NArray(dtype='U128', label="Sensor labels")

    locations = NArray(label="Sensor locations")

    has_orientation = Attr(field_type=bool, default=False)

    orientations = NArray(required=False)

    number_of_sensors = Int(field_type=int, label="Number of sensors",
                            doc="""The number of sensors described by these Sensors.""")

    # introduced to accommodate real sensors sets which have sensors
    # that should be zero during simulation i.e. ECG (heart), EOG,
    # reference gradiometers, etc.
    usable = NArray(dtype=bool, required=False, label="Usable sensors",
                    doc="The sensors in set which are used for signal data.")

    @classmethod
    def from_file(cls, source_file="eeg_brainstorm_65.txt"):

        result = cls()
        source_full_path = try_get_absolute_path("tvb_data.sensors", source_file)
        reader = FileReader(source_full_path)

        result.labels = reader.read_array(dtype=str, use_cols=(0,))
        result.locations = reader.read_array(use_cols=(1, 2, 3))
        return result

    def configure(self):
        """
        Invoke the compute methods for computable attributes that haven't been
        set during initialization.
        """
        super(Sensors, self).configure()
        self.number_of_sensors = int(self.labels.shape[0])

    def summary_info(self):
        """
        Gather scientifically interesting summary information from an instance
        of this datatype.
        """
        return {
            "Sensor type": self.sensors_type,
            "Number of Sensors": self.number_of_sensors
        }

    def sensors_to_surface(self, surface_to_map):
        """
        Map EEG sensors onto the head surface (skin-air).

        EEG sensor locations are typically only given on a unit sphere, that is,
        they are effectively only identified by their orientation with respect
        to a coordinate system. This method is used to map these unit vector
        sensor "locations" to a specific location on the surface of the skin.

        Assumes coordinate systems are aligned, i.e. common x,y,z and origin.

        """
        # Normalize sensor and vertex locations to unit vectors
        norm_sensors = numpy.sqrt(numpy.sum(self.locations ** 2, axis=1))
        unit_sensors = self.locations / norm_sensors[:, numpy.newaxis]
        norm_verts = numpy.sqrt(numpy.sum(surface_to_map.vertices ** 2, axis=1))
        unit_vertices = surface_to_map.vertices / norm_verts[:, numpy.newaxis]

        sensor_locations = numpy.zeros((self.number_of_sensors, 3))
        for k in range(self.number_of_sensors):
            # Find the surface vertex most closely aligned with current sensor.
            current_sensor = unit_sensors[k]
            alignment = numpy.dot(current_sensor, unit_vertices.T)
            one_ring = []

            while not one_ring:
                closest_vertex = alignment.argmax()
                # Get the set of triangles in the neighbourhood of that vertex.
                # NOTE: Intersection doesn't always fall within the 1-ring, so, all
                #      triangles contained in the 2-ring are considered.
                one_ring = surface_to_map.vertex_neighbours[closest_vertex]
                if not one_ring:
                    alignment[closest_vertex] = min(alignment)

            local_tri = [surface_to_map.vertex_triangles[v] for v in one_ring]
            local_tri = list(set([tri for subar in local_tri for tri in subar]))

            # Calculate a parametrized plane line intersection [t,u,v] for the
            # set of local triangles, which are considered as defining a plane.
            tuv = numpy.zeros((len(local_tri), 3))
            for i, tri in enumerate(local_tri):
                edge_01 = (surface_to_map.vertices[surface_to_map.triangles[tri, 0]] -
                           surface_to_map.vertices[surface_to_map.triangles[tri, 1]])
                edge_02 = (surface_to_map.vertices[surface_to_map.triangles[tri, 0]] -
                           surface_to_map.vertices[surface_to_map.triangles[tri, 2]])
                see_mat = numpy.vstack((current_sensor, edge_01, edge_02))

                tuv[i] = numpy.linalg.solve(see_mat.T, surface_to_map.vertices[surface_to_map.triangles[tri, 0]])

            # Find which line-plane intersection falls within its triangle by
            # requiring u and v to lie in [0, 1); the u+v bound below is a loose guard.
            local_triangle_index = ((0 <= tuv[:, 1]) * (tuv[:, 1] < 1) *
                                    (0 <= tuv[:, 2]) * (tuv[:, 2] < 1) *
                                    (0 <= (tuv[:, 1] + tuv[:, 2])) * ((tuv[:, 1] + tuv[:, 2]) < 2)).nonzero()[0]

            if len(local_triangle_index) == 1:
                # Scale sensor unit vector by t so that it lies on the surface.
                sensor_locations[k] = current_sensor * tuv[local_triangle_index[0], 0]

            elif len(local_triangle_index) < 1:
                # No triangle was found in proximity. Place the sensor in the surface extension area.
                self.log.warning("Could not find a proper position on the given surface for sensor %d:%s "
                                 "with direction %s." % (k, self.labels[k], str(self.locations[k])))
                distances = (abs(tuv[:, 1] + tuv[:, 2]))
                local_triangle_index = distances.argmin()
                # Scale sensor unit vector by t so that it lies on the surface.
                sensor_locations[k] = current_sensor * tuv[local_triangle_index, 0]

            else:
                # More than one triangle was found in proximity. Pick the first.
                # Scale sensor unit vector by t so that it lies on the surface.
                sensor_locations[k] = current_sensor * tuv[local_triangle_index[0], 0]

        return sensor_locations
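The first step above reduces sensors and vertices to unit vectors so alignment can be scored with a plain dot product. A standalone sketch of that step (shapes are illustrative):

import numpy

locations = numpy.random.randn(65, 3)               # e.g. 65 EEG sensors
norms = numpy.sqrt(numpy.sum(locations ** 2, axis=1))
unit_sensors = locations / norms[:, numpy.newaxis]  # each row now has unit length
alignment = numpy.dot(unit_sensors[0], unit_sensors.T)  # cosine similarity, sensor 0 vs all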
Example 26
class Connectivity(HasTraits):
    region_labels = NArray(
        dtype='U128',
        label="Region labels",
        doc="""Short strings, 'labels', for the regions represented by the connectivity matrix.""")

    weights = NArray(
        label="Connection strengths",
        doc="""Matrix of values representing the strength of connections between regions, arbitrary units.""")

    undirected = Attr(
        field_type=bool,
        default=False,
        required=False,
        doc="True when the weights matrix is square and symmetric over the main diagonal, False for a directed graph.")

    tract_lengths = NArray(
        label="Tract lengths",
        doc="""The length of myelinated fibre tracts between regions.
            If not provided, the Euclidean distance between region centres is used.""")

    speed = NArray(
        label="Conduction speed",
        default=numpy.array([3.0]),
        doc="""A single number or matrix of conduction speeds for the myelinated fibre tracts between regions.""")

    centres = NArray(
        label="Region centres",
        doc="An array specifying the location of the centre of each region.")

    cortical = NArray(
        dtype=bool,
        label="Cortical",
        required=False,
        doc="""A boolean vector specifying whether or not a region is part of the cortex.""")

    hemispheres = NArray(
        dtype=bool,
        label="Hemispheres (True for Right and False for Left Hemisphere)",
        required=False,
        doc="""A boolean vector specifying whether or not a region is part of the right hemisphere.""")

    orientations = NArray(
        label="Average region orientation",
        required=False,
        doc="""Unit vectors of the average orientation of the regions represented in the connectivity matrix.
            NOTE: Unknown data should be zeros.""")

    areas = NArray(
        label="Area of regions",
        required=False,
        doc="""Estimated area represented by the regions in the connectivity matrix.
            NOTE: Unknown data should be zeros.""")

    idelays = NArray(
        dtype=int,
        label="Conduction delay indices",
        required=False,
        doc="An array of time delays between regions in integration steps.")

    delays = NArray(
        label="Conduction delay",
        required=False,
        doc="""Matrix of time delays between regions in physical units. Setting the conduction speed
            automatically combines with tract lengths to update this matrix, so don't try to change it manually.""")

    number_of_regions = Int(
        field_type=int,
        label="Number of regions",
        doc="""The number of regions represented in this Connectivity.""")

    number_of_connections = Int(
        field_type=int,
        label="Number of connections",
        doc="""The number of non-zero entries in this Connectivity.""")

    # Original Connectivity, from which the current connectivity was edited.
    parent_connectivity = Attr(field_type=str, required=False)

    # In case of an edited Connectivity, these are the nodes left in the interest area;
    # the rest were part of a lesion, so they were removed.
    saved_selection = List(of=int)

    @property
    def saved_selection_labels(self):
        """
        Taking the entity field saved_selection, convert indexes in that array
        into labels.
        """
        if self.saved_selection:
            idxs = [int(i) for i in self.saved_selection]
            result = ''
            for i in idxs:
                result += self.region_labels[i] + ','
            return result[:-1]
        else:
            return ''

    def is_right_hemisphere(self, idx):
        """
        :param idx:  Region IDX
        :return: True when hemispheres information is present and it shows that the current node is in the right
        hemisphere. When hemispheres info is not present, return True for the second half of the indices and
        False otherwise.
        """
        if self.hemispheres is not None and self.hemispheres.size:
            return self.hemispheres[idx]
        return idx >= self.number_of_regions / 2

    @property
    def hemisphere_order_indices(self):
        """
        A sequence of indices of rows/columns.
        These permute rows/columns so that the first half belongs to the first hemisphere.
        If there is no hemisphere information, the identity permutation is returned.
        """
        if self.hemispheres is not None and self.hemispheres.size:
            li, ri = [], []
            for i, is_right in enumerate(self.hemispheres):
                if is_right:
                    ri.append(i)
                else:
                    li.append(i)
            return numpy.array(li + ri)
        else:
            return numpy.arange(self.number_of_regions)

    @property
    def ordered_weights(self):
        """
        This view of the weights matrix lists all left hemisphere nodes before the right ones.
        It is used by viewers of the connectivity.
        """
        permutation = self.hemisphere_order_indices
        # how this works:
        # w[permutation, :] selects all rows at the indices present in the permutation array thus permuting the rows
        # [:, permutation] does the same to columns. See numpy index arrays
        return self.weights[permutation, :][:, permutation]

    @property
    def ordered_tracts(self):
        """
        Similar to :meth:`ordered_weights`
        """
        permutation = self.hemisphere_order_indices
        return self.tract_lengths[permutation, :][:, permutation]

    @property
    def ordered_labels(self):
        """
        Similar to :meth:`ordered_weights`
        """
        permutation = self.hemisphere_order_indices
        return self.region_labels[permutation]

    @property
    def ordered_centres(self):
        """
        Similar to :meth:`ordered_weights`
        """
        permutation = self.hemisphere_order_indices
        return self.centres[permutation]

    def get_grouped_space_labels(self):
        """
        :return: A list [('Left', [lh_labels]), ('Right', [rh_labels])]
        """
        if self.hemispheres is not None and self.hemispheres.size:
            l, r = [], []

            for i, (is_right, label) in enumerate(
                    zip(self.hemispheres, self.region_labels)):
                if is_right:
                    r.append((i, label))
                else:
                    l.append((i, label))
            return [('Left', l), ('Right', r)]
        else:
            return [('', list(enumerate(self.region_labels)))]

    def get_default_selection(self):
        # should this be sub-selection or all always?
        sel = self.saved_selection
        if sel is not None and len(sel) > 0:
            return sel
        else:
            return list(range(len(self.region_labels)))

    @property
    def binarized_weights(self):
        """
        :return: a matrix of the same size as weights, with 1 where weight > 0, and 0 elsewhere
        """
        result = numpy.zeros_like(self.weights)
        result = numpy.where(self.weights > 0, 1, result)
        return result

    def configure(self):
        """
        Invoke the compute methods for computable attributes that haven't been
        set during initialization.
        """

        self.number_of_regions = int(self.weights.shape[0])
        # NOTE: numpy.count_nonzero could compute this directly
        self.number_of_connections = int(self.weights.nonzero()[0].shape[0])

        if self.tract_lengths is None or self.tract_lengths.size == 0:
            self.compute_tract_lengths()
        if self.region_labels is None or self.region_labels.size == 0:
            self.compute_region_labels()
        if self.hemispheres is None or self.hemispheres.size == 0:
            self.try_compute_hemispheres()

        # This cannot go into compute, as it is too complex a reference
        # if self.delays.size == 0:
        # TODO: Because delays are stored and loaded the size was never 0.0 and
        #      so this wasn't being run, making the conduction_speed hack on the
        #      simulator non-functional. In the longer run it'll probably be
        #      necessary for delays to never be stored but always calculated
        #      from tract-lengths and speed...
        if self.speed is None:  # TODO: this is a hack fix...
            self.log.warning(
                "Connectivity.speed attribute not initialized properly, setting it to 3.0..."
            )
            self.speed = numpy.array([3.0])

        # NOTE: Because of the conduction_speed hack for the UI this must be evaluated here, even if delays
        # already has a value, otherwise setting speed in the UI has no effect...
        self.delays = self.tract_lengths / self.speed

        if (self.weights.transpose() == self.weights).all():
            self.undirected = True

        self.validate()

    def summary_info(self):
        summary = {
            "Number of regions": self.number_of_regions,
            "Number of connections": self.number_of_connections,
            "Undirected": self.undirected,
        }
        summary.update(narray_summary_info(self.areas, ar_name='areas'))
        summary.update(narray_summary_info(self.weights, ar_name='weights'))
        summary.update(
            narray_summary_info(self.weights[self.weights.nonzero()],
                                ar_name='weights-non-zero',
                                omit_shape=True))
        summary.update(
            narray_summary_info(self.tract_lengths,
                                ar_name='tract_lengths',
                                omit_shape=True))
        summary.update(
            narray_summary_info(
                self.tract_lengths[self.tract_lengths.nonzero()],
                ar_name='tract_lengths-non-zero',
                omit_shape=True))
        summary.update(
            narray_summary_info(self.tract_lengths[self.weights.nonzero()],
                                ar_name='tract_lengths (connections)',
                                omit_shape=True))
        return summary

    def set_idelays(self, dt):
        """
        Convert the time delays between regions in physical units into an array
        of linear indices into the simulator's history attribute.

        args:
            ``dt (float64)``: Length of integration time step...

        Updates attribute:
            ``idelays (numpy.array)``: Transmission delay between brain regions
            in integration steps.
        """
        # Express delays in integration steps
        self.idelays = numpy.rint(self.delays / dt).astype(numpy.int32)
        self.has_delays = self.idelays.any()
        self._horizon = self.idelays.max() + 1
        nn = self.idelays.shape[0]
        self.inodes = numpy.tile(numpy.r_[:nn], (nn, 1))
        self.delay_indices = self.idelays * nn + self.inodes

    def compute_tract_lengths(self):
        """
        If no tract lengths data are available, this can be used to calculate
        the Euclidean distance between region centres to use as a proxy.

        """
        nor = self.number_of_regions
        tract_lengths = numpy.zeros((nor, nor))
        # TODO: redundant by half, do half triangle then flip...
        for region in range(nor):
            temp = self.centres - self.centres[region, :][numpy.newaxis, :]
            tract_lengths[region, :] = numpy.sqrt(numpy.sum(temp**2, axis=1))

        self.tract_lengths = tract_lengths

    def compute_region_labels(self):
        """
        Compute default region labels, if missing.
        """
        labels = ["region_%03d" % n for n in range(self.number_of_regions)]
        self.region_labels = numpy.array(labels, dtype='U128')

    def try_compute_hemispheres(self):
        """
        If all region labels are prefixed with L or R, then compute hemisphere side with that.
        """
        if self.region_labels is not None and self.region_labels.size > 0:
            hemispheres = []
            # Check if all labels are prefixed with R / L
            for label in self.region_labels:
                if label is not None and label.lower().startswith('r'):
                    hemispheres.append(True)
                elif label is not None and label.lower().startswith('l'):
                    hemispheres.append(False)
                else:
                    hemispheres = None
                    break
            # Check if all labels are suffixed with R / L
            if hemispheres is None:
                hemispheres = []
                for label in self.region_labels:
                    if label is not None and label.lower().endswith('r'):
                        hemispheres.append(True)
                    elif label is not None and label.lower().endswith('l'):
                        hemispheres.append(False)
                    else:
                        hemispheres = None
                        break
            if hemispheres is not None:
                self.hemispheres = numpy.array(hemispheres, dtype=bool)

    def transform_remove_self_connections(self):
        """
        Remove the values from the main diagonal (self-connections)
        """
        nor = self.number_of_regions
        result = copy(self.weights)
        result = result - result * numpy.eye(nor, nor)
        return result

    def scaled_weights(self, mode='tract'):
        """
        Scale the connection strengths (weights) and return the scaled matrix.
        Three simple types of scaling are supported.
        The ``mode`` is one of the following:

            'tract': Scale by a value such that the maximum absolute value of a single
                connection is 1.0. (Global scaling)

            'region': Scale by a value such that the maximum absolute value of the
                cumulative input to any region is 1.0. (Node-wise scaling)

            None: does nothing.

        NOTE: Currently multiple 'tract' and/or 'region' scalings without
            intermediate 'none' scaling mode destroy the ability to recover
            the original un-scaled weights matrix.

        """
        # NOTE: It is not yet clear how or if we will integrate this functionality
        #      into the UI. Currently the same effect can be achieved manually
        #      by using the coupling functions, it is just that, in certain
        #      situations, things are simplified by starting from a normalised
        #      weights matrix. However, in other situations it is not desirable
        #      to have a simple normalisation of this sort.
        # NOTE: We should probably separate the two cases implemented here into
        #      'scaling' and 'normalisation'. Normalisation implies that the norm
        #      of the samples is equal to 1, while here it is only scaling by a factor.

        self.log.info("Starting to normalize to mode: %s" % str(mode))

        normalisation_factor = None
        if mode in ("tract", "edge"):
            # global scaling
            normalisation_factor = numpy.abs(self.weights).max()
        elif mode in ("region", "node"):
            # node-wise scaling
            normalisation_factor = numpy.max(
                numpy.abs(self.weights.sum(axis=1)))
        elif mode in (None, "none"):
            normalisation_factor = 1.0
        else:
            self.log.error("Bad weights normalisation mode, must be one of:")
            self.log.error("('tract', 'edge', 'region', 'node', 'none')")
            raise Exception("Bad weights normalisation mode")

        self.log.debug("Normalization factor is: %s" %
                       str(normalisation_factor))
        mask = self.weights != 0.0
        result = copy(self.weights)
        result[mask] = self.weights[mask] / normalisation_factor
        return result

    def transform_binarize_matrix(self):
        """
        Transforms the weights matrix into a binary (unweighted) matrix
        """
        self.log.info("Transforming weighted matrix into unweighted matrix")

        result = copy(self.weights)
        result = numpy.where(result > 0, 1, result)
        return result

    def motif_linear_directed(self,
                              number_of_regions=4,
                              max_radius=100.,
                              return_type=None):
        """
        Generates a linear (open chain) unweighted directed graph with equidistant nodes.
        """

        iu1 = numpy.triu_indices(number_of_regions, 1)
        iu2 = numpy.triu_indices(number_of_regions, 2)

        self.weights = numpy.zeros((number_of_regions, number_of_regions))
        self.weights[iu1] = 1.0
        self.weights[iu2] = 0.0

        self.tract_lengths = max_radius * copy(self.weights)
        self.number_of_regions = number_of_regions
        self.create_region_labels(mode='numeric')

        if return_type is not None:
            return self.weights, self.tract_lengths
        else:
            pass

    def motif_linear_undirected(self, number_of_regions=4, max_radius=42.):
        """
        Generates a linear (open chain) unweighted undirected graph with equidistant nodes.
        """

        self.weights, self.tract_lengths = self.motif_linear_directed(
            number_of_regions=number_of_regions,
            max_radius=max_radius,
            return_type=True)

        self.weights += self.weights.T
        self.tract_lengths += self.tract_lengths.T
        self.number_of_regions = number_of_regions
        self.create_region_labels(mode='numeric')

    def motif_chain_directed(self,
                             number_of_regions=4,
                             max_radius=42.,
                             return_type=None):
        """
        Generates a closed unweighted directed graph with equidistant nodes.
        Depending on the centres it could be a box or a ring.
        """

        self.weights, self.tract_lengths = self.motif_linear_directed(
            number_of_regions=number_of_regions,
            max_radius=max_radius,
            return_type=True)

        self.weights[-1, 0] = 1.0
        self.tract_lengths[-1, 0] = max_radius
        self.number_of_regions = number_of_regions
        self.create_region_labels(mode='numeric')

        if return_type is not None:
            return self.weights, self.tract_lengths
        else:
            pass

    def motif_chain_undirected(self, number_of_regions=4, max_radius=42.):
        """
        Generates a closed unweighted undirected graph with equidistant nodes.
        Depending on the centres it could be a box or a ring.
        """

        self.weights, self.tract_lengths = self.motif_chain_directed(
            number_of_regions=number_of_regions,
            max_radius=max_radius,
            return_type=True)

        self.weights[0, -1] = 1.0
        self.tract_lengths[0, -1] = max_radius
        self.number_of_regions = number_of_regions
        self.create_region_labels(mode='numeric')

    def motif_all_to_all(self, number_of_regions=4, max_radius=42.):
        """
        Generates an all-to-all closed unweighted undirected graph with equidistant nodes.
        Self-connections are not included.
        """

        diagonal_elements = numpy.diag_indices(number_of_regions)

        self.weights = numpy.ones((number_of_regions, number_of_regions))
        self.weights[diagonal_elements] = 0.0
        self.tract_lengths = max_radius * copy(self.weights)
        self.number_of_regions = number_of_regions
        self.create_region_labels(mode='numeric')

    def centres_spherical(self,
                          number_of_regions=4,
                          max_radius=42.,
                          flat=False):
        """
        The nodes positions are distributed on a sphere.
        See: http://mathworld.wolfram.com/SphericalCoordinates.html

        If flat is true, then theta=0.0, the nodes are lying inside a circle.

        r    : radial
        theta: azimuthal
        polar: phi
        """

        # azimuth
        theta = numpy.random.uniform(low=-numpy.pi,
                                     high=numpy.pi,
                                     size=number_of_regions)

        # uniform variate used to sample the radius; the cube root below
        # makes the density uniform within the ball
        u = numpy.random.uniform(low=0.0, high=1.0, size=number_of_regions)

        if flat:
            cosphi = 0.0
        else:
            # cos(elevation)
            cosphi = numpy.random.uniform(low=-1.0,
                                          high=1.0,
                                          size=number_of_regions)

        phi = numpy.arccos(cosphi)
        r = max_radius * pow(u, 1 / 3.0)

        # To Cartesian coordinates
        x = r * numpy.sin(phi) * numpy.cos(theta)
        y = r * numpy.sin(phi) * numpy.sin(theta)
        z = r * numpy.cos(phi)

        self.centres = numpy.array([x, y, z]).T
        # Normalise each centre (row) to a unit vector for the orientation.
        norm_xyz = numpy.sqrt(numpy.sum(self.centres**2, axis=1))
        self.orientations = self.centres / norm_xyz[:, numpy.newaxis]

    def centres_toroidal(self,
                         number_of_regions=4,
                         max_radius=77.,
                         min_radius=13.,
                         mu=numpy.pi,
                         kappa=numpy.pi / 6):
        """
        The nodes are lying on  a torus.
        See: http://mathworld.wolfram.com/Torus.html

        """

        u = scipy.stats.vonmises.rvs(kappa, loc=mu, size=number_of_regions)
        v = scipy.stats.vonmises.rvs(kappa, loc=mu, size=number_of_regions)

        # To cartesian coordinates
        x = (max_radius + min_radius * numpy.cos(v)) * numpy.cos(u)
        y = (max_radius + min_radius * numpy.cos(v)) * numpy.sin(u)
        z = min_radius * numpy.sin(v)

        # Tangent vector with respect to max_radius (azimuthal direction)
        tx = -numpy.sin(u)
        ty = numpy.cos(u)
        tz = 0

        # Tangent vector with respect to min_radius (tube direction)
        sx = numpy.cos(u) * (-numpy.sin(v))
        sy = numpy.sin(u) * (-numpy.sin(v))
        sz = numpy.cos(v)

        # Normal vector
        nx = ty * sz - tz * sy
        ny = tz * sx - tx * sz
        nz = tx * sy - ty * sx

        # Normalize normal vectors
        norm = numpy.sqrt(nx**2 + ny**2 + nz**2)
        nx /= norm
        ny /= norm
        nz /= norm

        self.orientations = numpy.array([nx, ny, nz]).T
        self.centres = numpy.array([x, y, z]).T

    def centres_annular(self,
                        number_of_regions=4,
                        max_radius=77.,
                        min_radius=13.,
                        mu=numpy.pi,
                        kappa=numpy.pi / 6):
        """
        The nodes are lying inside an annulus.

        """

        r = numpy.random.uniform(low=min_radius,
                                 high=max_radius,
                                 size=number_of_regions)
        theta = scipy.stats.vonmises.rvs(kappa, loc=mu, size=number_of_regions)

        # To cartesian coordinates
        x = r * numpy.cos(theta)
        y = r * numpy.sin(theta)
        z = numpy.zeros(number_of_regions)

        self.centres = numpy.array([x, y, z]).T

    def centres_cubic(self, number_of_regions=4, max_radius=42., flat=False):
        """
        The nodes are positioined in a 3D grid inside the cube centred at the origin and
        with edges parallel to the axes, with an edge length of 2*max_radius.

        """

        # To cartesian coordinates
        x = numpy.linspace(-max_radius, max_radius, number_of_regions)
        y = numpy.linspace(-max_radius, max_radius, number_of_regions)

        if flat:
            z = numpy.zeros(number_of_regions)
        else:
            z = numpy.linspace(-max_radius, max_radius, number_of_regions)

        self.centres = numpy.array([x, y, z]).T

    def generate_surrogate_connectivity(self,
                                        number_of_regions,
                                        motif='chain',
                                        undirected=True,
                                        these_centres='spherical'):
        """
        This one generates some defaults.
        For more specific motifs, generate invoking each method separetly.

        """

        # NOTE: Luckily I went for 5 motifs ...
        if motif == 'chain' and undirected:
            self.motif_chain_undirected(number_of_regions=number_of_regions)
        elif motif == "chain" and not undirected:
            self.motif_chain_directed(number_of_regions=number_of_regions)
        elif motif == 'linear' and undirected:
            self.motif_linear_undirected(number_of_regions=number_of_regions)
        elif motif == 'linear' and not undirected:
            self.motif_linear_directed(number_of_regions=number_of_regions)
        else:
            self.log.info("Generating all-to-all connectivity")
            self.motif_all_to_all(number_of_regions=number_of_regions)

        # centres
        if these_centres in ("spherical", "annular", "toroidal", "cubic"):
            getattr(self, "centres_" + these_centres)(number_of_regions=number_of_regions)
        else:
            raise Exception("Bad centres geometry")

    def create_region_labels(self, mode="numeric"):
        """
        Assumes weights already exists
        """

        self.log.info("Create labels: %s" % str(mode))

        if mode in ("numeric", "num"):
            region_labels = [n for n in range(self.number_of_regions)]
            self.region_labels = numpy.array(region_labels).astype(str)
        elif mode in ("alphabetic", "alpha"):
            if self.number_of_regions < 26:
                self.region_labels = numpy.array(
                    list(map(chr, list(range(
                        65, 65 + self.number_of_regions))))).astype(str)
            else:
                self.log.info(
                    "I'm too lazy to create several strategies to label regions.")
                self.log.info(
                    "Please choose mode 'numeric' or set your own labels.")
        else:
            self.log.error("Bad region labels mode, must be one of:")
            self.log.error("('numeric', 'num', 'alphabetic', 'alpha')")
            raise Exception("Bad region labels mode")

    def unmapped_indices(self, region_mapping):
        """
        Compute vector of indices of regions in connectivity which are not in the given
        region mapping.

        """

        return numpy.setdiff1d(numpy.r_[:self.number_of_regions],
                               region_mapping)

    @staticmethod
    def from_file(source_file="connectivity_76.zip"):

        result = Connectivity()
        source_full_path = try_get_absolute_path("tvb_data.connectivity",
                                                 source_file)

        if source_file.endswith(".h5"):
            reader = H5Reader(source_full_path)

            result.weights = reader.read_field("weights")
            result.centres = reader.read_field("centres")
            result.region_labels = reader.read_field("region_labels")
            result.orientations = reader.read_optional_field("orientations")
            result.cortical = reader.read_optional_field("cortical")
            result.hemispheres = reader.read_field("hemispheres")
            result.areas = reader.read_optional_field("areas")
            result.tract_lengths = reader.read_field("tract_lengths")

        else:
            reader = ZipReader(source_full_path)

            result.weights = reader.read_array_from_file("weights")
            if reader.has_file_like("centres"):
                result.centres = reader.read_array_from_file("centres",
                                                             use_cols=(1, 2,
                                                                       3))
                result.region_labels = reader.read_array_from_file(
                    "centres", dtype=str, use_cols=(0, ))
            else:
                result.centres = reader.read_array_from_file("centers",
                                                             use_cols=(1, 2,
                                                                       3))
                result.region_labels = reader.read_array_from_file(
                    "centers", dtype=str, use_cols=(0, ))
            result.orientations = reader.read_optional_array_from_file(
                "average_orientations")
            result.cortical = reader.read_optional_array_from_file(
                "cortical", dtype=bool)
            result.hemispheres = reader.read_optional_array_from_file(
                "hemispheres", dtype=bool)
            result.areas = reader.read_optional_array_from_file("areas")
            result.tract_lengths = reader.read_array_from_file("tract_lengths")

        return result

    @property
    def horizon(self):
        "The horizon is the maximum number of steps required in memory for simulation."
        return self._horizon
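A small end-to-end sketch of the motif and hemisphere-ordering helpers above; the values are illustrative:

import numpy

conn = Connectivity()
conn.motif_all_to_all(number_of_regions=4)   # 4 nodes, no self-connections
conn.hemispheres = numpy.array([False, True, False, True])
perm = conn.hemisphere_order_indices         # -> array([0, 2, 1, 3])
ordered = conn.ordered_weights               # left-hemisphere rows/columns first
scaled = conn.scaled_weights(mode='tract')   # max |weight| becomes 1.0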
Example 27
class TestModel(ViewModel):
    test1_val1 = Int(default=0)
    test1_val2 = Int(default=0)
Example 28
class PowerSpectraInteractive(HasTraits):
    """
    The graphical interface for visualising the power spectra (FFT) of a
    timeseries provides controls for setting:

        - which state-variable and mode to display [sets]
        - log or linear scaling for the power or frequency axis [binary]
        - segmentation length [set]
        - windowing function [set]
        - power normalisation [binary] (emphasise relative frequency contribution)
        - show std or sem [binary]

    """

    time_series = Attr(
        field_type=time_series_datatypes.TimeSeries,
        label="Timeseries",
        default=None,
        doc=""" The timeseries to which the FFT is to be applied.""")

    first_n = Int(
        label="Display the first 'n'",
        default=-1,
        doc="""Primarily intended for displaying the first N components of a 
            surface PCA timeseries. Defaults to -1, meaning it'll display all
            of 'space' (ie, regions or vertices or channels). In other words,
            for Region or M/EEG timeseries you can ignore this, but, for a 
            surface timeseries it really must be set.""")

    def __init__(self, **kwargs):
        """
        Initialise based on provided keywords or their traited defaults. Also,
        initialise the place-holder attributes that aren't filled until the
        show() method is called.

        """
        super(PowerSpectraInteractive, self).__init__(**kwargs)
        #figure
        self.ifft_fig = None

        #spectra axes
        self.fft_ax = None

        #Current state
        self.xscale = "linear"
        self.yscale = "log"
        self.mode = 0
        self.variable = 0
        self.show_sem = False
        self.show_std = False
        self.normalise_power = "no"
        self.window_length = 0.25
        self.window_function = "None"

        #Selectors
        self.xscale_selector = None
        self.yscale_selector = None
        self.mode_selector = None
        self.variable_selector = None
        self.show_sem_selector = None
        self.show_std_selector = None
        self.normalise_power_selector = None
        self.window_length_selector = None
        self.window_function_selector = None

        #
        possible_freq_steps = [2**x for x in range(-2, 7)]  #Hz
        #possible_freq_steps.append(1.0 / self.time_series_length) #Hz
        self.possible_window_lengths = 1.0 / numpy.array(
            possible_freq_steps)  #s
        self.freq_step = 1.0 / self.window_length
        self.frequency = None
        self.spectra = None
        self.spectra_norm = None

        #Sliders
        #self.window_length_slider = None

    def configure(self):
        """ Seperate configure cause ttraits be busted... """
        LOG.debug("time_series shape: %s" % str(self.time_series.data.shape))
        #TODO: if isinstance(self.time_series, TimeSeriesSurface) and self.first_n == -1: #LOG.error, return.
        self.data = self.time_series.data[:, :, :self.first_n, :]
        self.period = self.time_series.sample_period
        self.max_freq = 0.5 / self.period
        self.units = "Hz"
        self.tpts = self.data.shape[0]
        self.nsrs = self.data.shape[2]
        self.time_series_length = self.tpts * self.period
        self.time = numpy.arange(self.tpts) * self.period
        self.labels = ["channel_%0.3d" % k for k in range(self.nsrs)]

    def show(self):
        """ Generate the interactive power-spectra figure. """
        #Make sure everything is configured
        self.configure()

        #Make the figure:
        self.create_figure()

        #Selectors
        self.add_xscale_selector()
        self.add_yscale_selector()
        self.add_mode_selector()
        self.add_variable_selector()
        self.add_normalise_power_selector()
        self.add_window_length_selector()
        self.add_window_function_selector()

        #Sliders
        #self.add_window_length_slider() #Want discrete values
        #self.add_scaling_slider()

        #...
        self.calc_fft()

        #Plot timeseries
        self.plot_spectra()

        pylab.show()

    ##------------------------------------------------------------------------##
    ##------------------ Functions for building the figure -------------------##
    ##------------------------------------------------------------------------##
    def create_figure(self):
        """ Create the figure and time-series axes. """
        time_series_type = self.time_series.__class__.__name__
        try:
            figure_window_title = "Interactive power spectra: " + time_series_type
            pylab.close(figure_window_title)
            self.ifft_fig = pylab.figure(num=figure_window_title,
                                         figsize=(16, 8),
                                         facecolor=BACKGROUNDCOLOUR,
                                         edgecolor=EDGECOLOUR)
        except ValueError:
            LOG.info("My life would be easier if you'd update your PyLab...")
            figure_number = 42
            pylab.close(figure_number)
            self.ifft_fig = pylab.figure(num=figure_number,
                                         figsize=(16, 8),
                                         facecolor=BACKGROUNDCOLOUR,
                                         edgecolor=EDGECOLOUR)

        self.fft_ax = self.ifft_fig.add_axes([0.15, 0.2, 0.7, 0.75])

    def add_xscale_selector(self):
        """
        Add a radio button to the figure for selecting which scaling the x-axes
        should use.
        """
        pos_shp = [0.45, 0.02, 0.05, 0.104]
        rax = self.ifft_fig.add_axes(pos_shp,
                                     facecolor=AXCOLOUR,
                                     title="xscale")
        xscale_tuple = ("log", "linear")
        self.xscale_selector = widgets.RadioButtons(rax,
                                                    xscale_tuple,
                                                    active=1)
        self.xscale_selector.on_clicked(self.update_xscale)

    def add_yscale_selector(self):
        """
        Add a radio button to the figure for selecting which scaling the y-axes
        should use.
        """
        pos_shp = [0.02, 0.5, 0.05, 0.104]
        rax = self.ifft_fig.add_axes(pos_shp,
                                     facecolor=AXCOLOUR,
                                     title="yscale")
        yscale_tuple = ("log", "linear")
        self.yscale_selector = widgets.RadioButtons(rax,
                                                    yscale_tuple,
                                                    active=0)
        self.yscale_selector.on_clicked(self.update_yscale)

    def add_mode_selector(self):
        """
        Add a radio button to the figure for selecting which mode of the model
        should be displayed.
        """
        pos_shp = [0.02, 0.07, 0.05, 0.1 + 0.002 * self.data.shape[3]]
        rax = self.ifft_fig.add_axes(pos_shp, facecolor=AXCOLOUR, title="Mode")
        mode_tuple = tuple(range(self.data.shape[3]))
        self.mode_selector = widgets.RadioButtons(rax, mode_tuple, active=0)
        self.mode_selector.on_clicked(self.update_mode)

    def add_variable_selector(self):
        """
        Generate radio selector buttons to set which state variable is 
        displayed.
        """
        noc = self.data.shape[1]  # number of choices
        # Radio buttons for the state variable to display
        pos_shp = [0.02, 0.22, 0.05, 0.12 + 0.008 * noc]
        rax = self.ifft_fig.add_axes(pos_shp,
                                     facecolor=AXCOLOUR,
                                     title="state variable")
        self.variable_selector = widgets.RadioButtons(rax,
                                                      tuple(range(noc)),
                                                      active=0)
        self.variable_selector.on_clicked(self.update_variable)

    def add_window_length_selector(self):
        """
        Generate radio selector buttons to set the window length in seconds.
        """
        noc = self.possible_window_lengths.shape[0]  # number of choices
        # Radio buttons for the segment/window length
        pos_shp = [0.88, 0.07, 0.1, 0.12 + 0.02 * noc]
        rax = self.ifft_fig.add_axes(pos_shp,
                                     facecolor=AXCOLOUR,
                                     title="Segment length")
        wl_tup = tuple(self.possible_window_lengths)
        self.window_length_selector = widgets.RadioButtons(rax,
                                                           wl_tup,
                                                           active=4)
        self.window_length_selector.on_clicked(self.update_window_length)

    def add_window_function_selector(self):
        """
        Generate radio selector buttons to set the windowing function.
        """
        #TODO: add support for kaiser; requires specification of beta.
        wf_tup = ("None", "hamming", "bartlett", "blackman", "hanning")
        noc = len(wf_tup)  # number of choices
        # Radio buttons for the windowing function
        pos_shp = [0.88, 0.77, 0.085, 0.12 + 0.01 * noc]
        rax = self.ifft_fig.add_axes(pos_shp,
                                     facecolor=AXCOLOUR,
                                     title="Windowing function")
        self.window_function_selector = widgets.RadioButtons(rax,
                                                             wf_tup,
                                                             active=0)
        self.window_function_selector.on_clicked(self.update_window_function)

    def add_normalise_power_selector(self):
        """
        Add a radio button to choose whether or not the power of all spectra
        should be normalised to 1.
        """
        pos_shp = [0.02, 0.8, 0.05, 0.104]
        rax = self.ifft_fig.add_axes(pos_shp,
                                     facecolor=AXCOLOUR,
                                     title="normalise")
        np_tuple = ("yes", "no")
        self.normalise_power_selector = widgets.RadioButtons(rax,
                                                             np_tuple,
                                                             active=1)
        self.normalise_power_selector.on_clicked(self.update_normalise_power)

    ##------------------------------------------------------------------------##
    ##------------------ Functions for updating the state --------------------##
    ##------------------------------------------------------------------------##
    def calc_fft(self):
        """
        Calculate the FFT using the current window_length and window_function.
        """
        #Segment time-series, overlapping if necessary
        nseg = int(numpy.ceil(self.time_series_length / self.window_length))
        if nseg != 1:
            seg_tpts = int(self.window_length / self.period)
            overlap = ((seg_tpts * nseg) - self.tpts) / (nseg - 1)
            starts = [int(max(seg * (seg_tpts - overlap), 0))
                      for seg in range(nseg)]
            segments = [self.data[start:start + seg_tpts] for start in starts]
            # Trailing segment axis, matching the 5-D shape of the else branch
            segments = [segment[:, :, :, :, numpy.newaxis]
                        for segment in segments]
            time_series = numpy.concatenate(segments, axis=4)
        else:
            time_series = self.data[:, :, :, :, numpy.newaxis]
            seg_tpts = time_series.shape[0]

        #Base-line correct segmented time-series
        time_series = time_series - time_series.mean(axis=0)[numpy.newaxis, :]

        #Apply windowing function
        if self.window_function != "None":
            window_function = getattr(numpy, self.window_function)
            window_mask = numpy.reshape(window_function(seg_tpts),
                                        (seg_tpts, 1, 1, 1, 1))
            time_series = time_series * window_mask

        #Calculate the FFT
        result = numpy.fft.fft(time_series, axis=0)
        nfreq = len(result) // 2

        self.frequency = numpy.arange(0, self.max_freq, self.freq_step)
        LOG.debug("frequency shape: %s" % str(self.frequency.shape))

        self.spectra = numpy.mean(numpy.abs(result[1:nfreq + 1])**2, axis=-1)
        LOG.debug("spectra shape: %s" % str(self.spectra.shape))

        self.spectra_norm = (self.spectra / numpy.sum(self.spectra, axis=0))
        LOG.debug("spectra_norm shape: %s" % str(self.spectra_norm.shape))

#        self.spectra_std = numpy.std(numpy.abs(result[:nfreq]), axis=4)
#        self.spectra_sem = self.spectra_std / time_series.shape[4]
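
    # A worked example of the segmentation arithmetic in calc_fft, assuming a
    # hypothetical 1000-point series with period = 0.001 s (a 1.0 s signal)
    # and window_length = 0.25 s:
    #   nseg      = ceil(1.0 / 0.25)     = 4
    #   seg_tpts  = int(0.25 / 0.001)    = 250
    #   overlap   = (250 * 4 - 1000) / 3 = 0.0  (segments tile exactly)
    #   starts    = [0, 250, 500, 750]
    #   freq_step = 1 / 0.25 = 4 Hz, i.e. spectra land on a 4 Hz grid.
    # With 900 points instead, overlap = (1000 - 900) / 3 = 33.3 and the
    # starts become [0, 216, 433, 650], overlapping to cover the signal.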

    ##------------------------------------------------------------------------##
    ##------------------ Functions for updating the figure -------------------##
    ##------------------------------------------------------------------------##

    def update_xscale(self, xscale):
        """ 
        Update the FFT axes' xscale to either log or linear based on radio
        button selection.
        """
        self.xscale = xscale
        self.fft_ax.set_xscale(self.xscale)
        pylab.draw()

    def update_yscale(self, yscale):
        """ 
        Update the FFT axes' yscale to either log or linear based on radio
        button selection.
        """
        self.yscale = yscale
        self.fft_ax.set_yscale(self.yscale)
        pylab.draw()

    def update_mode(self, mode):
        """ Update the visualised mode based on radio button selection. """
        self.mode = mode
        self.plot_spectra()

    def update_variable(self, variable):
        """ 
        Update state variable being plotted based on radio button selection.
        """
        self.variable = variable
        self.plot_spectra()

    def update_normalise_power(self, normalise_power):
        """ Update whether to normalise based on radio button selection. """
        self.normalise_power = normalise_power
        self.plot_spectra()

    def update_window_length(self, length):
        """
        Update timeseries window length based on the selected value.
        """
        # The radio button callback receives the label as a string, so cast
        # it back to a float before use.
        self.window_length = numpy.float64(length)
        self.freq_step = 1.0 / self.window_length
        self.update_spectra()

    def update_window_function(self, window_function):
        """
        Update windowing function based on the radio button selection.
        """
        self.window_function = window_function
        self.update_spectra()

    def update_spectra(self):
        """ Clear the axes and redraw the power-spectra. """
        self.calc_fft()
        self.plot_spectra()

#    def plot_std(self):
#        """ Plot """
#        std = (self.spectra[:, self.variable, :, self.mode] +
#               self.spectra_std[:, self.variable, :, self.mode])
#        self.fft_ax.plot(self.frequency, std, "--")
#
#
#    def plot_sem(self):
#        """  """
#        sem = (self.spectra[:, self.variable, :, self.mode] +
#               self.spectra_sem[:, self.variable, :, self.mode])
#        self.fft_ax.plot(self.frequency, sem, ":")

    def plot_spectra(self):
        """ Plot the power spectra. """
        self.fft_ax.clear()
        # Set title and axis labels
        time_series_type = self.time_series.__class__.__name__
        self.fft_ax.set(title=time_series_type,
                        xlabel="Frequency (%s)" % self.units,
                        ylabel="Power")

        # Set x and y scale based on current radio button selection.
        self.fft_ax.set_xscale(self.xscale)
        self.fft_ax.set_yscale(self.yscale)

        if hasattr(self.fft_ax, 'autoscale'):
            self.fft_ax.autoscale(enable=True, axis='both', tight=True)

        #Plot the power spectra
        if self.normalise_power == "yes":
            self.fft_ax.plot(self.frequency,
                             self.spectra_norm[:, self.variable, :, self.mode])
        else:
            self.fft_ax.plot(self.frequency, self.spectra[:, self.variable, :,
                                                          self.mode])


#        #TODO: Need to ensure colour matching... and allow region selection.
#        #If requested, add standard deviation
#        if self.show_std:
#            self.plot_std(self)
#
#        #If requested, add standard error in mean
#        if self.show_sem:
#            self.plot_sem(self)

        pylab.draw()
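
# A minimal usage sketch for the interactive viewer above. Hedged
# assumptions: the class is TVB's PowerSpectraInteractive and the input is
# a 4-D (time, state-variable, space, mode) TimeSeries; adjust the names to
# the actual definitions in your version.
if __name__ == "__main__":
    import numpy
    from tvb.datatypes.time_series import TimeSeries

    ts = TimeSeries(data=numpy.random.randn(1000, 1, 76, 1),
                    sample_period=0.001)  # 1 kHz sampling, 76 regions
    viewer = PowerSpectraInteractive(time_series=ts)
    viewer.show()  # builds the figure, selectors and initial spectra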
Ejemplo n.º 29
0
class IndependentComponents(HasTraits):
    """
    Result of an Independent Component Analysis.

    """
    source = Attr(field_type=time_series.TimeSeries,
                  label="Source time-series",
                  doc="Links to the time-series on which the ICA is applied.")

    mixing_matrix = NArray(label="Mixing matrix - Spatial maps",
                           required=False,
                           doc="""The linear mixing matrix A, mapping the
                               independent components back to the observed
                               data (X = A * S).""")

    unmixing_matrix = NArray(
        label="Unmixing matrix - Spatial maps",
        doc=
        """The estimated unmixing matrix used to obtain the unmixed sources from the data"""
    )

    prewhitening_matrix = NArray(
        label="Pre-whitening matrix",
        doc="""Matrix that whitens the observed data before unmixing.""")

    n_components = Int(
        label="Number of independent components",
        doc=""" Observed data matrix is considered to be a linear combination
            of :math:`n` non-Gaussian independent components""")

    norm_source = NArray(
        label="Normalised source time series. Zero centered and whitened.")

    component_time_series = NArray(
        label="Component time series. Unmixed sources.")

    normalised_component_time_series = NArray(
        label="Normalised component time series")

    def compute_norm_source(self):
        """Normalised source time-series."""
        self.norm_source = (
            (self.source.data - self.source.data.mean(axis=0)) /
            self.source.data.std(axis=0))

    def compute_component_time_series(self):
        ts_shape = self.source.data.shape
        component_ts_shape = (ts_shape[0], ts_shape[1], self.n_components,
                              ts_shape[3])
        component_ts = numpy.zeros(component_ts_shape)
        for var in range(ts_shape[1]):
            for mode in range(ts_shape[3]):
                w = self.unmixing_matrix[:, :, var, mode]
                k = self.prewhitening_matrix[:, :, var, mode]
                ts = self.source.data[:, var, :, mode]
                component_ts[:, var, :,
                             mode] = numpy.dot(w, numpy.dot(k, ts.T)).T
        self.component_time_series = component_ts

    def compute_normalised_component_time_series(self):
        ts_shape = self.source.data.shape
        component_ts_shape = (ts_shape[0], ts_shape[1], self.n_components,
                              ts_shape[3])
        component_nts = numpy.zeros(component_ts_shape)
        for var in range(ts_shape[1]):
            for mode in range(ts_shape[3]):
                w = self.unmixing_matrix[:, :, var, mode]
                k = self.prewhitening_matrix[:, :, var, mode]
                nts = self.norm_source[:, var, :, mode]
                component_nts[:, var, :,
                              mode] = numpy.dot(w, numpy.dot(k, nts.T)).T
        self.normalised_component_time_series = component_nts

    def compute_mixing_matrix(self):
        """
        Compute the linear mixing matrix A, so X = A * S ,
        where X is the observed data and S contain the independent components
            """
        ts_shape = self.source.data.shape
        mixing_matrix_shape = (ts_shape[2], self.n_components, ts_shape[1],
                               ts_shape[3])
        mixing_matrix = numpy.zeros(mixing_matrix_shape)
        for var in range(ts_shape[1]):
            for mode in range(ts_shape[3]):
                w = self.unmixing_matrix[:, :, var, mode]
                k = self.prewhitening_matrix[:, :, var, mode]
                m = numpy.dot(w, k)
                # A is the right pseudo-inverse of the unmixing transform:
                # A = M^T (M M^T)^-1, so that X ~= A * S.
                mixing_matrix[:, :, var, mode] = numpy.dot(
                    m.T, numpy.linalg.inv(numpy.dot(m, m.T)))
        self.mixing_matrix = mixing_matrix

    def summary_info(self):
        """
        Gather scientifically interesting summary information from an instance
        of this datatype.
        """
        return {
            "Mode decomposition type": self.__class__.__name__,
            "Source": self.source.title
        }
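
# A hedged sketch of the intended call order for IndependentComponents,
# assuming unmixing_matrix and prewhitening_matrix were produced elsewhere
# (e.g. by an ICA adapter) with the 4-D TVB shape convention:
#
#     ica = IndependentComponents(source=ts,
#                                 unmixing_matrix=w,      # (n_comp, n_comp, svar, mode)
#                                 prewhitening_matrix=k,  # (n_comp, n_chan, svar, mode)
#                                 n_components=n_comp)
#     ica.compute_norm_source()
#     ica.compute_component_time_series()  # S = W K X, per variable and mode
#     ica.compute_mixing_matrix()          # A with X ~= A * S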
Ejemplo n.º 30
0
class A(HasTraits):
    a = Int()
    b = Int(field_type=np.int8)
    c = Int(field_type=np.uint16)
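
# A small hedged sketch of how these declarations might be exercised,
# assuming the neotraits Int validates assignments against its field_type
# (plain int when none is given):
#
#     import numpy as np
#     a = A()
#     a.a = 42                # plain Python int
#     a.b = np.int8(7)        # must fit the declared np.int8
#     a.c = np.uint16(65535)  # largest value representable in uint16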