def launch(self, time_series):
        """
        Launch algorithm and build results.

        :returns: the `ComplexCoherenceSpectrum` built with the given time-series
        """
        shape = time_series.read_data_shape()
        
        ##------- Prepare a ComplexCoherenceSpectrum object for result -------##
        spectra = ComplexCoherenceSpectrum(source=time_series,
                                           storage_path=self.storage_path)
        
        ##------------------- NOTE: Assumes 4D TimeSeries. -------------------##
        node_slice = [slice(shape[0]), slice(shape[1]), slice(shape[2]), slice(shape[3])]
        
        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        small_ts.sample_rate = time_series.sample_rate
        small_ts.data = time_series.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        
        partial_result = self.algorithm.evaluate()
        LOG.debug("got partial_result")
        LOG.debug("partial segment_length is %s" % (str(partial_result.segment_length)))
        LOG.debug("partial epoch_length is %s" % (str(partial_result.epoch_length)))
        LOG.debug("partial windowing_function is %s" % (str(partial_result.windowing_function)))
        #LOG.debug("partial frequency vector is %s" % (str(partial_result.frequency)))
        
        spectra.write_data_slice(partial_result)
        spectra.segment_length = partial_result.segment_length
        spectra.epoch_length = partial_result.epoch_length
        spectra.windowing_function = partial_result.windowing_function
        #spectra.frequency = partial_result.frequency
        spectra.close_file()
        return spectra
    def launch(self, time_series, nfft=None):
        """
        Launch algorithm and build results.
        """
        ##--------- Prepare a CoherenceSpectrum object for result ------------##
        coherence = CoherenceSpectrum(source=time_series,
                                      nfft=self.algorithm.nfft,
                                      storage_path=self.storage_path)

        ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]

        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        small_ts.sample_rate = time_series.sample_rate
        partial_coh = None
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_coh = self.algorithm.evaluate()
            coherence.write_data_slice(partial_coh)
        coherence.frequency = partial_coh.frequency
        coherence.close_file()
        return coherence
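Every adapter in this listing builds its node_slice the same way: slice(n) spans a whole dimension, and a None placeholder marks the dimension the loop fills in one plane at a time. A minimal, self-contained numpy sketch of that pattern (a toy array stands in for the HDF5-backed time series):

import numpy

data = numpy.random.random((100, 2, 16, 1))   # toy (time, state-var, node, mode) array

# slice(s) selects everything along a dimension, so this covers the full array
node_slice = [slice(s) for s in data.shape]
assert data[tuple(node_slice)].shape == data.shape

# swap the placeholder per iteration, exactly as the launch() loops above do
for var in range(data.shape[1]):
    node_slice[1] = slice(var, var + 1)
    chunk = data[tuple(node_slice)]
    assert chunk.shape == (100, 1, 16, 1)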
    def launch(self, time_series):
        """ 
        Launch algorithm and build results.

        :param time_series: the input time series for which the correlation should be computed
        :returns: the cross correlation for the given time series
        :rtype: `CrossCorrelation`
        """
        ##--------- Prepare a CrossCorrelation object for result ------------##
        cross_corr = CrossCorrelation(source=time_series,
                                      storage_path=self.storage_path)
        
        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        small_ts.sample_period = time_series.sample_period
        partial_cross_corr = None
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_cross_corr = self.algorithm.evaluate()
            cross_corr.write_data_slice(partial_cross_corr)
        cross_corr.time = partial_cross_corr.time
        cross_corr.labels_ordering[1] = time_series.labels_ordering[2]
        cross_corr.labels_ordering[2] = time_series.labels_ordering[2]
        cross_corr.close_file()
        return cross_corr
    def launch(self, time_series, dt=None, bold_model=None, RBM=None, neural_input_transformation=None):
        """
        Launch algorithm and build results.

        :param time_series: the input time-series used as neural activation in the Balloon Model
        :returns: the simulated BOLD signal
        :rtype: `TimeSeriesRegion`
        """
        time_line = time_series.read_time_page(0, self.input_shape[0])
        bold_signal = TimeSeriesRegion(storage_path=self.storage_path,
                                       sample_period=time_series.sample_period,
                                       start_time=time_series.start_time,
                                       connectivity=time_series.connectivity)

        ##---------- Iterate over slices and compose final result ------------##

        node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]
        small_ts = TimeSeries(use_storage=False, sample_period=time_series.sample_period, time=time_line)
        
        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_bold = self.algorithm.evaluate()
            bold_signal.write_data_slice(partial_bold.data, grow_dimension=2)

        bold_signal.write_time_slice(time_line)
        bold_signal.close_file()
        return bold_signal
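The BalloonModel analyser integrates a nonlinear haemodynamic model per node; for intuition only (this is not TVB's algorithm), a BOLD-like signal can be approximated by convolving neural activity with a gamma-shaped haemodynamic response. A crude linear sketch, with kernel shape and constants as illustrative assumptions:

import numpy

def toy_bold(neural, dt=0.01):
    # Gamma-like HRF kernel: a crude linear stand-in for the balloon model
    t = numpy.arange(0.0, 25.0, dt)
    hrf = t ** 5 * numpy.exp(-t)
    hrf /= hrf.sum()
    # convolve each node's (time,) series, keeping the causal part
    return numpy.apply_along_axis(lambda x: numpy.convolve(x, hrf)[:x.size], 0, neural)

bold = toy_bold(numpy.random.random((5000, 16)))   # (time, nodes) toy input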
    def launch(self, time_series, mother=None, sample_period=None, normalisation=None, q_ratio=None,
               frequencies='Range', frequencies_parameters=None):
        """
        Launch algorithm and build results.
        """
        ##--------- Prepare a WaveletCoefficients object for result ----------##
        frequencies_array = numpy.array([])
        if self.algorithm.frequencies is not None:
            frequencies_array = numpy.array(list(self.algorithm.frequencies))
        wavelet = WaveletCoefficients(source=time_series, mother=self.algorithm.mother, q_ratio=self.algorithm.q_ratio,
                                      sample_period=self.algorithm.sample_period, frequencies=frequencies_array,
                                      normalisation=self.algorithm.normalisation, storage_path=self.storage_path)

        ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]

        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        small_ts.sample_rate = time_series.sample_rate
        small_ts.sample_period = time_series.sample_period
        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_wavelet = self.algorithm.evaluate()
            wavelet.write_data_slice(partial_wavelet)

        wavelet.close_file()
        return wavelet
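self.algorithm here is TVB's continuous wavelet analyser; for a sense of what evaluate() computes per node, a minimal direct-convolution complex Morlet transform (the parameterisation is illustrative, not TVB's exact mother-wavelet code):

import numpy

def toy_morlet_cwt(signal, freqs, fs, q_ratio=5.0):
    # one complex row of coefficients per analysis frequency
    out = numpy.empty((len(freqs), signal.size), dtype=complex)
    for i, f in enumerate(freqs):
        sigma = q_ratio / (2.0 * numpy.pi * f)   # temporal width from the Q ratio
        t = numpy.arange(-4 * sigma, 4 * sigma, 1.0 / fs)
        wavelet = numpy.exp(2j * numpy.pi * f * t) * numpy.exp(-t ** 2 / (2 * sigma ** 2))
        out[i] = numpy.convolve(signal, wavelet, mode='same')
    return out

coeffs = toy_morlet_cwt(numpy.random.random(1000), [10.0, 20.0, 40.0], fs=250.0)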
    def launch(self, view_model):
        # type: (CrossCorrelateAdapterModel) -> [CrossCorrelationIndex]
        """ 
        Launch algorithm and build results.
        Compute the node-pairwise cross-correlation of the source 4D TimeSeries represented by the index given as input.

        Return a CrossCorrelationIndex. Create a CrossCorrelationH5 that contains the cross-correlation
        sequences for all possible combinations of the nodes.

        See: http://www.scipy.org/doc/api_docs/SciPy.signal.signaltools.html#correlate

        :param view_model: the ViewModel keeping the algorithm inputs
        :returns: the cross correlation index for the given time series
        :rtype: `CrossCorrelationIndex`
        """
        # --------- Prepare CrossCorrelationIndex and CrossCorrelationH5 objects for result ------------##
        cross_corr_index = CrossCorrelationIndex()
        cross_corr_h5_path = h5.path_for(self.storage_path, CrossCorrelationH5, cross_corr_index.gid)
        cross_corr_h5 = CrossCorrelationH5(cross_corr_h5_path)

        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()

        with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
            small_ts.sample_period = ts_h5.sample_period.load()
            small_ts.sample_period_unit = ts_h5.sample_period_unit.load()
            partial_cross_corr = None
            labels_ordering = ts_h5.labels_ordering.load()
            for var in range(self.input_shape[1]):
                node_slice[1] = slice(var, var + 1)
                small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
                partial_cross_corr = self._compute_cross_correlation(small_ts, ts_h5)
                cross_corr_h5.write_data_slice(partial_cross_corr)
            ts_array_metadata = cross_corr_h5.array_data.get_cached_metadata()

        cross_corr_h5.time.store(partial_cross_corr.time)
        cross_corr_labels_ordering = list(partial_cross_corr.labels_ordering)
        cross_corr_labels_ordering[1] = labels_ordering[2]
        cross_corr_labels_ordering[2] = labels_ordering[2]
        cross_corr_h5.labels_ordering.store(json.dumps(tuple(cross_corr_labels_ordering)))
        cross_corr_h5.source.store(uuid.UUID(self.input_time_series_index.gid))
        cross_corr_h5.gid.store(uuid.UUID(cross_corr_index.gid))

        cross_corr_index.fk_source_gid = self.input_time_series_index.gid
        cross_corr_index.labels_ordering = cross_corr_h5.labels_ordering.load()
        cross_corr_index.type = type(cross_corr_index).__name__
        cross_corr_index.array_data_min = ts_array_metadata.min
        cross_corr_index.array_data_max = ts_array_metadata.max
        cross_corr_index.array_data_mean = ts_array_metadata.mean

        cross_corr_h5.close()
        return cross_corr_index
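The scipy link above points at the per-pair primitive behind _compute_cross_correlation; a minimal node-pairwise sketch over one (time, nodes) block, zero-meaned so the sequences are cross-covariances at every lag:

import numpy
from scipy.signal import correlate

data = numpy.random.random((500, 8))              # toy (time, nodes) block
data -= data.mean(axis=0)

n_time, n_nodes = data.shape
xcorr = numpy.empty((2 * n_time - 1, n_nodes, n_nodes))
for i in range(n_nodes):
    for j in range(n_nodes):
        xcorr[:, i, j] = correlate(data[:, i], data[:, j], mode='full')
lags = numpy.arange(-(n_time - 1), n_time)        # lag axis matching axis 0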
    def launch(self, time_series, algorithms=None):
        """ 
        Launch algorithm and build results.

        :param time_series: the time series on which the algorithms are run
        :param algorithms:  the algorithms to be run for computing measures on the time series
        :type  algorithms:  any subclass of BaseTimeseriesMetricAlgorithm (KuramotoIndex, \
                    GlobalVariance, VarianceNodeVariance)
        :rtype: `DatatypeMeasure`
        """
        if algorithms is None:
            algorithms = self.available_algorithms.keys()
        shape = time_series.read_data_shape()
        log_debug_array(LOG, time_series, "time_series")

        metrics_results = {}
        for algorithm_name in algorithms:
            ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
            node_slice = [
                slice(shape[0]),
                slice(shape[1]),
                slice(shape[2]),
                slice(shape[3])
            ]

            ##---------- Iterate over slices and compose final result ------------##
            unstored_ts = TimeSeries(use_storage=False)

            unstored_ts.data = time_series.read_data_slice(tuple(node_slice))

            ##-------------------- Fill Algorithm for Analysis -------------------##
            algorithm = self.available_algorithms[algorithm_name](
                time_series=unstored_ts)
            ## Validate that current algorithm's filter is valid.
            if (algorithm.accept_filter is not None and
                    not algorithm.accept_filter.get_python_filter_equivalent(time_series)):
                LOG.warning('Measure algorithm will not be computed because of incompatibility on input. '
                            'Filters failed on algo: ' + str(algorithm_name))
                continue
            else:
                LOG.debug("Applying measure: " + str(algorithm_name))

            unstored_result = algorithm.evaluate()
            ##----------------- Prepare a Float object for result ----------------##
            metrics_results[algorithm_name] = unstored_result

        result = DatatypeMeasure(analyzed_datatype=time_series,
                                 storage_path=self.storage_path,
                                 data_name=self._ui_name,
                                 metrics=metrics_results)
        return result
    def launch(self, time_series, algorithms=None, start_point=None, segment=None):
        """ 
        Launch algorithm and build results.

        :param time_series: the time series on which the algorithms are run
        :param algorithms:  the algorithms to be run for computing measures on the time series
        :type  algorithms:  any subclass of BaseTimeseriesMetricAlgorithm
                            (KuramotoIndex, GlobalVariance, VarianceNodeVariance)
        :rtype: `DatatypeMeasure`
        """
        if algorithms is None:
            algorithms = self.available_algorithms.keys()

        shape = time_series.read_data_shape()
        log_debug_array(LOG, time_series, "time_series")

        metrics_results = {}
        for algorithm_name in algorithms:
            ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
            node_slice = [slice(shape[0]), slice(shape[1]), slice(shape[2]), slice(shape[3])]

            ##---------- Iterate over slices and compose final result ------------##
            unstored_ts = TimeSeries(use_storage=False)

            unstored_ts.data = time_series.read_data_slice(tuple(node_slice))

            ##-------------------- Fill Algorithm for Analysis -------------------##
            algorithm = self.available_algorithms[algorithm_name](time_series=unstored_ts)
            if segment is not None:
                algorithm.segment = segment
            if start_point is not None:
                algorithm.start_point = start_point

            ## Validate that current algorithm's filter is valid.
            if (algorithm.accept_filter is not None and
                    not algorithm.accept_filter.get_python_filter_equivalent(time_series)):
                LOG.warning('Measure algorithm will not be computed because of incompatibility on input. '
                            'Filters failed on algo: ' + str(algorithm_name))
                continue
            else:
                LOG.debug("Applying measure: " + str(algorithm_name))

            unstored_result = algorithm.evaluate()
            ##----------------- Prepare a Float object(s) for result ----------------##
            if isinstance(unstored_result, dict):
                metrics_results.update(unstored_result)
            else:
                metrics_results[algorithm_name] = unstored_result

        result = DatatypeMeasure(analyzed_datatype=time_series, storage_path=self.storage_path,
                                 data_name=self._ui_name, metrics=metrics_results)
        return result
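Every entry of available_algorithms follows the same small contract: construct with time_series=..., optionally set segment/start_point, then evaluate() returns a float (or a dict of named floats, which the second variant above merges). A minimal GlobalVariance-style stand-in, assuming only that contract:

class ToyGlobalVariance(object):
    """Illustrative metric: variance over the whole 4D data block."""
    accept_filter = None

    def __init__(self, time_series):
        self.time_series = time_series

    def evaluate(self):
        return float(self.time_series.data.var())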
Example #9
    def launch(self, view_model):
        # type: (WaveletAdapterModel) -> (WaveletCoefficientsIndex)
        """ 
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the wavelet coefficients for the specified time series
        """
        frequencies_array = numpy.array([])
        if view_model.frequencies is not None:
            frequencies_array = view_model.frequencies.to_array()

        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
        assert isinstance(time_series_h5, TimeSeriesH5)

        # --------------------- Prepare result entities ----------------------##
        wavelet_index = WaveletCoefficientsIndex()
        dest_path = self.path_for(WaveletCoefficientsH5, wavelet_index.gid)
        wavelet_h5 = WaveletCoefficientsH5(path=dest_path)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [
            slice(self.input_shape[0]),
            slice(self.input_shape[1]), None,
            slice(self.input_shape[3])
        ]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            partial_wavelet = compute_continuous_wavelet_transform(
                small_ts, view_model.frequencies, view_model.sample_period,
                view_model.q_ratio, view_model.normalisation,
                view_model.mother)
            wavelet_h5.write_data_slice(partial_wavelet)

        time_series_h5.close()

        partial_wavelet.source.gid = view_model.time_series
        partial_wavelet.gid = uuid.UUID(wavelet_index.gid)

        wavelet_index.fill_from_has_traits(partial_wavelet)
        self.fill_index_from_h5(wavelet_index, wavelet_h5)

        wavelet_h5.store(partial_wavelet, scalars_only=True)
        wavelet_h5.frequencies.store(frequencies_array)
        wavelet_h5.close()

        return wavelet_index
Example #10
    def launch(self, view_model):
        """ 
        Launch algorithm and build results. 
        """
        # --------- Prepare a WaveletCoefficients object for result ----------##
        frequencies_array = numpy.array([])
        if self.algorithm.frequencies is not None:
            frequencies_array = self.algorithm.frequencies.to_array()

        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
        assert isinstance(time_series_h5, TimeSeriesH5)

        wavelet_index = WaveletCoefficientsIndex()
        dest_path = h5.path_for(self.storage_path, WaveletCoefficientsH5, wavelet_index.gid)

        wavelet_h5 = WaveletCoefficientsH5(path=dest_path)
        wavelet_h5.gid.store(uuid.UUID(wavelet_index.gid))
        wavelet_h5.source.store(time_series_h5.gid.load())
        wavelet_h5.mother.store(self.algorithm.mother)
        wavelet_h5.q_ratio.store(self.algorithm.q_ratio)
        wavelet_h5.sample_period.store(self.algorithm.sample_period)
        wavelet_h5.frequencies.store(frequencies_array)
        wavelet_h5.normalisation.store(self.algorithm.normalisation)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_wavelet = self.algorithm.evaluate()
            wavelet_h5.write_data_slice(partial_wavelet)

        wavelet_h5.close()
        time_series_h5.close()

        wavelet_index.fk_source_gid = self.input_time_series_index.gid
        wavelet_index.mother = self.algorithm.mother
        wavelet_index.normalisation = self.algorithm.normalisation
        wavelet_index.q_ratio = self.algorithm.q_ratio
        wavelet_index.sample_period = self.algorithm.sample_period
        wavelet_index.number_of_scales = frequencies_array.shape[0]
        wavelet_index.frequencies_min, wavelet_index.frequencies_max, _ = from_ndarray(frequencies_array)

        return wavelet_index
Example #11
    def launch(self, view_model):
        # type: (NodeCoherenceModel) -> [CoherenceSpectrumIndex]
        """
        Launch algorithm and build results. 
        """
        # --------- Prepare a CoherenceSpectrum object for result ------------##
        coherence_spectrum_index = CoherenceSpectrumIndex()
        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

        dest_path = h5.path_for(self.storage_path, CoherenceSpectrumH5, coherence_spectrum_index.gid)
        coherence_h5 = CoherenceSpectrumH5(dest_path)
        coherence_h5.gid.store(uuid.UUID(coherence_spectrum_index.gid))
        coherence_h5.source.store(view_model.time_series)
        coherence_h5.nfft.store(self.algorithm.nfft)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        input_shape = time_series_h5.data.shape
        node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
        partial_coh = None
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_coh = self.algorithm.evaluate()
            coherence_h5.write_data_slice(partial_coh)
        coherence_h5.frequency.store(partial_coh.frequency)
        array_metadata = coherence_h5.array_data.get_cached_metadata()
        freq_metadata = coherence_h5.frequency.get_cached_metadata()
        coherence_h5.close()
        time_series_h5.close()

        coherence_spectrum_index.array_data_min = array_metadata.min
        coherence_spectrum_index.array_data_max = array_metadata.max
        coherence_spectrum_index.array_data_mean = array_metadata.mean
        coherence_spectrum_index.array_has_complex = array_metadata.has_complex
        coherence_spectrum_index.array_is_finite = array_metadata.is_finite
        coherence_spectrum_index.shape = json.dumps(coherence_h5.array_data.shape)
        coherence_spectrum_index.ndim = len(coherence_h5.array_data.shape)
        coherence_spectrum_index.fk_source_gid = self.input_time_series_index.gid
        coherence_spectrum_index.nfft = partial_coh.nfft
        coherence_spectrum_index.frequencies_min = freq_metadata.min
        coherence_spectrum_index.frequencies_max = freq_metadata.max
        coherence_spectrum_index.subtype = CoherenceSpectrum.__name__

        return coherence_spectrum_index
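self.algorithm.evaluate() here returns a per-frequency coherence matrix between nodes for one state variable; a minimal pairwise sketch of the same quantity with scipy.signal.coherence (Welch-style, fs and nfft illustrative):

import numpy
from scipy.signal import coherence

data = numpy.random.random((4096, 8))             # toy (time, nodes) block
fs, nfft = 250.0, 256

n_nodes = data.shape[1]
coh = numpy.empty((nfft // 2 + 1, n_nodes, n_nodes))
for i in range(n_nodes):
    for j in range(n_nodes):
        freqs, coh[:, i, j] = coherence(data[:, i], data[:, j], fs=fs, nperseg=nfft)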
    def launch(self, view_model):
        # type: (BalloonModelAdapterModel) -> [TimeSeriesRegionIndex]
        """
        Launch algorithm and build results.

        :param view_model: the ViewModel keeping the algorithm inputs
        :returns: the simulated BOLD signal
        :rtype: `TimeSeriesRegionIndex`
        """
        input_time_series_h5 = h5.h5_file_for_index(
            self.input_time_series_index)
        time_line = input_time_series_h5.read_time_page(0, self.input_shape[0])

        bold_signal_index = TimeSeriesRegionIndex()
        bold_signal_h5_path = h5.path_for(self.storage_path,
                                          TimeSeriesRegionH5,
                                          bold_signal_index.gid)
        bold_signal_h5 = TimeSeriesRegionH5(bold_signal_h5_path)
        bold_signal_h5.gid.store(uuid.UUID(bold_signal_index.gid))
        self._fill_result_h5(bold_signal_h5, input_time_series_h5)

        # ---------- Iterate over slices and compose final result ------------##

        node_slice = [
            slice(self.input_shape[0]),
            slice(self.input_shape[1]), None,
            slice(self.input_shape[3])
        ]
        small_ts = TimeSeries()
        small_ts.sample_period = self.input_time_series_index.sample_period
        small_ts.time = time_line

        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = input_time_series_h5.read_data_slice(
                tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_bold = self.algorithm.evaluate()
            bold_signal_h5.write_data_slice_on_grow_dimension(
                partial_bold.data, grow_dimension=2)

        bold_signal_h5.write_time_slice(time_line)
        bold_signal_shape = bold_signal_h5.data.shape
        bold_signal_h5.nr_dimensions.store(len(bold_signal_shape))
        bold_signal_h5.close()
        input_time_series_h5.close()

        self._fill_result_index(bold_signal_index, bold_signal_shape)
        return bold_signal_index
    def launch(self, view_model):
        # type: (NodeComplexCoherenceModel) -> [ComplexCoherenceSpectrumIndex]
        """
        Launch algorithm and build results.

        :returns: the `ComplexCoherenceSpectrum` built with the given time-series
        """
        # ------- Prepare a ComplexCoherenceSpectrum object for result -------##
        complex_coherence_spectrum_index = ComplexCoherenceSpectrumIndex()
        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

        dest_path = h5.path_for(self.storage_path, ComplexCoherenceSpectrumH5, complex_coherence_spectrum_index.gid)
        spectra_h5 = ComplexCoherenceSpectrumH5(dest_path)
        spectra_h5.gid.store(uuid.UUID(complex_coherence_spectrum_index.gid))
        spectra_h5.source.store(time_series_h5.gid.load())

        # ------------------- NOTE: Assumes 4D TimeSeries. -------------------##
        input_shape = time_series_h5.data.shape
        node_slice = [slice(input_shape[0]), slice(input_shape[1]), slice(input_shape[2]), slice(input_shape[3])]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts

        partial_result = self.algorithm.evaluate()
        self.log.debug("got partial_result")
        self.log.debug("partial segment_length is %s" % (str(partial_result.segment_length)))
        self.log.debug("partial epoch_length is %s" % (str(partial_result.epoch_length)))
        self.log.debug("partial windowing_function is %s" % (str(partial_result.windowing_function)))
        # LOG.debug("partial frequency vector is %s" % (str(partial_result.frequency)))

        spectra_h5.write_data_slice(partial_result)
        spectra_h5.segment_length.store(partial_result.segment_length)
        spectra_h5.epoch_length.store(partial_result.epoch_length)
        spectra_h5.windowing_function.store(partial_result.windowing_function)
        # spectra.frequency = partial_result.frequency
        spectra_h5.close()
        time_series_h5.close()

        complex_coherence_spectrum_index.fk_source_gid = self.input_time_series_index.gid
        complex_coherence_spectrum_index.epoch_length = partial_result.epoch_length
        complex_coherence_spectrum_index.segment_length = partial_result.segment_length
        complex_coherence_spectrum_index.windowing_function = partial_result.windowing_function
        complex_coherence_spectrum_index.frequency_step = partial_result.freq_step
        complex_coherence_spectrum_index.max_frequency = partial_result.max_freq

        return complex_coherence_spectrum_index
    def launch(self, view_model):
        # type: (NodeCovarianceAdapterModel) -> [CovarianceIndex]
        """ 
        Launch algorithm and build results.

        :returns: the `CovarianceIndex` built with the given time_series index as source
        """
        # Create an index for the computed covariance.
        covariance_index = CovarianceIndex()
        covariance_h5_path = h5.path_for(self.storage_path, CovarianceH5,
                                         covariance_index.gid)
        covariance_h5 = CovarianceH5(covariance_h5_path)

        # NOTE: Assumes 4D, Simulator timeSeries.
        node_slice = [
            slice(self.input_shape[0]), None,
            slice(self.input_shape[2]), None
        ]

        with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
            for mode in range(self.input_shape[3]):
                for var in range(self.input_shape[1]):
                    small_ts = TimeSeries()
                    node_slice[1] = slice(var, var + 1)
                    node_slice[3] = slice(mode, mode + 1)
                    small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
                    partial_cov = self._compute_node_covariance(
                        small_ts, ts_h5)
                    covariance_h5.write_data_slice(partial_cov.array_data)
            array_metadata = covariance_h5.array_data.get_cached_metadata()

        covariance_index.fk_source_gid = self.input_time_series_index.gid
        covariance_index.subtype = type(covariance_index).__name__
        covariance_index.array_has_complex = array_metadata.has_complex

        if not covariance_index.array_has_complex:
            covariance_index.array_data_min = float(array_metadata.min)
            covariance_index.array_data_max = float(array_metadata.max)
            covariance_index.array_data_mean = float(array_metadata.mean)

        covariance_index.array_is_finite = array_metadata.is_finite
        covariance_index.shape = json.dumps(covariance_h5.array_data.shape)
        covariance_index.ndim = len(covariance_h5.array_data.shape)
        # TODO write this part better, by moving into the Model fill_from...

        covariance_h5.gid.store(uuid.UUID(covariance_index.gid))
        covariance_h5.source.store(view_model.time_series)
        covariance_h5.close()
        return covariance_index
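_compute_node_covariance reduces one (time, 1, nodes, 1) slice to a node-by-node covariance matrix; the test fixture later in this listing does the equivalent explicitly, roughly this minimal sketch:

import numpy

block = numpy.random.random((500, 1, 16, 1))      # one (time, var, node, mode) slice
data = block[:, 0, :, 0]                          # -> (time, nodes)
data = data - data.mean(axis=0)                   # zero-mean each node
partial_cov = numpy.cov(data.T)                   # (nodes, nodes) covariance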
Example #15
    def _create_timeseries(self):
        """Launch adapter to persist a TimeSeries entity"""
        storage_path = FilesHelper().get_project_folder(self.test_project, str(self.operation.id))

        time_series = TimeSeries()
        time_series.sample_period = 10.0
        time_series.start_time = 0.0
        time_series.storage_path = storage_path
        time_series.write_data_slice(numpy.array([1.0, 2.0, 3.0]))
        time_series.close_file()
        time_series.sample_period_unit = 'ms'

        self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
        count_ts = self.count_all_entities(TimeSeries)
        self.assertEqual(1, count_ts, "Should be only one TimeSeries")
Example #16
    def launch(self, view_model):
        # type: (PearsonCorrelationCoefficientAdapterModel) -> [CorrelationCoefficientsIndex]
        """
        Launch algorithm and build results.
        Compute the node-pairwise pearson correlation coefficient of the given input 4D TimeSeries  datatype.

        The result will contain values between -1 and 1, inclusive.

        :param time_series: the input time-series for which correlation coefficient should be computed
        :param t_start: the physical time interval start for the analysis
        :param t_end: physical time, interval end
        :returns: the correlation coefficient for the given time series
        :rtype: `CorrelationCoefficients`
        """
        with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
            ts_labels_ordering = ts_h5.labels_ordering.load()
            result = self._compute_correlation_coefficients(ts_h5, view_model.t_start, view_model.t_end)

        if isinstance(self.input_time_series_index, TimeSeriesEEGIndex) \
                or isinstance(self.input_time_series_index, TimeSeriesMEGIndex) \
                or isinstance(self.input_time_series_index, TimeSeriesSEEGIndex):
            labels_ordering = ["Sensor", "Sensor", "1", "1"]
        else:
            labels_ordering = list(CorrelationCoefficients.labels_ordering.default)
            labels_ordering[0] = ts_labels_ordering[2]
            labels_ordering[1] = ts_labels_ordering[2]

        corr_coef = CorrelationCoefficients()
        corr_coef.array_data = result
        corr_coef.source = TimeSeries(gid=view_model.time_series)
        corr_coef.labels_ordering = labels_ordering

        return h5.store_complete(corr_coef, self.storage_path)
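_compute_correlation_coefficients amounts to a node-pairwise Pearson matrix over the selected window; a minimal sketch with numpy.corrcoef, whose output already lies in [-1, 1] (the window is expressed here in sample indices for simplicity):

import numpy

data = numpy.random.random((1000, 16))            # toy (time, nodes) block
t_start, t_end = 100, 900                         # illustrative sample window
result = numpy.corrcoef(data[t_start:t_end].T)    # (nodes, nodes) Pearson matrix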
Example #17
def test_store_load_region_mapping(session, connectivity_factory,
                                   surface_factory, region_mapping_factory,
                                   sensors_factory):
    connectivity = connectivity_factory(2)
    conn_idx = ConnectivityIndex()
    conn_idx.fill_from_has_traits(connectivity)
    session.add(conn_idx)

    surface = surface_factory(5)
    surf_idx = SurfaceIndex()
    surf_idx.fill_from_has_traits(surface)
    session.add(surf_idx)

    region_mapping = region_mapping_factory(surface, connectivity)
    rm_idx = RegionMappingIndex()
    rm_idx.fill_from_has_traits(region_mapping)
    rm_idx.connectivity = conn_idx
    rm_idx.surface = surf_idx
    session.add(rm_idx)

    sensors = sensors_factory("SEEG", 3)
    sensors_seeg_idx = SensorsIndex()
    sensors_seeg_idx.fill_from_has_traits(sensors)
    session.add(sensors_seeg_idx)

    sensors_eeg = sensors_factory("EEG", 3)
    sensors_eeg_idx = SensorsIndex()
    sensors_eeg_idx.fill_from_has_traits(sensors_eeg)
    session.add(sensors_eeg_idx)

    time_series = TimeSeries(data=numpy.arange(5))

    fcd = Fcd(
        array_data=numpy.arange(5),
        source=time_series,
    )

    ts_index = TimeSeriesIndex()
    ts_index.fill_from_has_traits(time_series)
    session.add(ts_index)

    fcd_index = FcdIndex()
    fcd_index.fill_from_has_traits(fcd)
    fcd_index.source = ts_index
    session.add(fcd_index)

    session.commit()

    res = session.query(ConnectivityIndex)
    assert res.count() == 1
    assert res[0].number_of_regions == 2
    assert res[0].number_of_connections == 4
    assert res[0].undirected is True
    assert res[0].weights_min == 0

    res = session.query(SurfaceIndex)
    assert res.count() == 1

    res = session.query(RegionMappingIndex)
    assert res.count() == 1
    def _import(self, import_file_path=None):
        """
        This method is used for importing data in NIFTI format
        :param import_file_path: absolute path of the file to be imported
        """

        ### Retrieve Adapter instance
        group = dao.find_group('tvb.adapters.uploaders.nifti_importer',
                               'NIFTIImporter')
        importer = ABCAdapter.build_adapter(group)
        importer.meta_data = {
            DataTypeMetaData.KEY_SUBJECT: "",
            DataTypeMetaData.KEY_STATE: "RAW"
        }

        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user,
                                     self.test_project.id, **args)

        time_series = TimeSeries()
        data_types = FlowService().get_available_datatypes(
            self.test_project.id, time_series.module + "." + time_series.type)
        self.assertEqual(1, len(data_types),
                         "Project should contain only one data type.")

        time_series = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(time_series is not None,
                        "TimeSeries should not be none")

        return time_series
Example #19
    def build():
        ts_index = time_series_factory()

        ts_h5 = h5_file_for_index(ts_index)
        ts = TimeSeries()
        ts_h5.load_into(ts)
        ts_h5.close()

        data_shape = ts.data.shape

        result_shape = (data_shape[2], data_shape[2], data_shape[1],
                        data_shape[3])
        result = numpy.zeros(result_shape)

        for mode in range(data_shape[3]):
            for var in range(data_shape[1]):
                data = ts_h5.data[:, var, :, mode]
                data = data - data.mean(axis=0)
                result[:, :, var, mode] = numpy.cov(data.T)

        covariance = Covariance(source=ts, array_data=result)

        op = operation_factory()

        covariance_db = CovarianceIndex()
        covariance_db.fk_from_operation = op.id
        covariance_db.fill_from_has_traits(covariance)

        covariance_h5_path = h5.path_for_stored_index(covariance_db)
        with CovarianceH5(covariance_h5_path) as f:
            f.store(covariance)

        session.add(covariance_db)
        session.commit()
        return covariance_db
Example #20
    def create_time_series(self,
                           storage_path,
                           connectivity=None,
                           surface=None,
                           region_map=None,
                           region_volume_map=None):
        """
        Create a time series instance that will be populated by this monitor
        :param surface: if present a TimeSeriesSurface is returned
        :param connectivity: if present a TimeSeriesRegion is returned
        Otherwise a plain TimeSeries will be returned
        """
        if surface is not None:
            return TimeSeriesSurface(storage_path=storage_path,
                                     surface=surface,
                                     sample_period=self.period,
                                     title='Surface ' +
                                     self.__class__.__name__,
                                     **self._transform_user_tags())
        if connectivity is not None:
            return TimeSeriesRegion(storage_path=storage_path,
                                    connectivity=connectivity,
                                    region_mapping=region_map,
                                    region_mapping_volume=region_volume_map,
                                    sample_period=self.period,
                                    title='Regions ' + self.__class__.__name__,
                                    **self._transform_user_tags())

        return TimeSeries(storage_path=storage_path,
                          sample_period=self.period,
                          title=' ' + self.__class__.__name__,
                          **self._transform_user_tags())
Example #21
    def launch(self, view_model):
        # type: (NodeCoherenceModel) -> [CoherenceSpectrumIndex]
        """
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the node coherence for the specified time series
        """
        # -------------------- Prepare result entities -----------------------##
        coherence_spectrum_index = CoherenceSpectrumIndex()
        dest_path = h5.path_for(self.storage_path, CoherenceSpectrumH5,
                                coherence_spectrum_index.gid)
        coherence_h5 = CoherenceSpectrumH5(dest_path)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
        input_shape = time_series_h5.data.shape
        node_slice = [
            slice(input_shape[0]), None,
            slice(input_shape[2]),
            slice(input_shape[3])
        ]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
        partial_coh = None
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            partial_coh = calculate_cross_coherence(small_ts, view_model.nfft)
            coherence_h5.write_data_slice(partial_coh)

        time_series_h5.close()

        partial_coh.source.gid = view_model.time_series
        partial_coh.gid = uuid.UUID(coherence_spectrum_index.gid)

        coherence_spectrum_index.fill_from_has_traits(partial_coh)
        self.fill_index_from_h5(coherence_spectrum_index, coherence_h5)

        coherence_h5.store(partial_coh, scalars_only=True)
        coherence_h5.frequency.store(partial_coh.frequency)
        coherence_h5.close()

        return coherence_spectrum_index
    def create_ICA(self, timeseries):
        """
        :returns: persisted entity IndependentComponents
        """
        operation, _, storage_path = self.__create_operation()
        partial_ts = TimeSeries(use_storage=False)
        partial_ts.data = numpy.random.random((10, 10, 10, 10))
        partial_ica = IndependentComponents(source=partial_ts,
                                            component_time_series=numpy.random.random((10, 10, 10, 10)),
                                            prewhitening_matrix=numpy.random.random((10, 10, 10, 10)),
                                            unmixing_matrix=numpy.random.random((10, 10, 10, 10)),
                                            n_components=10, use_storage=False)
        ica = IndependentComponents(source=timeseries, n_components=10, storage_path=storage_path)
        ica.write_data_slice(partial_ica)
        adapter_instance = StoreAdapter([ica])
        OperationService().initiate_prelaunch(operation, adapter_instance, {})
        return ica
Example #24
    def launch(self, view_model):
        # type: (FooDataImporterModel) -> TimeSeriesIndex

        array_data = numpy.loadtxt(view_model.array_data)

        ts = TimeSeries(data=array_data)
        ts.configure()

        ts_index = TimeSeriesIndex()
        ts_index.fill_from_has_traits(ts)

        ts_h5_path = h5.path_for(self.storage_path, TimeSeriesH5, ts_index.gid)

        with TimeSeriesH5(ts_h5_path) as ts_h5:
            ts_h5.store(ts, scalars_only=True)
            ts_h5.store_generic_attributes(GenericAttributes())
            ts_h5.write_data_slice(array_data)
        return ts_index
Example #25
    def plot_timeseries_interactive(self, timeseries, first_n=-1, **kwargs):
        if isinstance(timeseries, TimeSeries):
            self.plot_tvb_timeseries_interactive(timeseries, first_n, **kwargs)
        elif isinstance(timeseries, Timeseries):
            self.plot_tvb_timeseries_interactive(timeseries._tvb, first_n, **kwargs)
        elif isinstance(timeseries, numpy.ndarray):
            self.plot_tvb_timeseries_interactive(Timeseries(data=timeseries), first_n, **kwargs)
        elif isinstance(timeseries, (list, tuple)):
            self.plot_tvb_timeseries_interactive(Timeseries(data=TimeSeries(data=numpy.stack(timeseries, axis=1))),
                                                 first_n, **kwargs)
        elif isinstance(timeseries, dict):
            ts = numpy.stack(list(timeseries.values()), axis=1)
            timeseries = TimeSeries(data=ts, labels_dimensions={"State Variable": list(timeseries.keys())})
            self.plot_tvb_timeseries_interactive(timeseries, first_n, **kwargs)
        else:
            raise_value_error("Input timeseries: %s\nis not one of the following types: "
                              "[Timeseries (tvb-scripts), TimeSeries (TVB), numpy.ndarray, dict, list, tuple]" %
                              str(timeseries))
Example #26
    def launch(self,
               time_series,
               mother=None,
               sample_period=None,
               normalisation=None,
               q_ratio=None,
               frequencies='Range',
               frequencies_parameters=None):
        """ 
        Launch algorithm and build results. 
        """
        ##--------- Prepare a WaveletCoefficients object for result ----------##
        frequencies_array = numpy.array([])
        if self.algorithm.frequencies is not None:
            frequencies_array = numpy.array(list(self.algorithm.frequencies))
        wavelet = WaveletCoefficients(
            source=time_series,
            mother=self.algorithm.mother,
            q_ratio=self.algorithm.q_ratio,
            sample_period=self.algorithm.sample_period,
            frequencies=frequencies_array,
            normalisation=self.algorithm.normalisation,
            storage_path=self.storage_path)

        ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [
            slice(self.input_shape[0]),
            slice(self.input_shape[1]), None,
            slice(self.input_shape[3])
        ]

        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        small_ts.sample_rate = time_series.sample_rate
        small_ts.sample_period = time_series.sample_period
        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_wavelet = self.algorithm.evaluate()
            wavelet.write_data_slice(partial_wavelet)

        wavelet.close_file()
        return wavelet
    def launch(self, view_model):
        # type: (CrossCorrelateAdapterModel) -> [CrossCorrelationIndex]
        """ 
        Launch algorithm and build results.
        Compute the node-pairwise cross-correlation of the source 4D TimeSeries represented by the index given as input.

        Return a CrossCorrelationIndex. Create a CrossCorrelationH5 that contains the cross-correlation
        sequences for all possible combinations of the nodes.

        See: http://www.scipy.org/doc/api_docs/SciPy.signal.signaltools.html#correlate

        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the cross correlation index for the given time series
        :rtype: `CrossCorrelationIndex`
        """
        # --------- Prepare CrossCorrelationIndex and CrossCorrelationH5 objects for result ------------##
        cross_corr_index = CrossCorrelationIndex()
        cross_corr_h5_path = h5.path_for(self.storage_path, CrossCorrelationH5, cross_corr_index.gid)
        cross_corr_h5 = CrossCorrelationH5(cross_corr_h5_path)

        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()

        with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
            small_ts.sample_period = ts_h5.sample_period.load()
            small_ts.sample_period_unit = ts_h5.sample_period_unit.load()
            partial_cross_corr = None
            for var in range(self.input_shape[1]):
                node_slice[1] = slice(var, var + 1)
                small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
                partial_cross_corr = self._compute_cross_correlation(small_ts, ts_h5)
                cross_corr_h5.write_data_slice(partial_cross_corr)

        partial_cross_corr.source.gid = view_model.time_series
        partial_cross_corr.gid = uuid.UUID(cross_corr_index.gid)

        cross_corr_index.fill_from_has_traits(partial_cross_corr)
        self.fill_index_from_h5(cross_corr_index, cross_corr_h5)

        cross_corr_h5.store(partial_cross_corr, scalars_only=True)
        cross_corr_h5.close()

        return cross_corr_index
    def launch(self, time_series, nfft=None):
        """ 
        Launch algorithm and build results. 
        """
        # --------- Prepare a CoherenceSpectrum object for result ------------##
        coherence_spectrum_index = CoherenceSpectrumIndex()
        time_series_h5 = h5.h5_file_for_index(time_series)

        dest_path = h5.path_for(self.storage_path, CoherenceSpectrumH5,
                                coherence_spectrum_index.gid)
        coherence_h5 = CoherenceSpectrumH5(dest_path)
        coherence_h5.gid.store(uuid.UUID(coherence_spectrum_index.gid))
        coherence_h5.source.store(time_series_h5.gid.load())
        coherence_h5.nfft.store(self.algorithm.nfft)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        input_shape = time_series_h5.data.shape
        node_slice = [
            slice(input_shape[0]), None,
            slice(input_shape[2]),
            slice(input_shape[3])
        ]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        small_ts.sample_period = time_series_h5.sample_period.load()
        partial_coh = None
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_coh = self.algorithm.evaluate()
            coherence_h5.write_data_slice(partial_coh)
        coherence_h5.frequency.store(partial_coh.frequency)
        coherence_spectrum_index.ndim = len(coherence_h5.array_data.shape)
        coherence_h5.close()
        time_series_h5.close()

        coherence_spectrum_index.source_gid = self.input_time_series_index.gid
        coherence_spectrum_index.nfft = partial_coh.nfft
        coherence_spectrum_index.frequencies = partial_coh.frequency

        return coherence_spectrum_index
Example #31
    def launch(self, view_model):
        # type: (ICAAdapterModel) -> [IndependentComponentsIndex]
        """
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the ica index for the specified time series
        """
        # --------------------- Prepare result entities ---------------------##
        ica_index = IndependentComponentsIndex()
        result_path = h5.path_for(self.storage_path, IndependentComponentsH5,
                                  ica_index.gid)
        ica_h5 = IndependentComponentsH5(path=result_path)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
        input_shape = time_series_h5.data.shape
        node_slice = [
            slice(input_shape[0]), None,
            slice(input_shape[2]),
            slice(input_shape[3])
        ]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            partial_ica = compute_ica_decomposition(small_ts,
                                                    view_model.n_components)
            ica_h5.write_data_slice(partial_ica)

        time_series_h5.close()

        partial_ica.source.gid = view_model.time_series
        partial_ica.gid = uuid.UUID(ica_index.gid)

        ica_h5.store(partial_ica, scalars_only=True)
        ica_h5.close()

        ica_index.fill_from_has_traits(partial_ica)

        return ica_index
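compute_ica_decomposition performs a FastICA-style unmixing per state variable; a minimal sketch of the underlying decomposition with scikit-learn (an assumption for illustration; TVB ships its own implementation):

import numpy
from sklearn.decomposition import FastICA

data = numpy.random.random((1000, 16))            # toy (time, nodes) block
ica = FastICA(n_components=4)
sources = ica.fit_transform(data)                 # (time, components)
unmixing = ica.components_                        # (components, nodes)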
Example #32
    def launch(self, view_model):
        # type: (ICAAdapterModel) -> [IndependentComponentsIndex]
        """ 
        Launch algorithm and build results. 
        """
        # --------- Prepare a IndependentComponents object for result ----------##
        ica_index = IndependentComponentsIndex()
        ica_index.fk_source_gid = view_model.time_series.hex

        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

        result_path = h5.path_for(self.storage_path, IndependentComponentsH5,
                                  ica_index.gid)
        ica_h5 = IndependentComponentsH5(path=result_path)
        ica_h5.gid.store(uuid.UUID(ica_index.gid))
        ica_h5.source.store(view_model.time_series)
        ica_h5.n_components.store(self.algorithm.n_components)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        input_shape = time_series_h5.data.shape
        node_slice = [
            slice(input_shape[0]), None,
            slice(input_shape[2]),
            slice(input_shape[3])
        ]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_ica = self.algorithm.evaluate()
            ica_h5.write_data_slice(partial_ica)
        array_metadata = ica_h5.unmixing_matrix.get_cached_metadata()
        ica_index.array_has_complex = array_metadata.has_complex
        ica_index.shape = json.dumps(ica_h5.unmixing_matrix.shape)
        ica_index.ndim = len(ica_h5.unmixing_matrix.shape)
        ica_h5.close()
        time_series_h5.close()

        return ica_index
Example #33
    def build(data=None):
        time = numpy.linspace(0, 1000, 4000)

        if data is None:
            data = numpy.zeros((time.size, 1, 3, 1))
            data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)
            data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)
            data[:, 0, 2, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 100) + numpy.sin(
                2 * numpy.pi * time / 1000.0 * 300)

        return TimeSeries(time=time, data=data, sample_period=1.0 / 4000, sample_period_unit="sec")
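# Hedged usage sketch for the factory above, assuming it is callable directly
# and that `TimeSeries` is tvb.datatypes.time_series.TimeSeries: the result
# carries one state variable, three channels (a 40 Hz sine, a 200 Hz sine, and
# a 100 Hz + 300 Hz mix) and one mode.
ts = build()
assert ts.data.shape == (4000, 1, 3, 1)          # (time, sv, channel, mode)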
Example #34
    def launch(self, time_series):
        """ 
        Launch algorithm and build results.

        :returns: the `CovarianceIndex` built with the given time_series index as source
        """
        # Create an index for the computed covariance.
        covariance_index = CovarianceIndex()
        covariance_h5_path = h5.path_for(self.storage_path, CovarianceH5,
                                         covariance_index.gid)
        covariance_h5 = CovarianceH5(covariance_h5_path)

        # NOTE: Assumes 4D, Simulator timeSeries.
        node_slice = [
            slice(self.input_shape[0]), None,
            slice(self.input_shape[2]), None
        ]

        with h5.h5_file_for_index(time_series) as ts_h5:
            for mode in range(self.input_shape[3]):
                for var in range(self.input_shape[1]):
                    small_ts = TimeSeries()
                    node_slice[1] = slice(var, var + 1)
                    node_slice[3] = slice(mode, mode + 1)
                    small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
                    partial_cov = self._compute_node_covariance(
                        small_ts, ts_h5)
                    covariance_h5.write_data_slice(partial_cov.array_data)
            ts_array_metadata = covariance_h5.array_data.get_cached_metadata()

        covariance_index.source_gid = time_series.gid
        covariance_index.subtype = type(covariance_index).__name__
        covariance_index.array_data_min = ts_array_metadata.min
        covariance_index.array_data_max = ts_array_metadata.max
        covariance_index.array_data_mean = ts_array_metadata.mean
        covariance_index.ndim = len(covariance_h5.array_data.shape)

        covariance_h5.gid.store(uuid.UUID(covariance_index.gid))
        covariance_h5.source.store(uuid.UUID(time_series.gid))
        covariance_h5.close()
        return covariance_index
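# _compute_node_covariance is not shown here; as a rough, hypothetical stand-in
# (an assumption, not the TVB analyzer), the per-slice result can be read as an
# (n_nodes x n_nodes) covariance of the node time courses, e.g. via numpy.cov:
import numpy

block = numpy.random.rand(500, 1, 8, 1)          # one (var, mode) slice
signals = block[:, 0, :, 0]                      # -> (time, node)
cov = numpy.cov(signals, rowvar=False)           # -> (node, node)
assert cov.shape == (8, 8)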
Example #35
    def launch(self, view_model):
        # type: (PCAAdapterModel) -> [PrincipalComponentsIndex]
        """ 
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the `PrincipalComponentsIndex` object built with the given time series as source
        """
        # --------------------- Prepare result entities ----------------------##
        principal_components_index = PrincipalComponentsIndex()
        dest_path = h5.path_for(self.storage_path, PrincipalComponentsH5,
                                principal_components_index.gid)
        pca_h5 = PrincipalComponentsH5(path=dest_path)

        # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
        input_shape = time_series_h5.data.shape
        node_slice = [
            slice(input_shape[0]), None,
            slice(input_shape[2]),
            slice(input_shape[3])
        ]

        # ---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries()
        for var in range(input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
            self.time_series = small_ts.gid
            partial_pca = compute_pca(small_ts)
            pca_h5.write_data_slice(partial_pca)

        time_series_h5.close()

        partial_pca.source.gid = view_model.time_series
        partial_pca.gid = uuid.UUID(principal_components_index.gid)
        principal_components_index.fill_from_has_traits(partial_pca)

        pca_h5.store(partial_pca, scalars_only=True)
        pca_h5.close()

        return principal_components_index
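# compute_pca above is TVB's analyzer entry point; a minimal stand-in sketch of
# the underlying idea (an assumption, not the library call) is PCA via SVD on
# the demeaned (time x node) block of each state variable:
import numpy

block = numpy.random.rand(500, 1, 8, 1)[:, 0, :, 0]   # -> (time, node)
centered = block - block.mean(axis=0)
u, s, vt = numpy.linalg.svd(centered, full_matrices=False)
weights = vt                                           # component loadings, (node, node)
fractions = s ** 2 / numpy.sum(s ** 2)                 # variance explained per component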
Example #36
    def launch(self, array_data):

        array_data = numpy.loadtxt(array_data)

        ts = TimeSeries()
        ts.storage_path = self.storage_path
        #ts.configure()
        ts.write_data_slice(array_data)
        ts.close_file()
        return ts
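# Hedged usage note for the importer above: numpy.loadtxt parses a whitespace-
# separated text file into a (rows, columns) array, so a file laid out as one
# time point per row imports directly. The file name below is hypothetical.
#
#   $ cat activity.txt
#   0.1 0.2 0.3
#   0.4 0.5 0.6
#
# adapter.launch("activity.txt") then writes a (2, 3) array into the TimeSeries.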
Example #37
    def launch(self,
               time_series,
               dt=None,
               bold_model=None,
               RBM=None,
               neural_input_transformation=None):
        """
        Launch algorithm and build results.

        :param time_series: the input time-series used as neural activation in the Balloon Model
        :returns: the simulated BOLD signal
        :rtype: `TimeSeries`
        """
        time_line = time_series.read_time_page(0, self.input_shape[0])
        bold_signal = TimeSeriesRegion(storage_path=self.storage_path,
                                       sample_period=time_series.sample_period,
                                       start_time=time_series.start_time,
                                       connectivity=time_series.connectivity)

        ##---------- Iterate over slices and compose final result ------------##

        node_slice = [
            slice(self.input_shape[0]),
            slice(self.input_shape[1]), None,
            slice(self.input_shape[3])
        ]
        small_ts = TimeSeries(use_storage=False,
                              sample_period=time_series.sample_period,
                              time=time_line)

        for node in range(self.input_shape[2]):
            node_slice[2] = slice(node, node + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_bold = self.algorithm.evaluate()
            bold_signal.write_data_slice(partial_bold.data, grow_dimension=2)

        bold_signal.write_time_slice(time_line)
        bold_signal.close_file()
        return bold_signal
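# Unlike the variable-wise loops above, this adapter slices axis 2 (space) and
# re-assembles the partial BOLD signals along that same axis via
# grow_dimension=2. A rough numpy analogue of that accumulation (an assumption
# about write_data_slice semantics, not the storage-layer implementation):
import numpy

partials = [numpy.random.rand(100, 1, 1, 1) for _ in range(16)]  # one per node
bold = numpy.concatenate(partials, axis=2)                       # grow along space
assert bold.shape == (100, 1, 16, 1)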
Example #38
    def launch(self, view_model):
        # type: (NodeCovarianceAdapterModel) -> [CovarianceIndex]
        """ 
        Launch algorithm and build results.
        :param view_model: the ViewModel keeping the algorithm inputs
        :return: the `CovarianceIndex` built with the given time_series index as source
        """
        # -------------------- Prepare result entities ---------------------##
        covariance_index = CovarianceIndex()
        covariance_h5_path = h5.path_for(self.storage_path, CovarianceH5,
                                         covariance_index.gid)
        covariance_h5 = CovarianceH5(covariance_h5_path)

        # ------------ NOTE: Assumes 4D, Simulator timeSeries -------------##
        node_slice = [
            slice(self.input_shape[0]), None,
            slice(self.input_shape[2]), None
        ]
        ts_h5 = h5.h5_file_for_index(self.input_time_series_index)

        for mode in range(self.input_shape[3]):
            for var in range(self.input_shape[1]):
                small_ts = TimeSeries()
                node_slice[1] = slice(var, var + 1)
                node_slice[3] = slice(mode, mode + 1)
                small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
                partial_cov = self._compute_node_covariance(small_ts, ts_h5)
                covariance_h5.write_data_slice(partial_cov.array_data)

        ts_h5.close()

        partial_cov.source.gid = view_model.time_series
        partial_cov.gid = uuid.UUID(covariance_index.gid)

        covariance_index.fill_from_has_traits(partial_cov)
        self.fill_index_from_h5(covariance_index, covariance_h5)

        covariance_h5.store(partial_cov, scalars_only=True)
        covariance_h5.close()
        return covariance_index
Example #39
    def launch(self, time_series):
        """ 
        Launch algorithm and build results.

        :returns: the `PrincipalComponents` object built with the given time series as source
        """
        ##--------- Prepare a PrincipalComponents object for result ----------##
        pca_result = PrincipalComponents(source=time_series, storage_path=self.storage_path)
        
        ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
        
        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts 
            partial_pca = self.algorithm.evaluate()
            pca_result.write_data_slice(partial_pca)
        pca_result.close_file()
        return pca_result
    def launch(self, time_series, n_components=None):
        """
        Launch algorithm and build results.
        """
        ##-------- Prepare an IndependentComponents object for result --------##
        ica_result = IndependentComponents(source=time_series,
                                           n_components=int(self.algorithm.n_components),
                                           storage_path=self.storage_path)

        ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]

        ##---------- Iterate over slices and compose final result ------------##
        small_ts = TimeSeries(use_storage=False)
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = time_series.read_data_slice(tuple(node_slice))
            self.algorithm.time_series = small_ts
            partial_ica = self.algorithm.evaluate()
            ica_result.write_data_slice(partial_ica)
        ica_result.close_file()
        return ica_result
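# A hypothetical stand-in for the wrapped ICA analyzer, sketched with
# scikit-learn's FastICA (an assumption; the adapter above delegates to
# self.algorithm.evaluate() rather than calling scikit-learn directly):
import numpy
from sklearn.decomposition import FastICA

signals = numpy.random.rand(500, 8)              # (time, node) for one slice
ica = FastICA(n_components=4)
sources = ica.fit_transform(signals)             # -> (time, component)
assert sources.shape == (500, 4)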
    def launch(self, time_series):
        """ 
        Launch algorithm and build results.

        :returns: the `Covariance` built with the given time series as source
        """
        
        #Create a Covariance DataType object.
        covariance = Covariance(source=time_series, storage_path=self.storage_path)
        
        #NOTE: Assumes 4D, Simulator timeSeries.
        node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), None]
        
        for mode in range(self.input_shape[3]):
            for var in range(self.input_shape[1]):
                small_ts = TimeSeries(use_storage=False)
                node_slice[1] = slice(var, var + 1)
                node_slice[3] = slice(mode, mode + 1)
                small_ts.data = time_series.read_data_slice(tuple(node_slice))
                self.algorithm.time_series = small_ts 
                partial_cov = self.algorithm.evaluate()
                covariance.write_data_slice(partial_cov.array_data)
        covariance.close_file()
        return covariance
    def launch(self, matfile):
        mat = scipy.io.loadmat(matfile)
        hdr = mat['hdr']
        fs, ns = [hdr[key][0, 0][0, 0] for key in ['Fs', 'nSamples']]

        # the entities to populate
        #ch = Sensors(storage_path=self.storage_path)
        ts = TimeSeries(storage_path=self.storage_path)

        # (nchan x ntime) -> (t, sv, ch, mo)
        dat = mat['dat'].T[:, numpy.newaxis, :, numpy.newaxis]

        # write data
        ts.write_data_slice(dat)

        # fill in header info
        ts.length_1d, ts.length_2d, ts.length_3d, ts.length_4d = dat.shape
        ts.labels_ordering = 'Time 1 Channel 1'.split()
        ts.write_time_slice(numpy.r_[:ns] * 1.0 / fs)
        ts.start_time = 0.0
        ts.sample_period_unit = 's'
        ts.sample_period = 1.0 / float(fs)
        ts.close_file()

        # setup sensors information

        # ch.labels = numpy.array(
        #     [str(l[0]) for l in hdr['label'][0, 0][:, 0]])
        # ch.number_of_sensors = ch.labels.size

        return ts #, ch
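# Shape bookkeeping for the importer above, sketched with plain numpy: the MAT
# file stores (channel, time), and the transpose plus two numpy.newaxis inserts
# produce the 4D (time, state-var, channel, mode) layout TimeSeries expects.
import numpy

raw = numpy.random.rand(32, 1000)                        # (channel, time)
dat = raw.T[:, numpy.newaxis, :, numpy.newaxis]
assert dat.shape == (1000, 1, 32, 1)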