def launch(self, view_model):
    # type: (NodeCoherenceModel) -> [CoherenceSpectrumIndex]
    """
    Launch algorithm and build results.

    Computes node coherence per state variable, writing each partial result
    into a CoherenceSpectrumH5, and returns the populated index.
    """
    # --------- Prepare a CoherenceSpectrum object for result ------------##
    coherence_spectrum_index = CoherenceSpectrumIndex()
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    dest_path = h5.path_for(self.storage_path, CoherenceSpectrumH5, coherence_spectrum_index.gid)
    coherence_h5 = CoherenceSpectrumH5(dest_path)
    coherence_h5.gid.store(uuid.UUID(coherence_spectrum_index.gid))
    coherence_h5.source.store(time_series_h5.gid.load())
    coherence_h5.nfft.store(self.algorithm.nfft)
    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    input_shape = time_series_h5.data.shape
    # Full slices on every dimension except dim 1 (state variables), which
    # is filled in per-iteration below.
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    small_ts.sample_period = time_series_h5.sample_period.load()
    partial_coh = None
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_coh = self.algorithm.evaluate()
        coherence_h5.write_data_slice(partial_coh)
    # NOTE(review): if input_shape[1] == 0, partial_coh remains None and the
    # attribute accesses below raise — presumably upstream validation rules
    # out an empty state-variable dimension; confirm.
    coherence_h5.frequency.store(partial_coh.frequency)
    coherence_h5.close()
    # ndim read from the H5 wrapper after close — relies on cached metadata.
    coherence_spectrum_index.ndim = len(coherence_h5.array_data.shape)
    time_series_h5.close()
    coherence_spectrum_index.source_gid = self.input_time_series_index.gid
    coherence_spectrum_index.nfft = partial_coh.nfft
    coherence_spectrum_index.frequencies = partial_coh.frequency
    return coherence_spectrum_index
def launch(self, time_series, mother=None, sample_period=None, normalisation=None, q_ratio=None, frequencies='Range', frequencies_parameters=None): """ Launch algorithm and build results. """ ##--------- Prepare a WaveletCoefficients object for result ----------## frequencies_array = numpy.array([]) if self.algorithm.frequencies is not None: frequencies_array = numpy.array(list(self.algorithm.frequencies)) wavelet = WaveletCoefficients( source=time_series, mother=self.algorithm.mother, q_ratio=self.algorithm.q_ratio, sample_period=self.algorithm.sample_period, frequencies=frequencies_array, normalisation=self.algorithm.normalisation, storage_path=self.storage_path) ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------## node_slice = [ slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3]) ] ##---------- Iterate over slices and compose final result ------------## small_ts = TimeSeries(use_storage=False) small_ts.sample_rate = time_series.sample_rate small_ts.sample_period = time_series.sample_period for node in range(self.input_shape[2]): node_slice[2] = slice(node, node + 1) small_ts.data = time_series.read_data_slice(tuple(node_slice)) self.algorithm.time_series = small_ts partial_wavelet = self.algorithm.evaluate() wavelet.write_data_slice(partial_wavelet) wavelet.close_file() return wavelet
def launch(self, view_model):
    # type: (FourierSpectrumModel) -> dict
    """Build the template parameters for the Fourier spectrum display."""
    self.log.debug("Plot started...")
    # these partial loads are dangerous for TS and FS instances, but efficient
    fs_input_index = self.load_entity_by_gid(view_model.input_data)
    fourier_spectrum = FourierSpectrum()
    with h5.h5_file_for_index(fs_input_index) as input_h5:
        shape = list(input_h5.array_data.shape)
        fourier_spectrum.segment_length = input_h5.segment_length.load()
        fourier_spectrum.windowing_function = input_h5.windowing_function.load()
    ts_index = self.load_entity_by_gid(fs_input_index.fk_source_gid)
    state_list = ts_index.get_labels_for_dimension(1)
    if len(state_list) == 0:
        # No labels stored; fall back to numeric indices for dim 1.
        state_list = list(range(shape[1]))
    # Only sample_period is needed by the partially-loaded source.
    fourier_spectrum.source = TimeSeries(sample_period=ts_index.sample_period)
    mode_list = list(range(shape[3]))
    available_scales = ["Linear", "Logarithmic"]
    params = dict(matrix_shape=json.dumps([shape[0], shape[2]]),
                  plotName=ts_index.title,
                  url_base=URLGenerator.build_h5_url(view_model.input_data, "get_fourier_data", parameter=""),
                  xAxisName="Frequency [kHz]",
                  yAxisName="Power",
                  available_scales=available_scales,
                  state_list=state_list,
                  mode_list=mode_list,
                  normalize_list=["no", "yes"],
                  normalize="no",
                  state_variable=state_list[0],
                  mode=mode_list[0],
                  xscale=available_scales[0],
                  yscale=available_scales[0],
                  x_values=json.dumps(fourier_spectrum.frequency[slice(shape[0])].tolist()),
                  xmin=fourier_spectrum.freq_step,
                  xmax=fourier_spectrum.max_freq)
    return self.build_display_result("fourier_spectrum/view", params)
def launch(self, view_model):
    # type: (CrossCorrelateAdapterModel) -> [CrossCorrelationIndex]
    """
    Launch algorithm and build results.
    Compute the node-pairwise cross-correlation of the source 4D TimeSeries represented by the index given as input.

    Return a CrossCorrelationIndex. Create a CrossCorrelationH5 that contains the cross-correlation
    sequences for all possible combinations of the nodes.

    See: http://www.scipy.org/doc/api_docs/SciPy.signal.signaltools.html#correlate

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the cross correlation index for the given time series
    :rtype: `CrossCorrelationIndex`
    """
    # --------- Prepare CrossCorrelationIndex and CrossCorrelationH5 objects for result ------------##
    cross_corr_index = CrossCorrelationIndex()
    cross_corr_h5_path = h5.path_for(self.storage_path, CrossCorrelationH5, cross_corr_index.gid)
    cross_corr_h5 = CrossCorrelationH5(cross_corr_h5_path)
    # Full slices on all dims except dim 1 (state variables), iterated below.
    node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
        small_ts.sample_period = ts_h5.sample_period.load()
        small_ts.sample_period_unit = ts_h5.sample_period_unit.load()
        partial_cross_corr = None
        for var in range(self.input_shape[1]):
            node_slice[1] = slice(var, var + 1)
            small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
            partial_cross_corr = self._compute_cross_correlation(small_ts, ts_h5)
            cross_corr_h5.write_data_slice(partial_cross_corr)
    # The last partial result carries the scalar metadata for the whole index.
    partial_cross_corr.source.gid = view_model.time_series
    partial_cross_corr.gid = uuid.UUID(cross_corr_index.gid)
    cross_corr_index.fill_from_has_traits(partial_cross_corr)
    self.fill_index_from_h5(cross_corr_index, cross_corr_h5)
    cross_corr_h5.store(partial_cross_corr, scalars_only=True)
    cross_corr_h5.close()
    return cross_corr_index
def launch(self, time_series):
    """
    Launch algorithm and build results.

    :returns: the `ComplexCoherenceSpectrum` built with the given time-series
    """
    shape = time_series.read_data_shape()
    ##------- Prepare a ComplexCoherenceSpectrum object for result -------##
    spectra = ComplexCoherenceSpectrum(source=time_series, storage_path=self.storage_path)
    ##------------------- NOTE: Assumes 4D TimeSeries. -------------------##
    # The whole series is processed in a single slice (no chunking here).
    node_slice = [slice(shape[0]), slice(shape[1]), slice(shape[2]), slice(shape[3])]
    ##---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries(use_storage=False)
    small_ts.sample_rate = time_series.sample_rate
    small_ts.data = time_series.read_data_slice(tuple(node_slice))
    self.algorithm.time_series = small_ts
    partial_result = self.algorithm.evaluate()
    LOG.debug("got partial_result")
    LOG.debug("partial segment_length is %s" % (str(partial_result.segment_length)))
    LOG.debug("partial epoch_length is %s" % (str(partial_result.epoch_length)))
    LOG.debug("partial windowing_function is %s" % (str(partial_result.windowing_function)))
    #LOG.debug("partial frequency vector is %s" % (str(partial_result.frequency)))
    spectra.write_data_slice(partial_result)
    # Copy scalar metadata from the computed result onto the stored datatype.
    spectra.segment_length = partial_result.segment_length
    spectra.epoch_length = partial_result.epoch_length
    spectra.windowing_function = partial_result.windowing_function
    #spectra.frequency = partial_result.frequency
    spectra.close_file()
    return spectra
def launch(self, view_model):
    # type: (ICAAdapterModel) -> [IndependentComponentsIndex]
    """
    Launch algorithm and build results.

    Runs ICA per state variable and accumulates the partial decompositions
    into an IndependentComponentsH5.
    """
    # --------- Prepare a IndependentComponents object for result ----------##
    ica_index = IndependentComponentsIndex()
    ica_index.fk_source_gid = view_model.time_series.hex
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    result_path = h5.path_for(self.storage_path, IndependentComponentsH5, ica_index.gid)
    ica_h5 = IndependentComponentsH5(path=result_path)
    ica_h5.gid.store(uuid.UUID(ica_index.gid))
    ica_h5.source.store(view_model.time_series)
    ica_h5.n_components.store(self.algorithm.n_components)
    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    input_shape = time_series_h5.data.shape
    # Full slices on all dims except dim 1 (state variables), iterated below.
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_ica = self.algorithm.evaluate()
        ica_h5.write_data_slice(partial_ica)
    # Index metadata derived from the unmixing matrix written above.
    array_metadata = ica_h5.unmixing_matrix.get_cached_metadata()
    ica_index.array_has_complex = array_metadata.has_complex
    ica_index.shape = json.dumps(ica_h5.unmixing_matrix.shape)
    ica_index.ndim = len(ica_h5.unmixing_matrix.shape)
    ica_h5.close()
    time_series_h5.close()
    return ica_index
def launch(self, view_model):
    # type: (ICAAdapterModel) -> [IndependentComponentsIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the ica index for the specified time series
    """
    # --------------------- Prepare result entities ---------------------##
    ica_index = IndependentComponentsIndex()
    result_path = h5.path_for(self.storage_path, IndependentComponentsH5, ica_index.gid)
    ica_h5 = IndependentComponentsH5(path=result_path)
    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    input_shape = time_series_h5.data.shape
    # Full slices on all dims except dim 1 (state variables), iterated below.
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        partial_ica = compute_ica_decomposition(small_ts, view_model.n_components)
        ica_h5.write_data_slice(partial_ica)
    time_series_h5.close()
    # The last partial result carries the scalar metadata for the index.
    partial_ica.source.gid = view_model.time_series
    partial_ica.gid = uuid.UUID(ica_index.gid)
    ica_h5.store(partial_ica, scalars_only=True)
    ica_h5.close()
    ica_index.fill_from_has_traits(partial_ica)
    return ica_index
def launch(self, view_model):
    # type: (NodeCovarianceAdapterModel) -> [CovarianceIndex]
    """
    Launch algorithm and build results.

    :returns: the `CovarianceIndex` built with the given time_series index as source
    """
    # Create an index for the computed covariance.
    covariance_index = CovarianceIndex()
    covariance_h5_path = h5.path_for(self.storage_path, CovarianceH5, covariance_index.gid)
    covariance_h5 = CovarianceH5(covariance_h5_path)
    # NOTE: Assumes 4D, Simulator timeSeries.
    # Dims 1 (state variable) and 3 (mode) are iterated pairwise below.
    node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), None]
    with h5.h5_file_for_index(self.input_time_series_index) as ts_h5:
        for mode in range(self.input_shape[3]):
            for var in range(self.input_shape[1]):
                small_ts = TimeSeries()
                node_slice[1] = slice(var, var + 1)
                node_slice[3] = slice(mode, mode + 1)
                small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
                partial_cov = self._compute_node_covariance(small_ts, ts_h5)
                covariance_h5.write_data_slice(partial_cov.array_data)
    ts_array_metadata = covariance_h5.array_data.get_cached_metadata()
    covariance_index.source_gid = self.input_time_series_index.gid
    # NOTE(review): subtype is set to the *index* class name
    # ("CovarianceIndex"), not the datatype name — looks suspicious; confirm
    # against what other analyzers store in `subtype`.
    covariance_index.subtype = type(covariance_index).__name__
    covariance_index.array_data_min = ts_array_metadata.min
    covariance_index.array_data_max = ts_array_metadata.max
    covariance_index.array_data_mean = ts_array_metadata.mean
    covariance_index.ndim = len(covariance_h5.array_data.shape)
    covariance_h5.gid.store(uuid.UUID(covariance_index.gid))
    covariance_h5.source.store(view_model.time_series)
    covariance_h5.close()
    return covariance_index
def create_time_series(self, connectivity=None, surface=None, region_map=None, region_volume_map=None):
    """
    Create a time series instance that will be populated by this monitor
    :param surface: if present a TimeSeriesSurface is returned
    :param connectivity: if present a TimeSeriesRegion is returned
    Otherwise a plain TimeSeries will be returned
    """
    monitor_name = self.__class__.__name__
    # Surface support takes precedence over regions.
    if surface is not None:
        return TimeSeriesSurface(surface=surface.region_mapping_data.surface,
                                 sample_period=self.period,
                                 title='Surface' + ' ' + monitor_name)
    if connectivity is not None:
        return TimeSeriesRegion(connectivity=connectivity,
                                region_mapping=region_map,
                                region_mapping_volume=region_volume_map,
                                sample_period=self.period,
                                title='Regions' + ' ' + monitor_name)
    # No spatial support supplied: plain time series.
    return TimeSeries(sample_period=self.period, title=' ' + monitor_name)
def launch(self, view_model):
    # type: (BalloonModelAdapterModel) -> [TimeSeriesRegionIndex]
    """
    Launch algorithm and build results.

    :param time_series: the input time-series used as neural activation in the Balloon Model
    :returns: the simulated BOLD signal
    :rtype: `TimeSeries`
    """
    input_time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    time_line = input_time_series_h5.read_time_page(0, self.input_shape[0])
    bold_signal_index = TimeSeriesRegionIndex()
    bold_signal_h5_path = h5.path_for(self.storage_path, TimeSeriesRegionH5, bold_signal_index.gid)
    bold_signal_h5 = TimeSeriesRegionH5(bold_signal_h5_path)
    bold_signal_h5.gid.store(uuid.UUID(bold_signal_index.gid))
    self._fill_result_h5(bold_signal_h5, input_time_series_h5)
    # ---------- Iterate over slices and compose final result ------------##
    # Full slices on all dims except dim 2 (nodes), iterated below.
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]
    small_ts = TimeSeries()
    small_ts.sample_period = self.input_time_series_index.sample_period
    small_ts.sample_period_unit = self.input_time_series_index.sample_period_unit
    small_ts.time = time_line
    for node in range(self.input_shape[2]):
        node_slice[2] = slice(node, node + 1)
        small_ts.data = input_time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_bold = self.algorithm.evaluate()
        # Append each node's BOLD along dimension 2 of the result.
        bold_signal_h5.write_data_slice_on_grow_dimension(partial_bold.data, grow_dimension=2)
    bold_signal_h5.write_time_slice(time_line)
    bold_signal_shape = bold_signal_h5.data.shape
    bold_signal_h5.nr_dimensions.store(len(bold_signal_shape))
    bold_signal_h5.close()
    input_time_series_h5.close()
    self._fill_result_index(bold_signal_index, bold_signal_shape)
    return bold_signal_index
def launch(self, view_model):
    # type: (PCAAdapterModel) -> [PrincipalComponentsIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the `PrincipalComponentsIndex` object built with the given timeseries as source
    """
    # --------------------- Prepare result entities ----------------------##
    principal_components_index = PrincipalComponentsIndex()
    dest_path = h5.path_for(self.storage_path, PrincipalComponentsH5, principal_components_index.gid)
    pca_h5 = PrincipalComponentsH5(path=dest_path)
    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    input_shape = time_series_h5.data.shape
    # Full slices on all dims except dim 1 (state variables), iterated below.
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        # NOTE(review): assigning self.time_series inside the loop looks like
        # leftover state plumbing — compute_pca only receives small_ts; confirm
        # whether this attribute is read elsewhere.
        self.time_series = small_ts.gid
        partial_pca = compute_pca(small_ts)
        pca_h5.write_data_slice(partial_pca)
    time_series_h5.close()
    # The last partial result carries the scalar metadata for the index.
    partial_pca.source.gid = view_model.time_series
    partial_pca.gid = uuid.UUID(principal_components_index.gid)
    principal_components_index.fill_from_has_traits(partial_pca)
    pca_h5.store(partial_pca, scalars_only=True)
    pca_h5.close()
    return principal_components_index
def launch(self, view_model):
    # type: (NodeCoherenceModel) -> [CoherenceSpectrumIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the node coherence for the specified time series
    """
    # -------------------- Prepare result entities -----------------------##
    coherence_spectrum_index = CoherenceSpectrumIndex()
    dest_path = self.path_for(CoherenceSpectrumH5, coherence_spectrum_index.gid)
    coherence_h5 = CoherenceSpectrumH5(dest_path)
    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    input_shape = time_series_h5.data.shape
    # Full slices on all dims except dim 1 (state variables), iterated below.
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    small_ts.sample_period = time_series_h5.sample_period.load()
    small_ts.sample_period_unit = time_series_h5.sample_period_unit.load()
    partial_coh = None
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        partial_coh = calculate_cross_coherence(small_ts, view_model.nfft)
        coherence_h5.write_data_slice(partial_coh)
    time_series_h5.close()
    # The last partial result carries the scalar metadata for the index.
    partial_coh.source.gid = view_model.time_series
    partial_coh.gid = uuid.UUID(coherence_spectrum_index.gid)
    coherence_spectrum_index.fill_from_has_traits(partial_coh)
    self.fill_index_from_h5(coherence_spectrum_index, coherence_h5)
    coherence_h5.store(partial_coh, scalars_only=True)
    coherence_h5.frequency.store(partial_coh.frequency)
    coherence_h5.close()
    return coherence_spectrum_index
def build():
    """
    Build and persist a small synthetic TimeSeriesIndex fixture: three nodes
    carrying 40 Hz, 200 Hz and 100+300 Hz sinusoids over 4000 samples.
    """
    time = numpy.linspace(0, 1000, 4000)
    data = numpy.zeros((time.size, 1, 3, 1))
    data[:, 0, 0, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 40)
    data[:, 0, 1, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 200)
    data[:, 0, 2, 0] = numpy.sin(2 * numpy.pi * time / 1000.0 * 100) + numpy.sin(2 * numpy.pi * time / 1000.0 * 300)
    # NOTE(review): sample_period=1/4000 does not match the linspace spacing
    # (1000/3999 ≈ 0.25) — possibly intentional for the fixture; confirm.
    ts = TimeSeries(time=time, data=data, sample_period=1.0 / 4000)
    op = operation_factory()
    ts_db = TimeSeriesIndex()
    ts_db.fk_from_operation = op.id
    ts_db.fill_from_has_traits(ts)
    ts_h5_path = h5.path_for_stored_index(ts_db)
    with TimeSeriesH5(ts_h5_path) as f:
        f.store(ts)
    session.add(ts_db)
    session.commit()
    return ts_db
def create_ICA(self, timeseries):
    """
    :returns: persisted entity IndependentComponents
    """
    operation, _, storage_path = self.__create_operation()
    random_shape = (10, 10, 10, 10)
    # An unstored source carrying random data for the partial decomposition.
    seed_ts = TimeSeries(use_storage=False)
    seed_ts.data = numpy.random.random(random_shape)
    partial_ica = IndependentComponents(source=seed_ts,
                                        component_time_series=numpy.random.random(random_shape),
                                        prewhitening_matrix=numpy.random.random(random_shape),
                                        unmixing_matrix=numpy.random.random(random_shape),
                                        n_components=10,
                                        use_storage=False)
    # The persisted entity points at the real input time series.
    ica = IndependentComponents(source=timeseries, n_components=10, storage_path=storage_path)
    ica.write_data_slice(partial_ica)
    OperationService().initiate_prelaunch(operation, StoreAdapter([ica]), {})
    return ica
def launch(self, view_model):
    # type: (NodeCovarianceAdapterModel) -> [CovarianceIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the `CovarianceIndex` built with the given time_series index as source
    """
    # -------------------- Prepare result entities ---------------------##
    covariance_index = CovarianceIndex()
    covariance_h5_path = h5.path_for(self.storage_path, CovarianceH5, covariance_index.gid)
    covariance_h5 = CovarianceH5(covariance_h5_path)
    # ------------ NOTE: Assumes 4D, Simulator timeSeries -------------##
    # Dims 1 (state variable) and 3 (mode) are iterated pairwise below.
    node_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), None]
    ts_h5 = h5.h5_file_for_index(self.input_time_series_index)
    for mode in range(self.input_shape[3]):
        for var in range(self.input_shape[1]):
            small_ts = TimeSeries()
            node_slice[1] = slice(var, var + 1)
            node_slice[3] = slice(mode, mode + 1)
            small_ts.data = ts_h5.read_data_slice(tuple(node_slice))
            partial_cov = self._compute_node_covariance(small_ts, ts_h5)
            covariance_h5.write_data_slice(partial_cov.array_data)
    ts_h5.close()
    # The last partial result carries the scalar metadata for the index.
    partial_cov.source.gid = view_model.time_series
    partial_cov.gid = uuid.UUID(covariance_index.gid)
    covariance_index.fill_from_has_traits(partial_cov)
    self.fill_index_from_h5(covariance_index, covariance_h5)
    covariance_h5.store(partial_cov, scalars_only=True)
    covariance_h5.close()
    return covariance_index
def launch(self, time_series, dt=None, bold_model=None, RBM=None, neural_input_transformation=None):
    """
    Launch algorithm and build results.

    :param time_series: the input time-series used as neural activation in the Balloon Model
    :returns: the simulated BOLD signal
    :rtype: `TimeSeries`
    """
    time_line = time_series.read_time_page(0, self.input_shape[0])
    bold_signal = TimeSeriesRegion(storage_path=self.storage_path,
                                   sample_period=time_series.sample_period,
                                   start_time=time_series.start_time,
                                   connectivity=time_series.connectivity)
    ##---------- Iterate over slices and compose final result ------------##
    # Full slices on all dims except dim 2 (nodes), iterated below.
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]
    small_ts = TimeSeries(use_storage=False, sample_period=time_series.sample_period, time=time_line)
    for node in range(self.input_shape[2]):
        node_slice[2] = slice(node, node + 1)
        small_ts.data = time_series.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_bold = self.algorithm.evaluate()
        # Append each node's BOLD along dimension 2 of the result.
        bold_signal.write_data_slice(partial_bold.data, grow_dimension=2)
    bold_signal.write_time_slice(time_line)
    bold_signal.close_file()
    return bold_signal
def launch(self, time_series):
    """
    Launch algorithm and build results.
    """
    # Result container for the node covariance of the input series.
    covariance = Covariance(source=time_series, storage_path=self.storage_path)
    # NOTE: Assumes 4D, Simulator timeSeries (time, state-var, node, mode);
    # dims 1 and 3 are iterated pairwise, one (var, mode) slab at a time.
    slab = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), None]
    for mode_idx in range(self.input_shape[3]):
        for sv_idx in range(self.input_shape[1]):
            chunk_ts = TimeSeries(use_storage=False)
            slab[1] = slice(sv_idx, sv_idx + 1)
            slab[3] = slice(mode_idx, mode_idx + 1)
            chunk_ts.data = time_series.read_data_slice(tuple(slab))
            self.algorithm.time_series = chunk_ts
            partial = self.algorithm.evaluate()
            covariance.write_data_slice(partial.array_data)
    covariance.close_file()
    return covariance
def launch(self, view_model):
    # type: (PCAAdapterModel) -> [PrincipalComponentsIndex]
    """
    Launch algorithm and build results.

    :returns: the `PrincipalComponents` object built with the given timeseries as source
    """
    # --------- Prepare a PrincipalComponents object for result ----------##
    principal_components_index = PrincipalComponentsIndex()
    principal_components_index.fk_source_gid = view_model.time_series.hex
    time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)
    dest_path = h5.path_for(self.storage_path, PrincipalComponentsH5, principal_components_index.gid)
    pca_h5 = PrincipalComponentsH5(path=dest_path)
    pca_h5.source.store(time_series_h5.gid.load())
    pca_h5.gid.store(uuid.UUID(principal_components_index.gid))
    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    input_shape = time_series_h5.data.shape
    # Full slices on all dims except dim 1 (state variables), iterated below.
    node_slice = [slice(input_shape[0]), None, slice(input_shape[2]), slice(input_shape[3])]
    # ---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries()
    for var in range(input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_pca = self.algorithm.evaluate()
        pca_h5.write_data_slice(partial_pca)
    pca_h5.close()
    time_series_h5.close()
    return principal_components_index
def launch(self, time_series, n_components=None):
    """
    Launch algorithm and build results.
    """
    # Result container; component count comes from the configured algorithm.
    ica_result = IndependentComponents(source=time_series,
                                       n_components=int(self.algorithm.n_components),
                                       storage_path=self.storage_path)
    # NOTE: Assumes 4D, Simulator timeSeries; dim 1 (state variables) is
    # iterated one slab at a time.
    sv_slice = [slice(self.input_shape[0]), None, slice(self.input_shape[2]), slice(self.input_shape[3])]
    scratch_ts = TimeSeries(use_storage=False)
    for sv in range(self.input_shape[1]):
        sv_slice[1] = slice(sv, sv + 1)
        scratch_ts.data = time_series.read_data_slice(tuple(sv_slice))
        self.algorithm.time_series = scratch_ts
        ica_result.write_data_slice(self.algorithm.evaluate())
    ica_result.close_file()
    return ica_result
def _create_timeseries(self):
    """Launch adapter to persist a TimeSeries entity"""
    activity_data = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]])
    # NOTE(review): time_data has 3 entries while activity_data has 4 rows —
    # looks inconsistent; confirm whether the importer tolerates this.
    time_data = numpy.array([1, 2, 3])
    storage_path = FilesHelper().get_project_folder(self.test_project)
    time_series = TimeSeries(time_files=None,
                             activity_files=None,
                             max_chunk=10,
                             maxes=None,
                             mins=None,
                             data_shape=numpy.shape(activity_data),
                             storage_path=storage_path,
                             label_y="Time",
                             time_data=time_data,
                             data_name='TestSeries',
                             activity_data=activity_data,
                             sample_period=10.0)
    self._store_entity(time_series, "TimeSeries", "tvb.datatypes.time_series")
    # Verify exactly one TimeSeries is now present in the test project.
    timeseries_count = self.flow_service.get_available_datatypes(
        self.test_project.id, "tvb.datatypes.time_series.TimeSeries")[1]
    assert timeseries_count == 1, "Should be only one TimeSeries"
def _import(self, import_file_path=None): """ This method is used for importing data in NIFIT format :param import_file_path: absolute path of the file to be imported """ ### Retrieve Adapter instance group = dao.find_group('tvb.adapters.uploaders.nifti_importer', 'NIFTIImporter') importer = ABCAdapter.build_adapter(group) args = {'data_file': import_file_path, DataTypeMetaData.KEY_SUBJECT: "bla bla", 'apply_corrections': False, 'connectivity': None} ### Launch import Operation FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args) time_series = TimeSeries() data_types = FlowService().get_available_datatypes(self.test_project.id, time_series.module + "." + time_series.type)[0] self.assertEqual(1, len(data_types), "Project should contain only one data type.") time_series = ABCAdapter.load_entity_by_gid(data_types[0][2]) self.assertTrue(time_series is not None, "TimeSeries should not be none") return time_series
def launch(self, time_series, algorithms=None, start_point=None, segment=None):
    """
    Launch algorithm and build results.

    :param time_series: the time series on which the algorithms are run
    :param algorithms: the algorithms to be run for computing measures on the time series
    :type algorithms: any subclass of BaseTimeseriesMetricAlgorithm (KuramotoIndex, GlobalVariance,
                      VarianceNodeVariance)
    :rtype: `DatatypeMeasure`
    """
    if algorithms is None:
        algorithms = self.available_algorithms.keys()
    shape = time_series.read_data_shape()
    log_debug_array(LOG, time_series, "time_series")

    ##------------- NOTE: Assumes 4D, Simulator timeSeries. --------------##
    # The slice covers the full series and is identical for every algorithm,
    # so read the data once instead of once per algorithm (hoisted out of the
    # loop — behavior unchanged, repeated H5 reads avoided).
    node_slice = [slice(shape[0]), slice(shape[1]), slice(shape[2]), slice(shape[3])]
    unstored_ts = TimeSeries(use_storage=False)
    unstored_ts.data = time_series.read_data_slice(tuple(node_slice))

    metrics_results = {}
    for algorithm_name in algorithms:
        ##-------------------- Fill Algorithm for Analysis -------------------##
        algorithm = self.available_algorithms[algorithm_name](time_series=unstored_ts)
        if segment is not None:
            algorithm.segment = segment
        if start_point is not None:
            algorithm.start_point = start_point

        ## Validate that current algorithm's filter is valid.
        if (algorithm.accept_filter is not None and
                not algorithm.accept_filter.get_python_filter_equivalent(time_series)):
            # Fixed: the message previously contained a stray "&#10;" character
            # reference instead of a newline.
            LOG.warning('Measure algorithm will not be computed because of incompatibility on input.\n'
                        'Filters failed on algo: ' + str(algorithm_name))
            continue
        else:
            LOG.debug("Applying measure: " + str(algorithm_name))

        unstored_result = algorithm.evaluate()
        ##----------------- Prepare a Float object(s) for result ----------------##
        if isinstance(unstored_result, dict):
            metrics_results.update(unstored_result)
        else:
            metrics_results[algorithm_name] = unstored_result

    result = DatatypeMeasure(analyzed_datatype=time_series,
                             storage_path=self.storage_path,
                             data_name=self._ui_name,
                             metrics=metrics_results)
    return result
def launch(self, view_model):
    # type: (FFTAdapterModel) -> [FourierSpectrumIndex]
    """
    Launch algorithm and build results.

    :param view_model: the ViewModel keeping the algorithm inputs
    :return: the fourier spectrum for the specified time series
    """
    # Guard against a zero block size (ZeroDivisionError) when
    # memory_factor exceeds the number of nodes.
    block_size = max(1, int(math.floor(self.input_shape[2] / self.memory_factor)))
    blocks = int(math.ceil(self.input_shape[2] / block_size))

    input_time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

    # --------------------- Prepare result entities ----------------------
    fft_index = FourierSpectrumIndex()
    dest_path = self.path_for(FourierSpectrumH5, fft_index.gid)
    spectra_file = FourierSpectrumH5(dest_path)

    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------
    # Full slices on all dims except dim 2 (nodes), chunked below.
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None, slice(self.input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------
    small_ts = TimeSeries()
    small_ts.sample_period = input_time_series_h5.sample_period.load()
    small_ts.sample_period_unit = input_time_series_h5.sample_period_unit.load()

    for block in range(blocks):
        node_slice[2] = slice(block * block_size, min([(block + 1) * block_size, self.input_shape[2]]), 1)
        small_ts.data = input_time_series_h5.read_data_slice(tuple(node_slice))
        partial_result = compute_fast_fourier_transform(small_ts, view_model.segment_length,
                                                        view_model.window_function, view_model.detrend)
        if blocks <= 1 and len(partial_result.array_data) == 0:
            self.add_operation_additional_info(
                "Fourier produced empty result (most probably due to a very short input TimeSeries).")
            # Close both H5 files before bailing out (previously leaked on
            # this early-return path).
            input_time_series_h5.close()
            spectra_file.close()
            return None
        spectra_file.write_data_slice(partial_result)
    input_time_series_h5.close()

    # ---------------------------- Fill results ----------------------------
    # The last partial result carries the scalar metadata for the index.
    partial_result.source.gid = view_model.time_series
    partial_result.gid = uuid.UUID(fft_index.gid)
    fft_index.fill_from_has_traits(partial_result)
    self.fill_index_from_h5(fft_index, spectra_file)
    spectra_file.store(partial_result, scalars_only=True)
    spectra_file.windowing_function.store(view_model.window_function)
    spectra_file.close()
    self.log.debug("partial segment_length is %s" % (str(partial_result.segment_length)))
    return fft_index
def __init__(self, input=numpy.array([[], []]), **kwargs):
    """
    Build a Timeseries wrapper from either an existing (wrapped or TVB) TimeSeries
    instance or a raw numpy array.

    :param input: a Timeseries wrapper, any TVB TimeSeries subclass, or a numpy
                  array (reshaped to 4D via prepare_4D)
    :param kwargs: forwarded to the underlying TVB TimeSeries constructor; special
                   keys handled here: "time", "start_time", "sample_period",
                   "ts_type", "labels_ordering", "sensors", "surface"
    """
    # --- Case 1: input is already a (wrapped or TVB) TimeSeries: deep-copy it ---
    if isinstance(input, (Timeseries, TimeSeries)):
        if isinstance(input, Timeseries):
            self._tvb = deepcopy(input._tvb)
            self.ts_type = str(input.ts_type)
        elif isinstance(input, TimeSeries):
            self._tvb = deepcopy(input)
            # Derive ts_type from the most specific TVB subclass.
            # NOTE(review): this is an `if` followed by an `elif` chain, so a
            # TimeSeriesRegion also falls into the chain below; and the second
            # TimeSeriesEEG test is dead code (already matched earlier) — confirm intent.
            if isinstance(input, TimeSeriesRegion):
                self.ts_type = "Region"
            if isinstance(input, TimeSeriesSEEG):
                self.ts_type = "SEEG"
            elif isinstance(input, TimeSeriesEEG):
                self.ts_type = "EEG"
            elif isinstance(input, TimeSeriesMEG):
                self.ts_type = "MEG"
            elif isinstance(input, TimeSeriesEEG):
                self.ts_type = "EEG"
            elif isinstance(input, TimeSeriesVolume):
                self.ts_type = "Volume"
            elif isinstance(input, TimeSeriesSurface):
                self.ts_type = "Surface"
            else:
                self.ts_type = ""
                warning(
                    "Input TimeSeries %s is not one of the known TVB TimeSeries classes!"
                    % str(input))
        # Apply remaining kwargs: first try the wrapper, then fall back to the
        # wrapped TVB object.
        # NOTE(review): bare except silently retargets on ANY error — confirm.
        for attr, value in kwargs.items():
            try:
                setattr(self, attr, value)
            except:
                setattr(self._tvb, attr, value)
    # --- Case 2: input is a raw numpy array: build a TVB TimeSeries around it ---
    elif isinstance(input, numpy.ndarray):
        input = prepare_4D(input, self.logger)
        time = kwargs.pop("time", None)
        if time is not None:
            # Derive start_time / sample_period from the explicit time vector
            # unless given explicitly.
            # NOTE(review): the nested double pop of the same key is redundant —
            # equivalent to a single pop with the default.
            start_time = float(
                kwargs.pop("start_time", kwargs.pop("start_time", time[0])))
            sample_period = float(
                kwargs.pop(
                    "sample_period",
                    kwargs.pop("sample_period", numpy.mean(numpy.diff(time)))))
            kwargs.update({
                "start_time": start_time,
                "sample_period": sample_period
            })

        # Initialize
        self.ts_type = kwargs.pop("ts_type", "Region")
        labels_ordering = kwargs.get("labels_ordering", None)

        # Get input sensors if any (unwrap our Sensors wrapper to the TVB object).
        input_sensors = None
        if isinstance(kwargs.get("sensors", None), (TVBSensors, Sensors)):
            if isinstance(kwargs["sensors"], Sensors):
                input_sensors = kwargs["sensors"]._tvb
                self.ts_type = "%s sensor" % input_sensors.sensors_type
                kwargs.update({"sensors": input_sensors})
            else:
                input_sensors = kwargs["sensors"]

        # Create Timeseries
        if isinstance(input_sensors, TVBSensors) or \
                self.ts_type in ["SEEG sensor", "Internal sensor", "EEG sensor", "MEG sensor"]:
            # ...for Sensor Timeseries
            if labels_ordering is None:
                labels_ordering = LABELS_ORDERING
                labels_ordering[2] = "%s sensor" % self.ts_type
                kwargs.update({"labels_ordering": labels_ordering})
            if isinstance(input_sensors, TVBSensorsInternal) or isequal_string(self.ts_type, "Internal sensor")\
                    or isequal_string(self.ts_type, "SEEG sensor"):
                self._tvb = TimeSeriesSEEG(data=input, **kwargs)
                self.ts_type = "SEEG sensor"
            elif isinstance(input_sensors, TVBSensorsEEG) or isequal_string(
                    self.ts_type, "EEG sensor"):
                self._tvb = TimeSeriesEEG(data=input, **kwargs)
                self.ts_type = "EEG sensor"
            elif isinstance(input_sensors, TVBSensorsMEG) or isequal_string(
                    self.ts_type, "MEG sensor"):
                self._tvb = TimeSeriesMEG(data=input, **kwargs)
                self.ts_type = "MEG sensor"
            else:
                raise_value_error(
                    "Not recognizing sensors of type %s:\n%s"
                    % (self.ts_type, str(input_sensors)))
        else:
            input_surface = kwargs.pop("surface", None)
            if isinstance(
                    input_surface,
                    (Surface, TVBSurface)) or self.ts_type == "Surface":
                # Surface time series: unwrap our Surface wrapper if needed.
                self.ts_type = "Surface"
                if isinstance(input_surface, Surface):
                    kwargs.update({"surface": input_surface._tvb})
                else:
                    kwargs.update({"surface": input_surface})
                if labels_ordering is None:
                    labels_ordering = LABELS_ORDERING
                    labels_ordering[2] = "Vertex"
                    kwargs.update({"labels_ordering": labels_ordering})
                self._tvb = TimeSeriesSurface(data=input, **kwargs)
            elif isequal_string(self.ts_type, "Region"):
                if labels_ordering is None:
                    labels_ordering = LABELS_ORDERING
                    labels_ordering[2] = "Region"
                    kwargs.update({"labels_ordering": labels_ordering})
                self._tvb = TimeSeriesRegion(data=input, **kwargs)  # , **kwargs
            elif isequal_string(self.ts_type, "Volume"):
                if labels_ordering is None:
                    labels_ordering = ["Time", "X", "Y", "Z"]
                    kwargs.update({"labels_ordering": labels_ordering})
                self._tvb = TimeSeriesVolume(data=input, **kwargs)
            else:
                self._tvb = TimeSeries(data=input, **kwargs)

        # Sanity check: every dimension label referenced by labels_ordering
        # should have an entry in labels_dimensions.
        if not numpy.all([
                dim_label in self._tvb.labels_dimensions.keys()
                for dim_label in self._tvb.labels_ordering
        ]):
            warning(
                "Lack of correspondance between timeseries labels_ordering %s\n"
                "and labels_dimensions!: %s"
                % (self._tvb.labels_ordering, self._tvb.labels_dimensions.keys()))

    # Common finalization for both construction paths.
    # NOTE(review): if input is neither a TimeSeries nor an ndarray, self._tvb was
    # never set and the line below raises AttributeError — presumably intentional.
    self._tvb.configure()
    self.configure_time()
    self.configure_sample_rate()
    if len(self.title) == 0:
        self._tvb.title = "%s Time Series" % self.ts_type
def make_harmonic_ts():
    """Build an empty 'harmonic' TimeSeries preset.

    The preset carries two state variables ('position', 'speed'), starts at
    t=0.0 and uses a 0.5 sample period.
    """
    ts_config = {
        'title': 'harmonic',
        'labels_ordering': ('time', 'statevar', 'space'),
        'labels_dimensions': {'statevar': ['position', 'speed']},
        'start_time': 0.0,
        'sample_period': 0.5,
    }
    return TimeSeries(**ts_config)
class NodeComplexCoherence(core.Type):
    """
    A class for calculating the FFT of a TimeSeries and returning
    a ComplexCoherenceSpectrum datatype.

    This algorithm is based on the matlab function data2cs_event.m written by Guido Nolte:
        .. [Freyer_2012] Freyer, F.; Reinacher, M.; Nolte, G.; Dinse, H. R. and
            Ritter, P. *Repetitive tactile stimulation changes resting-state
            functional connectivity-implications for treatment of sensorimotor decline*.
            Front Hum Neurosci, Bernstein Focus State Dependencies of Learning and
            Bernstein Center for Computational Neuroscience Berlin, Germany., 2012, 6, 144

    Input: originally the input could be 2D (tpts x nodes/channels), and it was
    possible to give a 3D array (e.g., tpspt x nodes/cahnnels x trials) via the
    segment_length attribute. Current TVB implementation can handle 4D or 2D
    TimeSeries datatypes. Be warned: the 4D TimeSeries will be averaged and
    squeezed.

    Output: (main arrays)
    - the cross-spectrum
    - the complex coherence, from which the imaginary part can be extracted

    By default the time series is segmented into 1 second `epoch` blocks and 0.5
    second 50% overlapping `segments` to which a Hanning function is applied.
    """

    # The input 4D (or 2D) TimeSeries to analyze.
    time_series = TimeSeries(
        label="Time Series",
        required=True,
        doc="""The timeseries for which the CrossCoherence and ComplexCoherence is to be computed.""")

    # Epoch length in ms; <= 0 means "use the whole time series as one epoch".
    epoch_length = basic.Float(
        label="Epoch length [ms]",
        default=1000.0,
        order=-1,
        required=False,
        doc="""In general for lengthy EEG recordings (~30 min), the timeseries are divided into equally sized segments (~ 20-40s). These contain the event that is to be characterized by means of the cross coherence. Additionally each epoch block will be further divided into segments to which the FFT will be applied.""")

    # FFT window length in ms; determines the frequency resolution.
    segment_length = basic.Float(
        label="Segment length [ms]",
        default=500.0,
        order=-1,
        required=False,
        doc="""The timeseries can be segmented into equally sized blocks (overlapping if necessary). 
The segement length determines the frequency resolution of the resulting power spectra -- longer windows produce finer frequency resolution. """)

    # Shift between successive segments in ms (250 with 500 length = 50% overlap).
    segment_shift = basic.Float(
        label="Segment shift [ms]",
        default=250.0,
        required=False,
        order=-1,
        doc="""Time length by which neighboring segments are shifted. e.g. `segment shift` = `segment_length` / 2 means 50% overlapping segments.""")

    # Name of the numpy windowing function applied to each segment before FFT.
    window_function = basic.String(
        label="Windowing function",
        default='hanning',
        required=False,
        order=-1,
        doc="""Windowing functions can be applied before the FFT is performed. Default is `hanning`, possibilities are: 'hamming'; 'bartlett'; 'blackman'; and 'hanning'. See, numpy.<function_name>.""")

    average_segments = basic.Bool(
        label="Average across segments",
        default=True,
        required=False,
        order=-1,
        doc="""Flag. If `True`, compute the mean Cross Spectrum across segments.""")

    subtract_epoch_average = basic.Bool(
        label="Subtract average across epochs",
        default=True,
        required=False,
        order=-1,
        doc="""Flag. If `True` and if the number of epochs is > 1, you can optionally subtract the mean across epochs before computing the complex coherence.""")

    # Not yet functional (see doc).
    zeropad = basic.Integer(
        label="Zeropadding",
        default=0,
        required=False,
        order=-1,
        doc="""Adds `n` zeros at the end of each segment and at the end of window_function. It is not yet functional.""")

    detrend_ts = basic.Bool(
        label="Detrend time series",
        default=False,
        required=False,
        order=-1,
        doc="""Flag. If `True` removes linear trend along the time dimension before applying FFT.""")

    max_freq = basic.Float(
        label="Maximum frequency",
        default=1024.0,
        order=-1,
        required=False,
        doc="""Maximum frequency points (e.g. 32., 64., 128.) represented in the output. Default is segment_length / 2 + 1.""")

    # Placeholder for a future projection-matrix feature; evaluate() branches on it.
    npat = basic.Float(
        label="dummy variable",
        default=1.0,
        required=False,
        order=-1,
        doc="""This attribute appears to be related to an input projection matrix... Which is not yet implemented""")

    def evaluate(self):
        """
        Calculate the FFT, Cross Coherence and Complex Coherence of time_series
        broken into (possibly) epochs and segments of length `epoch_length` and
        `segment_length` respectively, filtered by `window_function`.

        :returns: a spectral.ComplexCoherenceSpectrum holding the complex
                  coherence (array_data) and the cross-spectrum.
        """
        cls_attr_name = self.__class__.__name__ + ".time_series"
        self.time_series.trait["data"].log_debug(owner=cls_attr_name)
        tpts = self.time_series.data.shape[0]
        time_series_length = tpts * self.time_series.sample_period

        # 4D input is averaged over the last (mode) and second (state-variable)
        # dimensions and squeezed down to (time, nodes).
        # NOTE(review): for a 2D input, time_series_data is never assigned here
        # and the code below would raise NameError — confirm 2D support.
        if len(self.time_series.data.shape) > 2:
            time_series_data = numpy.squeeze(
                (self.time_series.data.mean(axis=-1)).mean(axis=1))

        #nchan = time_series_data.shape[1]

        #NOTE: if we get a projection matrix ... then ...
        #if self.npat > 1:
        #    data = data * proj
        #    nchan = self.npat

        #Divide time-series into epochs, no overlapping
        if self.epoch_length > 0.0:
            nepochs = int(numpy.floor(time_series_length / self.epoch_length))
            # epoch_tpts stays a float here; it is later used in slice bounds —
            # NOTE(review): float slice indices raise TypeError on modern numpy.
            epoch_tpts = self.epoch_length / self.time_series.sample_period
            time_series_length = self.epoch_length
            tpts = epoch_tpts
        else:
            # Treat the whole series as a single epoch.
            self.epoch_length = time_series_length
            nepochs = int(numpy.ceil(time_series_length / self.epoch_length))

        #Segment time-series, overlapping if necessary
        nseg = int(numpy.floor(time_series_length / self.segment_length))
        if nseg > 1:
            seg_tpts = self.segment_length / self.time_series.sample_period
            seg_shift_tpts = self.segment_shift / self.time_series.sample_period
            # Number of (possibly overlapping) segments that fit in one epoch.
            nseg = int(numpy.floor((tpts - seg_tpts) / seg_shift_tpts) + 1)
        else:
            # A single segment spanning the whole (epoch of the) series.
            self.segment_length = time_series_length
            seg_tpts = time_series_data.shape[0]

        # Frequency vectors
        freqs = numpy.fft.fftfreq(int(seg_tpts))
        nfreq = numpy.min(
            [self.max_freq, numpy.floor((seg_tpts + self.zeropad) / 2.0) + 1])
        # NOTE(review): nfreq is a float (numpy.min of floats); indexing with it
        # relies on legacy numpy behavior — confirm on the targeted numpy version.
        freqs = freqs[0:nfreq, ] * (1.0 / self.time_series.sample_period)

        result_shape, av_result_shape = self.result_shape(
            self.time_series.data.shape, self.max_freq, self.epoch_length,
            self.segment_length, self.segment_shift,
            self.time_series.sample_period, self.zeropad,
            self.average_segments)

        # cs: cross-spectrum accumulator; av: per-channel spectrum average
        # (numpy.matrix so that '*' below is matrix multiplication).
        cs = numpy.zeros(result_shape, dtype=numpy.complex128)
        av = numpy.matrix(numpy.zeros(av_result_shape, dtype=numpy.complex128))
        coh = numpy.zeros(result_shape, dtype=numpy.complex128)

        # NOTE: result for individual epochs are kept only if npat > 1. Skipping ...
        #if self.npat > 1:
        #    if not self.average_segments:
        #        cs = numpy.zeros((nchan, nchan, nfreq, nepochs, nseg), dtype=numpy.complex128)
        #        av = numpy.zeros((nchan, nfreq, nepochs, nseg), dtype=numpy.complex128)
        #    else:
        #        av = numpy.zeros((nchan, nfreq, nepochs), dtype=numpy.complex128)
        #        cs = numpy.zeros((nchan, nchan, nfreq, nepochs), dtype=numpy.complex128)

        #Apply windowing function
        if self.window_function is not None:
            if self.window_function not in SUPPORTED_WINDOWING_FUNCTIONS:
                # Logged but not raised: eval below would then fail on an
                # unknown numpy attribute.
                LOG.error("Windowing function is: %s" % self.window_function)
                LOG.error("Must be in: %s" % str(SUPPORTED_WINDOWING_FUNCTIONS))

            # Resolve e.g. "hanning" -> numpy.hanning. NOTE(review): eval on an
            # attribute value; safe only because the value is checked above.
            window_function = eval("".join(("numpy.", self.window_function)))
            win = window_function(seg_tpts)
            # One window column per channel, transposed to (time, channels).
            window_mask = (numpy.kron(
                numpy.ones((time_series_data.shape[1], 1)), win)).T

        nave = 0

        for j in numpy.arange(nepochs):
            # Slice out epoch j (no overlap between epochs).
            data = time_series_data[j * epoch_tpts:(j + 1) * epoch_tpts, :]

            for i in numpy.arange(nseg):  #average over all segments;
                time_series = data[i * seg_shift_tpts:i * seg_shift_tpts +
                                   seg_tpts, :]

                if self.detrend_ts:
                    time_series = sp_signal.detrend(time_series, axis=0)

                datalocfft = numpy.fft.fft(time_series * window_mask, axis=0)
                datalocfft = numpy.matrix(datalocfft)

                for f in numpy.arange(nfreq):  #for all frequencies
                    if self.npat == 1:
                        if not self.average_segments:
                            # Keep per-segment cross-spectra.
                            cs[:, :, f, i] += numpy.conjugate(
                                datalocfft[f, :].conj().T * \
                                datalocfft[f, :])
                            av[:, f, i] += numpy.conjugate(datalocfft[f, :].conj().T)
                        else:
                            # Accumulate over segments (normalized by nave later).
                            cs[:, :, f] += numpy.conjugate(
                                datalocfft[f, :].conj().T * \
                                datalocfft[f, :])
                            av[:, f] += numpy.conjugate(datalocfft[f, :].conj().T)
                    else:
                        if not self.average_segments:
                            cs[:, :, f, j, i] = numpy.conjugate(
                                datalocfft[f, :].conj().T * \
                                datalocfft[f, :])
                            av[:, f, j, i] = numpy.conjugate(datalocfft[f, :].conj().T)
                        else:
                            cs[:, :, f, j] += numpy.conjugate(
                                datalocfft[f, :].conj().T * \
                                datalocfft[f, :])
                            av[:, f, j] += numpy.conjugate(datalocfft[f, :].conj().T)
                del datalocfft
            nave += 1.0

        # End of FORs: normalize accumulated sums into averages.
        if not self.average_segments:
            cs = cs / nave
            av = av / nave
        else:
            nave = nave * nseg
            cs = cs / nave
            av = av / nave

        # Subtract average
        for f in numpy.arange(nfreq):
            if self.subtract_epoch_average:
                if self.npat == 1:
                    if not self.average_segments:
                        for i in numpy.arange(nseg):
                            cs[:, :, f, i] = cs[:, :, f, i] - av[:, f, i] * av[:, f, i].conj().T
                    else:
                        cs[:, :, f] = cs[:, :, f] - av[:, f] * av[:, f].conj().T
                else:
                    if not self.average_segments:
                        for i in numpy.arange(nseg):
                            for j in numpy.arange(nepochs):
                                cs[:, :, f, j, i] = cs[:, :, f, j, i] - av[:, f, j, i] * av[:, f, j, i].conj().T
                    else:
                        for j in numpy.arange(nepochs):
                            cs[:, :, f, j] = cs[:, :, f, j] - av[:, f, j] * av[:, f, j].conj().T

        #Compute Complex Coherence: normalize the cross-spectrum by the
        #geometric mean of the auto-spectra (diagonal entries).
        ndim = len(cs.shape)
        if ndim == 3:
            for i in numpy.arange(cs.shape[2]):
                temp = numpy.matrix(cs[:, :, i])
                coh[:, :, i] = cs[:, :, i] / numpy.sqrt(
                    (temp.diagonal().conj().T) * temp.diagonal())
        elif ndim == 4:
            for i in numpy.arange(cs.shape[2]):
                for j in numpy.arange(cs.shape[3]):
                    temp = numpy.matrix(numpy.squeeze(cs[:, :, i, j]))
                    coh[:, :, i, j] = temp / numpy.sqrt(
                        (temp.diagonal().conj().T) * temp.diagonal().T)

        util.log_debug_array(LOG, cs, "result")
        spectra = spectral.ComplexCoherenceSpectrum(
            source=self.time_series,
            array_data=coh,
            cross_spectrum=cs,
            #frequency = freqs,
            epoch_length=self.epoch_length,
            segment_length=self.segment_length,
            windowing_function=self.window_function,
            #fft_points = seg_tpts,
            use_storage=False)
        return spectra

    @staticmethod
    def result_shape(input_shape, max_freq, epoch_length, segment_length,
                     segment_shift, sample_period, zeropad, average_segments):
        """
        Returns the shape of the main result and the average over epochs.

        :returns: [result_shape, av_result_shape] — shapes of the cross-spectrum
                  / coherence array and of the per-channel average array.
        """
        # this is useless here unless the input could actually be a 2D timeseries
        nchan = numpy.where(
            len(input_shape) > 2, input_shape[2], input_shape[1])
        seg_tpts = segment_length / sample_period
        seg_shift_tpts = segment_shift / sample_period
        tpts = numpy.where(epoch_length > 0.0, epoch_length / sample_period,
                           input_shape[0])
        nfreq = numpy.min(
            [max_freq, numpy.floor((seg_tpts + zeropad) / 2.0) + 1])
        #nep = int(numpy.floor(input_shape[0] / epoch_length))
        nseg = int(numpy.floor((tpts - seg_tpts) / seg_shift_tpts) + 1)
        if not average_segments:
            # Keep the per-segment axis in the result.
            result_shape = (nchan, nchan, nfreq, nseg)
            av_result_shape = (nchan, nfreq, nseg)
        else:
            result_shape = (nchan, nchan, nfreq)
            av_result_shape = (nchan, nfreq)

        return [result_shape, av_result_shape]

    def result_size(self, input_shape, max_freq, epoch_length, segment_length,
                    segment_shift, sample_period, zeropad, average_segments):
        """
        Returns the storage size in Bytes of the main result (complex array) of
        the ComplexCoherence
        """
        # complex128 entries: 2 doubles * 8 bytes each.
        result_size = numpy.prod(
            self.result_shape(input_shape, max_freq, epoch_length,
                              segment_length, segment_shift, sample_period,
                              zeropad,
                              average_segments)[0]) * 2.0 * 8.0  #complex*Bytes
        return result_size

    def extended_result_size(self, input_shape, max_freq, epoch_length,
                             segment_length, segment_shift, sample_period,
                             zeropad, average_segments):
        """
        Returns the storage size in Bytes of the extended result of the
        ComplexCoherence. That is, it includes storage of the evaluated
        ComplexCoherence attributes such as ...
        """
        result_shape = self.result_shape(input_shape, max_freq, epoch_length,
                                         segment_length, segment_shift,
                                         sample_period, zeropad,
                                         average_segments)[0]
        result_size = self.result_size(input_shape, max_freq, epoch_length,
                                       segment_length, segment_shift,
                                       sample_period, zeropad,
                                       average_segments)
        extend_size = result_size * 2.0  #Main arrays: cross spectrum and complex coherence
        extend_size = extend_size + result_shape[2] * 8.0  #Frequency
        extend_size = extend_size + 8.0  # Epoch length
        extend_size = extend_size + 8.0  # Segment length
        return extend_size
def launch(self, view_model):
    # type: (FFTAdapterModel) -> [FourierSpectrumIndex]
    """
    Launch algorithm and build results.

    The input TimeSeries is processed in blocks along dimension 2 (nodes) so
    that memory usage stays bounded by ``self.memory_factor``; each block's FFT
    is appended to the result H5 file.

    :param view_model: the ViewModel keeping the algorithm inputs
    :returns: the fourier spectrum for the specified time series, or None when
              the input was too short to produce any spectrum
    :rtype: `FourierSpectrumIndex`
    """
    fft_index = FourierSpectrumIndex()
    fft_index.fk_source_gid = view_model.time_series.hex

    # Guard against memory_factor > input_shape[2], which would make block_size 0
    # and raise ZeroDivisionError below; fall back to one-node blocks.
    block_size = max(1, int(math.floor(self.input_shape[2] / self.memory_factor)))
    blocks = int(math.ceil(self.input_shape[2] / block_size))

    input_time_series_h5 = h5.h5_file_for_index(self.input_time_series_index)

    dest_path = h5.path_for(self.storage_path, FourierSpectrumH5, fft_index.gid)
    spectra_file = FourierSpectrumH5(dest_path)
    spectra_file.gid.store(uuid.UUID(fft_index.gid))
    spectra_file.source.store(uuid.UUID(self.input_time_series_index.gid))

    # ------------- NOTE: Assumes 4D, Simulator timeSeries. --------------
    node_slice = [slice(self.input_shape[0]), slice(self.input_shape[1]), None,
                  slice(self.input_shape[3])]

    # ---------- Iterate over slices and compose final result ------------
    small_ts = TimeSeries()
    small_ts.sample_period = input_time_series_h5.sample_period.load()

    for block in range(blocks):
        # Current node range; the last block may be shorter than block_size.
        node_slice[2] = slice(block * block_size,
                              min([(block + 1) * block_size, self.input_shape[2]]), 1)
        small_ts.data = input_time_series_h5.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_result = self.algorithm.evaluate()

        if blocks <= 1 and len(partial_result.array_data) == 0:
            self.add_operation_additional_info(
                "Fourier produced empty result (most probably due to a very short input TimeSeries).")
            # Close open H5 handles before bailing out, so no file stays locked.
            input_time_series_h5.close()
            spectra_file.close()
            return None
        spectra_file.write_data_slice(partial_result)

    # The final dimensionality only depends on the fully-written array.
    fft_index.ndim = len(spectra_file.array_data.shape)
    input_time_series_h5.close()

    # Mirror algorithm configuration and last partial result into the index.
    fft_index.windowing_function = self.algorithm.window_function
    fft_index.segment_length = self.algorithm.segment_length
    fft_index.detrend = self.algorithm.detrend
    fft_index.frequency_step = partial_result.freq_step
    fft_index.max_frequency = partial_result.max_freq

    spectra_file.segment_length.store(self.algorithm.segment_length)
    spectra_file.windowing_function.store(str(self.algorithm.window_function))
    spectra_file.close()

    self.log.debug("partial segment_length is %s" % (str(partial_result.segment_length)))
    return fft_index