def launch(self, time_series):
    """
    Launch algorithm and build results.

    Iterates over the state-variable axis of the input, cross-correlating one
    state variable at a time so only a slice of the (potentially large) time
    series is held in memory, and streams each partial result into the output
    datatype.

    :param time_series: the input time series for which the correlation should be computed
    :returns: the cross correlation for the given time series
    :rtype: `CrossCorrelation`
    """
    ##--------- Prepare a CrossCorrelation object for result ------------##
    cross_corr = CrossCorrelation(source=time_series, storage_path=self.storage_path)

    # Both node axes of the result are labelled by the input's space axis.
    # These depend only on the input time series, so set them once up front
    # instead of re-assigning them on every loop iteration.
    cross_corr.labels_ordering[1] = time_series.labels_ordering[2]
    cross_corr.labels_ordering[2] = time_series.labels_ordering[2]

    # Slice selector: all time points, one state variable at a time (filled in
    # inside the loop below), all nodes, all modes.
    node_slice = [slice(self.input_shape[0]), None,
                  slice(self.input_shape[2]), slice(self.input_shape[3])]

    ##---------- Iterate over slices and compose final result ------------##
    small_ts = TimeSeries(use_storage=False)
    small_ts.sample_period = time_series.sample_period
    partial_cross_corr = None
    for var in range(self.input_shape[1]):
        node_slice[1] = slice(var, var + 1)
        small_ts.data = time_series.read_data_slice(tuple(node_slice))
        self.algorithm.time_series = small_ts
        partial_cross_corr = self.algorithm.evaluate()
        cross_corr.write_data_slice(partial_cross_corr)

    if partial_cross_corr is not None:
        # The offset (lag) axis is identical across state variables, so it is
        # enough to take it from the last computed partial result.
        cross_corr.time = partial_cross_corr.time

    cross_corr.close_file()
    return cross_corr
def create_crosscorrelation(self, time_series):
    """
    :returns: `CrossCorrelation` stored entity.
    """
    operation, _, storage_path = self.__create_operation()

    # Random payload standing in for a real partial correlation result.
    fake_partial = CrossCorrelation(array_data=numpy.random.random((10, 10, 10, 10, 10)),
                                    use_storage=False)

    # Build the stored entity and flush the fake data into its file.
    stored_entity = CrossCorrelation(source=time_series, storage_path=storage_path, time=range(10))
    stored_entity.write_data_slice(fake_partial)
    stored_entity.close_file()

    # Persist through a regular operation pre-launch so the entity gets indexed.
    adapter_instance = StoreAdapter([stored_entity])
    OperationService().initiate_prelaunch(operation, adapter_instance, {})
    return stored_entity
def _compute_cross_correlation(self, small_ts, input_ts_h5):
    """
    Cross-correlate two one-dimensional arrays.
    Return a CrossCorrelation datatype with result.
    """
    # Result axes: (tpts, nodes, nodes, state-variables, modes)
    out_shape = self._result_shape(small_ts.data.shape)
    self.log.info("result shape will be: %s" % str(out_shape))
    out = numpy.zeros(out_shape)

    # TODO: For region level, 4s, 2000Hz, this takes ~3hours...(which makes node_coherence seem positively speedy
    # Probably best to add a keyword for offsets, so we just compute +- some "small" range...
    # One inter-node correlation, across offsets, for each state-var & mode.
    for mode_idx in range(out_shape[4]):
        for var_idx in range(out_shape[3]):
            # Zero-mean each node's signal (broadcast over the time axis)
            # before correlating.
            signals = input_ts_h5.data[:, var_idx, :, mode_idx]
            signals = signals - signals.mean(axis=0)
            # TODO: Work out a way around the 4 level loop:
            for node_a in range(out_shape[1]):
                for node_b in range(out_shape[2]):
                    out[:, node_a, node_b, var_idx, mode_idx] = correlate(
                        signals[:, node_a], signals[:, node_b], mode="same")

    self.log.debug("result")
    self.log.debug(narray_describe(out))

    # Lag axis centred on zero, in units of the sampling period.
    offset = (small_ts.sample_period *
              numpy.arange(-numpy.floor(out_shape[0] / 2.0),
                           numpy.ceil(out_shape[0] / 2.0)))

    return CrossCorrelation(source=small_ts, array_data=out, time=offset)