def test_correlation_coefficients(self):
    """Build a CorrelationCoefficients from a random 2D array and check
    its shape, summary info and dimension labels."""
    sample = numpy.random.random((13, 14))
    source_ts = time_series.TimeSeries(data=sample, title="test-ts")
    corr = graph.CorrelationCoefficients(source=source_ts, array_data=sample)

    assert (13, 14) == corr.array_data.shape

    info = corr.summary_info()
    assert info['Graph type'] == "CorrelationCoefficients"
    assert info['Source'] == "test-ts"
    assert info['Dimensions'] == corr.labels_ordering

    expected_labels = ("Node", "Node", "State Variable", "Mode")
    assert corr.labels_ordering == expected_labels
def test_correlation_coefficients(self):
    """Verify shape, summary_info contents and dimension labels of a
    CorrelationCoefficients datatype built from a random 2D array."""
    sample = numpy.random.random((13, 14))
    source_ts = time_series.TimeSeries(data=sample, title="test-ts")
    corr = graph.CorrelationCoefficients(source=source_ts, array_data=sample)

    expected_shape = (13, 14)
    self.assertEqual(corr.shape, expected_shape)
    self.assertEqual(corr.array_data.shape, expected_shape)

    info = corr.summary_info
    self.assertEqual(info['Graph type'], "CorrelationCoefficients")
    self.assertEqual(info['Source'], "test-ts")
    self.assertEqual(info['Dimensions'], corr.labels_ordering)

    expected_labels = ["Node", "Node", "State Variable", "Mode"]
    self.assertEqual(corr.labels_ordering, expected_labels)
def evaluate(self):
    """
    Compute the correlation coefficients of a 2D array (tpts x nodes).

    Yields an array of size nodes x nodes x state-variables x modes.

    The time interval over which the correlation coefficients are computed
    is defined by t_start, t_end.
    """
    cls_attr_name = self.__class__.__name__ + ".time_series"
    self.time_series.trait["data"].log_debug(owner=cls_attr_name)

    # Result layout: (nodes, nodes, state-variables, modes)
    input_shape = self.time_series.read_data_shape()
    result_shape = self.result_shape(input_shape)
    LOG.info("result shape will be: %s" % str(result_shape))
    result = numpy.zeros(result_shape)

    # Convert t_start / t_end (physical time) into sample indices.
    # NOTE(review): subtracting sample_period here (rather than the series'
    # start time) looks suspicious — confirm against TimeSeries semantics.
    t_lo = int((1. / self.time_series.sample_period) * (self.t_start - self.time_series.sample_period))
    t_hi = int((1. / self.time_series.sample_period) * (self.t_end - self.time_series.sample_period))
    t_lo = max(t_lo, 0)
    # BUG FIX: the upper index must be clamped *down* to the data length.
    # The original used max(t_hi, input_shape[0]), which forced t_hi to be
    # at least the full series length, so t_end was effectively ignored and
    # the whole series was always used.
    t_hi = min(t_hi, input_shape[0])

    # One correlation-coefficient matrix for each state-variable & mode.
    for mode in range(result_shape[3]):
        for var in range(result_shape[2]):
            current_slice = tuple([slice(t_lo, t_hi + 1), slice(var, var + 1),
                                   slice(input_shape[2]), slice(mode, mode + 1)])
            data = self.time_series.read_data_slice(current_slice).squeeze()
            # corrcoef expects rows = variables, so transpose (time x nodes)
            # into (nodes x time).
            result[:, :, var, mode] = numpy.corrcoef(data.T)

    util.log_debug_array(LOG, result, "result")

    corr_coeff = graph.CorrelationCoefficients(source=self.time_series,
                                               array_data=result,
                                               use_storage=False)
    return corr_coeff