def dff(t, data3D, dimOrd, method, tBaseMin=None, tBaseMax=None):
    """Compute dF/F of a 3D dataset over a baseline time window.

    :param t: 1D array of timestamps matching the sample axis
    :param data3D: 3D data array in dimension order `dimOrd`
    :param dimOrd: dimension-order string of `data3D` (e.g. 'rps')
    :param method: 'raw' (no-op), 'dff_session' (per channel over all trials),
        or 'dff_trial' (per channel and trial)
    :param tBaseMin: lower bound (exclusive) of the baseline window; defaults to min(t) - 1
    :param tBaseMax: upper bound (inclusive) of the baseline window; defaults to max(t)
    :return: array of the same shape and dimension order as `data3D`
    :raises ValueError: for an unrecognized `method`
    """
    # Raw data is returned untouched (note: the original array, not a copy)
    if method == 'raw':
        return data3D

    # Work on a copy in canonical (process, sample, repetition) order
    canonOrd = 'psr'
    dataCanon = numpy_transpose_byorder(data3D.copy(), dimOrd, canonOrd)

    # Default baseline window spans the whole time axis
    if tBaseMin is None:
        tBaseMin = np.min(t) - 1
    if tBaseMax is None:
        tBaseMax = np.max(t)

    baselineMask = np.logical_and(t > tBaseMin, t <= tBaseMax)

    nCh, _, nTr = dataCanon.shape

    if method == 'dff_session':
        # One baseline per channel, shared across trials
        for iCh in range(nCh):
            dataCanon[iCh] = dff_func(dataCanon[iCh], baselineMask)
    elif method == 'dff_trial':
        # Independent baseline per (channel, trial) pair
        for iTr in range(nTr):
            for iCh in range(nCh):
                dataCanon[iCh, :, iTr] = dff_func(dataCanon[iCh, :, iTr],
                                                  baselineMask)
    else:
        raise ValueError('Unexpected method', method)

    # Restore the caller's dimension order
    return numpy_transpose_byorder(dataCanon, canonOrd, dimOrd)
Esempio n. 2
0
 def iterator(self):
     """Yield (dataList, settingsDict) pairs, one per iteration unit.

     The split depends on which axes appear in self.dimOrderTrg:
     'r' iterates over entries of self.dataLst, 'p' iterates over the
     process axis (augmenting each 1D slice back to a fake 'ps' shape).
     The settings dict is always empty here.
     """
     iterTrials = "r" in self.dimOrderTrg
     iterProcesses = "p" in self.dimOrderTrg

     if iterTrials and iterProcesses:
         # One yield per (trial, process) pair
         for data in self.dataLst:
             for iProc in range(self.shapeDict["p"]):
                 yield [
                     numpy_transpose_byorder(data[iProc],
                                             "s",
                                             "ps",
                                             augment=True)
                 ], {}
     elif iterTrials:
         # One yield per trial, data passed through unchanged
         for data in self.dataLst:
             yield [data], {}
     elif iterProcesses:
         # One yield per process, collecting that process over all trials
         for iProc in range(self.shapeDict["p"]):
             yield [
                 numpy_transpose_byorder(data[iProc],
                                         "s",
                                         "ps",
                                         augment=True)
                 for data in self.dataLst
             ], {}
     else:
         # Nothing to iterate: the whole dataset in a single yield
         yield self.dataLst, {}
Esempio n. 3
0
def cross_corr_3D(data, settings):
    '''
    Compute cross-correlation of multivariate dataset for a fixed lag

    :param data: 3D matrix in ('r', 'p', 's') dimension order
    :param settings: A dictionary. settings['lag'] is the fixed lag (in timesteps)
        at which to correlate; remaining entries are forwarded to
        preprocess_3D and corr_2D (e.g. the estimator choice).
    :return: A matrix [nSource x nTarget] of lagged cross-correlations
    :raises ValueError: if the number of timesteps is not larger than the lag
    '''

    # Transpose dataset into comfortable form
    dataOrd = numpy_transpose_byorder(data, 'rps', 'psr')

    # Extract parameters
    lag = settings['lag']
    nNode, nTime = dataOrd.shape[:2]

    # Check that number of timesteps is sufficient to estimate lagMax
    if nTime <= lag:
        raise ValueError('lag', lag,
                         'cannot be estimated for number of timesteps', nTime)

    # Past (xx) and lag-shifted future (yy) segments of each node
    xx = preprocess_3D(dataOrd[:, :nTime - lag], settings)
    yy = preprocess_3D(dataOrd[:, lag:], settings)

    # Only interested in x-y correlations, crop x-x and y-y
    return corr_2D(xx, yy, settings=settings)[:nNode, nNode:]
Esempio n. 4
0
def cross_mi_3D(data, settings):
    """Pairwise mutual information between all processes at a fixed lag.

    :param data: 3D array in ('r', 'p', 's') dimension order
    :param settings: dictionary; settings['lag'] gives the lag in timesteps
    :return: [nProcess x nProcess] matrix of MI values (NaN if too few samples)
    :raises ValueError: if the number of timesteps is not larger than the lag
    """
    nTrial, nProcess, nSample = data.shape

    # Too few samples to estimate anything — return an all-NaN matrix
    if nTrial * nSample < 2 * nProcess:
        return np.full((nProcess, nProcess), np.nan)

    lag = settings['lag']

    # The lag must fit inside the available timesteps
    if nSample <= lag:
        raise ValueError('lag', lag, 'cannot be estimated for number of timesteps', nSample)

    # Canonicalize to (p, s, r), then flatten samples and trials per process
    dataOrd = numpy_transpose_byorder(data, 'rps', 'psr')
    xx = numpy_merge_dimensions(dataOrd[:, :nSample - lag], 1, 3)
    yy = numpy_merge_dimensions(dataOrd[:, lag:], 1, 3)

    rez = np.zeros((nProcess, nProcess))
    if lag > 0:
        # Lagged MI is directional: fill the full matrix
        for iSrc in range(nProcess):
            for iTrg in range(nProcess):
                rez[iSrc, iTrg] = ee.mi(xx[iSrc], yy[iTrg])
    else:
        # Zero lag makes MI symmetric: compute the upper triangle and mirror it
        for iSrc in range(nProcess):
            for iTrg in range(iSrc, nProcess):
                miVal = ee.mi(xx[iSrc], yy[iTrg])
                rez[iSrc, iTrg] = miVal
                rez[iTrg, iSrc] = miVal

    return rez
Esempio n. 5
0
    def set_data(self, data, dimOrderSrc, timeWindow=None, zscoreDim=None):
        """Store a dataset in canonical dimension order, optionally z-scored.

        :param data: input array in dimension order `dimOrderSrc`
        :param dimOrderSrc: dimension-order string of `data`
        :param timeWindow: stored as-is on the instance for later use
        :param zscoreDim: dimension(s) along which to z-score, or None to skip
        """
        # Reorder into canonical form; absent dimensions become fake axes
        canonData = numpy_transpose_byorder(data,
                                            dimOrderSrc,
                                            self.dimOrderCanon,
                                            augment=True)

        self.timeWindow = timeWindow

        # Z-score the whole array along the requested dimension(s), if any
        self.data = zscore_dim_ord(canonData, self.dimOrderCanon, zscoreDim)
def _preprocess_mar_inp(data, inp, nHist):
    """Prepare data and exogenous input for MAR model fitting.

    Splits `data` into history/target pairs, flattens the input term to
    (r*s, p) rows, and drops any rows containing NaN in either part.
    """
    x, y = splitter.split3D(data, nHist)

    assert inp.ndim == 3, "Input matrix must be a 3D matrix"
    assert np.prod(inp.shape) != 0, "Input matrix is degenerate"
    nTr, _, nT = data.shape
    nTrInp, _, nTInp = inp.shape
    assert nTr == nTrInp, "Input shape must be consistent with data shape"
    assert nT == nTInp, "Input shape must be consistent with data shape"

    # Convert input into the form (rps) -> (r*s, p), dropping the first nHist samples
    inpCanon = numpy_transpose_byorder(inp, 'rps', 'rsp')
    u = numpy_merge_dimensions(inpCanon[:, nHist:], 0, 2)

    # Discard any rows that carry NaN in the data or the input
    return drop_nan_rows([x, y, u])
Esempio n. 7
0
    def _plain_iterator(self, data=None):
        '''
        Algorithm:
            1. Determine iterable axis sizes and settings sizes, list values
            2. Construct outer product of those values
            3. Iterate over values
              3.1 For each iteration, split values into data indices and settings params
              3.2 Obtain data slice for given indices
              3.3 Construct settings dict for given values
              3.4 Yield

        :param data: optional array to iterate over; defaults to self.data
        :return: Data slice for current iteration, as well as parameter value dictionary
        '''
        if data is None:
            data = self.data

        # Axes of the source order that are NOT swept over; the slice taken
        # below is in this reduced order before being augmented back
        dimOrderSrcRemainder = unique_subtract(self.dimOrderSrc,
                                               self.dimOrderTrgPlainSweep)

        # Nothing to sweep: single yield of the full data with empty settings
        if len(self.iterInternalShape) == 0:
            yield data, {}
        else:
            # Mixed-radix counter over all swept axis sizes and settings sizes
            outerIterator = non_uniform_base_arithmetic_iterator(
                self.iterInternalShape)

            for iND in outerIterator:
                # First nAxisPlainIter digits index data axes; the rest index settings
                iNDData = iND[:self.nAxisPlainIter]
                iNDSettings = iND[self.nAxisPlainIter:]

                # Take data along iterated axes, and augment fake axes instead, so that data is always the same shape
                dataThis = numpy_take_all(data, self.iterPlainAxis, iNDData)
                dataThis = numpy_transpose_byorder(dataThis,
                                                   dimOrderSrcRemainder,
                                                   self.dimOrderSrc,
                                                   augment=True)

                if self.settingsSweep is None:
                    yield dataThis, {}
                else:
                    # Pair each settings digit with its (key, values) entry;
                    # NOTE(review): relies on settingsSweep preserving insertion
                    # order to match iterInternalShape — confirm upstream
                    extraSettings = {
                        k: v[iSett]
                        for iSett, (
                            k,
                            v) in zip(iNDSettings, self.settingsSweep.items())
                    }
                    yield dataThis, extraSettings
Esempio n. 8
0
    def read_neuro_files(self):
        """Load all neuronal .mat files listed in the 'neuro' metadata frame.

        Populates self.dataNeuronal with canonically-ordered (and, where
        applicable, normalized) timecourse arrays, and records 'dp' values
        into the metadata frame's 'performance' column.
        """
        if 'neuro' not in self.metaDataFrames.keys():
            print("No Neuro files loaded, skipping reading part")
            return

        nNeuroFiles = self.metaDataFrames['neuro'].shape[0]
        self.dataNeuronal = []

        progBar = IntProgress(min=0, max=nNeuroFiles, description='Read Neuro Data:')
        display(progBar)  # show the progress bar in the notebook

        for idx, row in self.metaDataFrames['neuro'].iterrows():
            matFile = loadmat(row['path'], waitRetry=3)
            for key, val in matFile.items():
                if 'Hit_timecourse' in key:
                    data = numpy_transpose_byorder(val, 'psr', self.dimOrdCanon)
                    # Yasir's data arrives pre-normalized; normalize the rest
                    if 'yasir' not in row['datatype']:
                        data = self._allen_normalize_data(data)
                    self.dataNeuronal += [data]
                elif key == 'dp':
                    self.metaDataFrames['neuro'].at[idx, 'performance'] = val
            progBar.value += 1
Esempio n. 9
0
def corr_3D(data, settings):
    """Correlation matrix of a 3D dataset.

    :param data: 3D array in ('r', 'p', 's') dimension order
    :param settings: options forwarded to preprocess_3D and corr_2D
    :return: result of corr_2D on the preprocessed, canonically-ordered data
    """
    # Reorder to ('p', 's', 'r') and preprocess before correlating
    flatData = preprocess_3D(numpy_transpose_byorder(data, 'rps', 'psr'),
                             settings)
    return corr_2D(flatData, settings=settings)