Example 1
def epochs_distributed(ts,
                       variability=None,
                       threshold=0.0,
                       minlength=1.0,
                       plot=True):
    """Same as `epochs()`, but computes channels in parallel for speed.

    (Note: This requires an IPython cluster to be started first, 
           e.g. on a workstation type 'ipcluster start')

    Identify "stationary" epochs within a time series, based on a 
    continuous measure of variability.
    Epochs are defined to contain the points of minimal variability, and to 
    extend as wide as possible with variability not exceeding the threshold.

    Args:
      ts  Timeseries of m variables, shape (n, m). 
      variability  (optional) Timeseries of shape (n, m, q),  giving q scalar 
                   measures of the variability of timeseries `ts` near each 
                   point in time. (if None, we will use variability_fp())
                   Epochs require the mean of these to be below the threshold.
      threshold   The maximum variability permitted in stationary epochs.
      minlength   Shortest acceptable epoch length (in seconds)
      plot  bool  Whether to display the output

    Returns: (variability, allchannels_epochs) 
      variability: as above
      allchannels_epochs: (list of) list of tuples
      For each variable, a list of tuples (start, end) that give the 
      starting and ending indices of stationary epochs.
      (epochs are inclusive of start point but not the end point)
    """
    import distob
    if ts.ndim == 1:
        ts = ts[:, np.newaxis]
    if variability is None:
        dts = distob.scatter(ts, axis=1)
        vepochs = distob.vectorize(epochs)
        results = vepochs(dts, None, threshold, minlength, plot=False)
    else:

        def f(pair):
            return epochs(pair[0], pair[1], threshold, minlength, plot=False)

        allpairs = [(ts[:, i], variability[:, i]) for i in range(ts.shape[1])]
        vf = distob.vectorize(f)
        results = vf(allpairs)
    vars, allchannels_epochs = zip(*results)
    variability = distob.hstack(vars)
    if plot:
        _plot_variability(ts, variability, threshold, allchannels_epochs)
    return (variability, allchannels_epochs)
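A possible way to call it (a sketch only; the threshold and minlength values are illustrative, and ts is assumed to be an nsim Timeseries of shape (n, m), with an IPython cluster already running):

variability, channel_epochs = epochs_distributed(ts, threshold=0.5,
                                                 minlength=2.0, plot=False)
# channel_epochs[i] lists (start, end) index pairs of stationary epochs for
# channel i; the start index is included, the end index is not.
for i, eps in enumerate(channel_epochs):
    print('channel %d: %d stationary epochs' % (i, len(eps)))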
Example 2
def epochs_distributed(ts, variability=None, threshold=0.0, minlength=1.0, 
                       plot=True):
    """Same as `epochs()`, but computes channels in parallel for speed.

    (Note: This requires an IPython cluster to be started first, 
           e.g. on a workstation type 'ipcluster start')

    Identify "stationary" epochs within a time series, based on a 
    continuous measure of variability.
    Epochs are defined to contain the points of minimal variability, and to 
    extend as wide as possible with variability not exceeding the threshold.

    Args:
      ts  Timeseries of m variables, shape (n, m). 
      variability  (optional) Timeseries of shape (n, m, q),  giving q scalar 
                   measures of the variability of timeseries `ts` near each 
                   point in time. (if None, we will use variability_fp())
                   Epochs require the mean of these to be below the threshold.
      threshold   The maximum variability permitted in stationary epochs.
      minlength   Shortest acceptable epoch length (in seconds)
      plot  bool  Whether to display the output

    Returns: (variability, allchannels_epochs) 
      variability: as above
      allchannels_epochs: (list of) list of tuples
      For each variable, a list of tuples (start, end) that give the 
      starting and ending indices of stationary epochs.
      (epochs are inclusive of start point but not the end point)
    """
    if ts.ndim <= 2:
        return analyses1.epochs_distributed(
                ts, variability, threshold, minlength, plot)
    else:
        return distob.vectorize(analyses1.epochs)(
                ts, variability, threshold, minlength, plot)
Example 3
def variability_fp(ts, freqs=None, ncycles=6, plot=True):
    """Example variability function.
    Gives two continuous, time-resolved measures of the variability of a
    time series, ranging between -1 and 1. 
    The two measures are based on variance of the centroid frequency and 
    variance of the height of the spectral peak, respectively.
    (Centroid frequency meaning the power-weighted average frequency)
    These measures are calculated over sliding time windows of variable size.
    See also: Blenkinsop et al. (2012) The dynamic evolution of focal-onset 
              epilepsies - combining theoretical and clinical observations
    Args:
      ts  Timeseries of m variables, shape (n, m). Assumed constant timestep.
      freqs   (optional) List of frequencies to examine. If None, defaults to
              50 frequency bands ranging 1Hz to 60Hz, logarithmically spaced.
      ncycles  Window size, in number of cycles of the centroid frequency.
      plot  bool  Whether to display the output

    Returns:
      variability   Timeseries of shape (n, m, 2)  
                    variability[:, :, 0] gives a measure of variability 
                    between -1 and 1 based on variance of centroid frequency.
                    variability[:, :, 1] gives a measure of variability 
                    between -1 and 1 based on variance of maximum power.
    """
    if ts.ndim <= 2:
        return analyses1.variability_fp(ts, freqs, ncycles, plot)
    else:
        return distob.vectorize(analyses1.variability_fp)(
                ts, freqs, ncycles, plot)
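A usage sketch (values are illustrative; ts is assumed to be a Timeseries of shape (n, m)): compute the two variability measures once, then reuse them when searching for stationary epochs with epochs().

v = variability_fp(ts, freqs=None, ncycles=6, plot=False)  # shape (n, m, 2)
# Pass the precomputed measures to epochs(); their mean must stay below the
# chosen threshold everywhere inside a stationary epoch.
variability, channel_epochs = epochs(ts, variability=v, threshold=0.3,
                                     minlength=1.0, plot=False)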
Example 4
def epochs_joint(ts, variability=None, threshold=0.0, minlength=1.0,
                 proportion=0.75, plot=True):
    """Identify epochs within a multivariate time series where at least a 
    certain proportion of channels are "stationary", based on a previously 
    computed variability measure.

    (Note: This requires an IPython cluster to be started first, 
     e.g. on a workstation type 'ipcluster start')

    Args:
      ts  Timeseries of m variables, shape (n, m). 
      variability  (optional) Timeseries of shape (n, m),  giving a scalar 
                   measure of the variability of timeseries `ts` near each 
                   point in time. (if None, we will use variability_fp())
      threshold   The maximum variability permitted in stationary epochs.
      minlength   Shortest acceptable epoch length (in seconds)
      proportion  Require at least this fraction of channels to be "stationary"
      plot  bool  Whether to display the output

    Returns: (variability, joint_epochs)
      joint_epochs: list of tuples
      A list of tuples (start, end) that give the starting and ending indices 
      of time epochs that are stationary for at least `proportion` of channels.
      (epochs are inclusive of start point but not the end point)
    """
    if ts.ndim <= 2:
        return analyses1.epochs_joint(
                ts, variability, threshold, minlength, plot)
    else:
        return distob.vectorize(analyses1.epochs_joint)(
                ts, variability, threshold, minlength, plot)
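A usage sketch (threshold and proportion values are illustrative; ts is assumed to be a Timeseries of shape (n, m)):

variability, joint_epochs = epochs_joint(ts, threshold=0.5, minlength=2.0,
                                         proportion=0.9, plot=False)
# joint_epochs lists (start, end) index pairs where at least 90% of channels
# are simultaneously stationary (the end index is excluded).
for start, end in joint_epochs:
    print('jointly stationary from index %d to %d' % (start, end))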
Example 5
def first_return_times(dts, c=None, d=0.0):
    """For an ensemble of time series, return the set of all time intervals
    between successive returns to value c for all instances in the ensemble.
    If c is not given, the default is the mean across all times and across all
    time series in the ensemble.

    Args:
      dts (DistTimeseries)

      c (float): Optional target value (default is the ensemble mean value)

      d (float): Optional min distance from c to be attained between returns

    Returns:
      array of time intervals (Can take the mean of these to estimate the
      expected first return time for the whole ensemble)
    """
    if c is None:
        c = dts.mean()
    vmrt = distob.vectorize(analyses1.first_return_times)
    all_intervals = vmrt(dts, c, d)
    if hasattr(type(all_intervals), '__array_interface__'):
        return np.ravel(all_intervals)
    else:
        return np.hstack([distob.gather(ilist) for ilist in all_intervals])
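A usage sketch, assuming dts is a DistTimeseries (for example the output of a multi-simulation run):

intervals = first_return_times(dts)  # intervals between returns to the mean
# The mean of these intervals estimates the expected first return time for
# the whole ensemble.
print('estimated mean first return time: %g' % intervals.mean())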
Example 6
def epochs(ts, variability=None, threshold=0.0, minlength=1.0, plot=True):
    """Identify "stationary" epochs within a time series, based on a 
    continuous measure of variability.
    Epochs are defined to contain the points of minimal variability, and to 
    extend as wide as possible with variability not exceeding the threshold.

    Args:
      ts  Timeseries of m variables, shape (n, m). 
      variability  (optional) Timeseries of shape (n, m, q),  giving q scalar 
                   measures of the variability of timeseries `ts` near each 
                   point in time. (if None, we will use variability_fp())
                   Epochs require the mean of these to be below the threshold.
      threshold   The maximum variability permitted in stationary epochs.
      minlength   Shortest acceptable epoch length (in seconds)
      plot  bool  Whether to display the output

    Returns: (variability, allchannels_epochs) 
      variability: as above
      allchannels_epochs: (list of) list of tuples
      For each variable, a list of tuples (start, end) that give the 
      starting and ending indices of stationary epochs.
      (epochs are inclusive of start point but not the end point)
    """
    if ts.ndim <= 2:
        return analyses1.epochs_distributed(ts, variability, threshold,
                                            minlength, plot)
    else:
        return distob.vectorize(analyses1.epochs)(ts, variability, threshold,
                                                  minlength, plot)
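A minimal call sketch (values are illustrative; ts is assumed to be a 2-dimensional Timeseries, so the single-node code path is taken):

variability, channel_epochs = epochs(ts, threshold=0.4, minlength=1.0,
                                     plot=False)
n_epochs = sum(len(eps) for eps in channel_epochs)
print('%d stationary epochs found across all channels' % n_epochs)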
Example 7
def variability_fp(ts, freqs=None, ncycles=6, plot=True):
    """Example variability function.
    Gives two continuous, time-resolved measures of the variability of a
    time series, ranging between -1 and 1. 
    The two measures are based on variance of the centroid frequency and 
    variance of the height of the spectral peak, respectively.
    (Centroid frequency meaning the power-weighted average frequency)
    These measures are calculated over sliding time windows of variable size.
    See also: Blenkinsop et al. (2012) The dynamic evolution of focal-onset 
              epilepsies - combining theoretical and clinical observations
    Args:
      ts  Timeseries of m variables, shape (n, m). Assumed constant timestep.
      freqs   (optional) List of frequencies to examine. If None, defaults to
              50 frequency bands ranging 1Hz to 60Hz, logarithmically spaced.
      ncycles  Window size, in number of cycles of the centroid frequency.
      plot  bool  Whether to display the output

    Returns:
      variability   Timeseries of shape (n, m, 2)  
                    variability[:, :, 0] gives a measure of variability 
                    between -1 and 1 based on variance of centroid frequency.
                    variability[:, :, 1] gives a measure of variability 
                    between -1 and 1 based on variance of maximum power.
    """
    if ts.ndim <= 2:
        return analyses1.variability_fp(ts, freqs, ncycles, plot)
    else:
        return distob.vectorize(analyses1.variability_fp)(ts, freqs, ncycles,
                                                          plot)
Example 8
def cwt_distributed(ts, freqs=np.logspace(0, 2), wavelet=cwtmorlet, plot=True):
    """Continuous wavelet transform using distributed computation.
    (Currently just splits the data by channel. TODO split it further.)
    Note: this function requires an IPython cluster to be started first.

    Args:
      ts: Timeseries of m variables, shape (n, m). Assumed constant timestep.
      freqs: list of frequencies (in Hz) to use for the transform.
        (default is 50 frequency bins, logarithmically spaced from 1Hz to 100Hz)
      wavelet: the wavelet to use. May be complex. See scipy.signal.wavelets
      plot: whether to plot time-resolved power spectrum

    Returns: 
      coefs: Continuous wavelet transform output array, shape (n,len(freqs),m)
    """
    if ts.ndim == 1 or ts.shape[1] == 1:
        return cwt(ts, freqs, wavelet, plot)
    import distob
    vcwt = distob.vectorize(cwt)
    coefs = vcwt(ts, freqs, wavelet, plot=False)
    if plot:
        _plot_cwt(ts, coefs, freqs)
    return coefs
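A usage sketch (assumes ts is a Timeseries of shape (n, m) and that an IPython cluster is running), using the default frequency bins and Morlet wavelet:

coefs = cwt_distributed(ts, plot=False)  # shape (n, len(freqs), m)
power = np.abs(coefs) ** 2               # time-resolved power per frequency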
Example 9
def epochs_joint(ts,
                 variability=None,
                 threshold=0.0,
                 minlength=1.0,
                 proportion=0.75,
                 plot=True):
    """Identify epochs within a multivariate time series where at least a 
    certain proportion of channels are "stationary", based on a previously 
    computed variability measure.

    (Note: This requires an IPython cluster to be started first, 
     e.g. on a workstation type 'ipcluster start')

    Args:
      ts  Timeseries of m variables, shape (n, m). 
      variability  (optional) Timeseries of shape (n, m),  giving a scalar 
                   measure of the variability of timeseries `ts` near each 
                   point in time. (if None, we will use variability_fp())
      threshold   The maximum variability permitted in stationary epochs.
      minlength   Shortest acceptable epoch length (in seconds)
      proportion  Require at least this fraction of channels to be "stationary"
      plot  bool  Whether to display the output

    Returns: (variability, joint_epochs)
      joint_epochs: list of tuples
      A list of tuples (start, end) that give the starting and ending indices 
      of time epochs that are stationary for at least `proportion` of channels.
      (epochs are inclusive of start point but not the end point)
    """
    if ts.ndim <= 2:
        return analyses1.epochs_joint(ts, variability, threshold, minlength,
                                      plot)
    else:
        return distob.vectorize(analyses1.epochs_joint)(ts, variability,
                                                        threshold, minlength,
                                                        plot)
Example 10
def periods(dts, phi=0.0):
    """For an ensemble of oscillators, return the set of periods lengths of 
    all successive oscillations of all oscillators.

    An individual oscillation is defined to start and end when the phase 
    passes phi (by default zero) after completing a full cycle.

    If the timeseries of an oscillator phase begins (or ends) exactly at phi, 
    then the first (or last) oscillation will be included.

    Arguments:
      dts (DistTimeseries): where dts.shape[1] is 1 (single output variable
        representing phase) and axis 2 ranges over multiple realizations of
        the oscillator.

      phi=0.0: float
          A single oscillation starts and ends at phase phi (by default zero).
    """
    vperiods = distob.vectorize(analyses1.periods)
    all_periods = vperiods(dts, phi)
    if hasattr(type(all_periods), '__array_interface__'):
        return np.ravel(all_periods)
    else:
        return np.hstack([distob.gather(plist) for plist in all_periods])
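A usage sketch, assuming dts is a DistTimeseries holding one phase variable per realization:

all_periods = periods(dts, phi=0.0)
print('mean period: %g over %d oscillations' %
      (all_periods.mean(), len(all_periods)))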
Example 11
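The snippet below assumes a distributed array ar already exists. One way to obtain it (a sketch; requires distob and a running IPython cluster, e.g. started with 'ipcluster start') is to scatter an ordinary NumPy array across the engines:

import numpy as np
import distob

# Hypothetical data: scatter a local array along axis 1 so that each cluster
# engine holds a block of columns.
a = np.random.randn(20000, 64)
ar = distob.scatter(a, axis=1)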

# you can use the distributed array as if it were local:

print('\nmeans of each column:\n%s' % ar.mean(axis=0))

print('\nbasic slicing:\n%s' % ar[1000:1010, 5:9])

print('\nadvanced slicing:\n%s' %
      ar[np.array([20, 7, 7, 9]), np.array([1, 2, 2, 15])])

# numpy computations (and ufuncs) will automatically be done in parallel:

print('\nparallel computation with distributed arrays: ar - ar \n%s' %
      (ar - ar))

print('\nparallel computation with distributed arrays: np.exp(1.0j * ar)\n%s' %
      np.exp(1.0j * ar))


# functions that expect ordinary arrays can now compute in parallel:

from scipy.signal import decimate
vdecimate = distob.vectorize(decimate)
result = vdecimate(ar, 10, axis=0)
print('\ndecimated ar:\n%s' % result)

# another way to write that:
result = distob.apply(decimate, ar, 10, axis=0)
print('\ndecimated ar:\n%s' % result)
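To bring a distributed result back into a single local array, distob.gather can be used (a brief sketch; result here is the decimated distributed array from above):

local_result = distob.gather(result)
print('\ngathered result shape: %s' % (local_result.shape,))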