Example #1
def remove_spikes(data, FSAMP, TH=0.005):
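    """
    Remove spike artifacts from `data`, sampled at FSAMP Hz: samples whose
    smoothed, min-max normalized first difference exceeds TH are dropped,
    and the signal is rebuilt by linear interpolation on a uniform time grid.
    Returns the new time axis and the despiked signal.
    """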
    t = np.arange(len(data)) * 1 / FSAMP
    spikes = abs(np.diff(data))
    spikes = smoothGaussian(spikes, FSAMP)
    spikes = (spikes - np.min(spikes)) / (np.max(spikes) - np.min(spikes))
    indexes_spikes = np.array(np.where(spikes <= TH))[0]
    data_in = data[indexes_spikes]
    t_in = t[indexes_spikes]
    f = interp1d(t_in, data_in)
    t_out = np.arange(t_in[0], t_in[-1], 1 / FSAMP)
    data_out = f(t_out)
    return t_out, data_out
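
A minimal usage sketch (assuming numpy as np, scipy.interpolate.interp1d and the smoothGaussian helper used above are in scope; the synthetic signal and spike positions are made up for illustration):

import numpy as np

FSAMP = 100                                  # assumed sampling rate, Hz
t = np.arange(0, 10, 1 / FSAMP)
signal = 2 + 0.01 * t                        # slowly drifting synthetic baseline
signal[[200, 450, 700]] += 5.0               # inject artificial spikes

t_clean, signal_clean = remove_spikes(signal, FSAMP, TH=0.005)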
Example #2
def estimate_drivers(t_gsr,
                     gsr,
                     labs,
                     T1=0.75,
                     T2=2,
                     MX=1,
                     DELTA_PEAK=0.02,
                     k_near=5,
                     grid_size=5,
                     s=0.2):
    """
    TIME_DRV, DRV, PH_DRV, TN_DRV, LABS = estimate_drivers(TIME_GSR, GSR, LABS, T1, T2, MX, DELTA_PEAK):

    Estimates the various driving components of a GSR signal.
    The IRF is a bateman function defined by the gen_bateman function.
    T1, T2, MX and DELTA_PEAK are modificable parameters (optimal 0.75, 2, 1, 0.02)
    k_near and grid_size are optional parameters, relative to the process
    s= t in seconds of gaussian smoothing
    """
    FS = int(round(1 / (t_gsr[1] - t_gsr[0])))

    #======================
    # step 1: DECONVOLUTION
    #======================

    # generating bateman function and tailored gsr
    bateman, t_bat, gsr_in = gen_bateman(MX, T1, T2, FS, gsr)
    L = len(bateman[0:np.argmax(bateman)])
    # deconvolution
    driver, residuals = spy.deconvolve(gsr_in, bateman)
    driver = driver * FS
    # gaussian smoothing (window of s seconds, 200 ms by default)
    degree = int(np.ceil(s * FS))
    driver = smoothGaussian(driver, degree)

    # generating times (one sample per driver sample, avoiding float drift)
    t_driver = np.arange(len(driver)) / FS - L / FS + t_gsr[0]

    driver = driver[L * 2:]
    t_driver = t_driver[L * 2:]

    mask = (t_gsr >= t_driver[0]) & (t_gsr <= t_driver[-1])
    labs = labs[mask]

    #======================
    # step 2: IDENTIFICATION OF INTER IMPULSE SECTIONS
    #======================
    # peak detection
    # we will use "our" peakdet algorithm
    #    DELTA_PEAK = np.median(abs(np.diff(driver, n=2)))
    max_driv, min_driv_temp = peakdet(driver, DELTA_PEAK)

    DELTA_MIN = DELTA_PEAK / 4
    max_driv_temp, min_driv = peakdet(driver, DELTA_MIN)

    # check alternance algorithm based on mean>0
    maxmin = np.zeros(len(driver))
    maxmin[(max_driv[:, 0]).astype(np.uint32)] = 1
    maxmin[(min_driv[:, 0]).astype(np.uint32)] = -1
    '''
                OLD CODE
    index=2
    prev=1
    markers= np.zeros(len(driver))
    while (index<len(maxmin)):
        if maxmin[index]==-1:
            portion = maxmin[prev+1: index-1]
            if np.mean(portion)<=0: # there is not a maximum between two mins
                markers[prev] = markers[prev] + 1
                markers[index] = markers[index] - 1
            prev=index
        index +=1

    start = np.arange(len(markers))[markers==1]
    end = np.arange(len(markers))[markers==-1]

    # create array of interimpulse indexes
    inter_impulse_indexes = np.array([0])
    for i in range(len(start)):
        inter_impulse_indexes = np.r_[inter_impulse_indexes, range(start[i], end[i]+1)]
    inter_impulse_indexes = np.r_[inter_impulse_indexes, len(markers)-1]
    '''
    inter_impulse_indexes, min_idx = get_interpolation_indexes(max_driv,
                                                               driver,
                                                               n=k_near)

    inter_impulse = driver[inter_impulse_indexes.astype(int)]
    t_inter_impulse = t_driver[inter_impulse_indexes.astype(int)]

    #======================
    # ESTIMATION OF THE TONIC DRIVER
    #======================
    # interpolation on a time grid of grid_size seconds (5 s by default)
    t_inter_impulse_grid = np.arange(t_driver[0], t_driver[-1], grid_size)

    # estimating values on the time-grid
    inter_impulse_10 = np.array([driver[0]])

    ind_end = 0
    for index in range(1, len(t_inter_impulse_grid) - 1):
        ind_start = np.argmin(
            abs(t_inter_impulse - t_inter_impulse_grid[index - 1]))
        ind_end = np.argmin(
            abs(t_inter_impulse - t_inter_impulse_grid[index + 1]))

        if ind_end > ind_start:
            value = np.mean(inter_impulse[ind_start:ind_end])
        else:
            value = inter_impulse[ind_start]
        inter_impulse_10 = np.r_[inter_impulse_10, value]

    inter_impulse_10 = np.r_[inter_impulse_10,
                             np.mean(inter_impulse[ind_end:])]

    t_inter_impulse_grid = np.r_[t_inter_impulse_grid, t_driver[-1]]
    inter_impulse_10 = np.r_[inter_impulse_10, driver[-1]]

    f = interp1d(t_inter_impulse_grid, inter_impulse_10, kind='cubic')

    tonic_driver = f(t_driver)

    phasic_driver = driver - tonic_driver

    return t_driver, driver, phasic_driver, tonic_driver, labs
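
For reference, a Bateman-shaped IRF is a difference of two exponentials. The sketch below only illustrates the assumed shape that a gen_bateman-like helper would produce (peak rescaled to MX); it is not the actual gen_bateman used above, which also returns the tailored GSR:

import numpy as np

def bateman_sketch(MX, T1, T2, FS, duration=20):
    # illustrative bi-exponential impulse response, peak rescaled to MX
    t_bat = np.arange(0, duration, 1.0 / FS)
    irf = np.exp(-t_bat / T2) - np.exp(-t_bat / T1)   # T2 > T1: slow decay minus fast rise
    irf = MX * irf / np.max(irf)
    return irf, t_bat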
Example #3
def estimate_drivers(t_gsr, gsr, T1=0.75, T2=2, MX=1, DELTA_PEAK=0.02, FS=None, k_near=5, grid_size=5, s=0.2):
    """
    TIME_DRV, DRV, PH_DRV, TN_DRV = estimate_drivers(TIME_GSR, GSR, T1, T2, MX, DELTA_PEAK):

    Estimates the various driving components of a GSR signal.
    The IRF is a bateman function defined by the gen_bateman function.
    T1, T2, MX and DELTA_PEAK are modificable parameters (optimal 0.75, 2, 1, 0.02)
    k_near and grid_size are optional parameters, relative to the process
    s= t in seconds of gaussian smoothing
    """
    if FS is None:
        FS = 1 / (t_gsr[1] - t_gsr[0])

    #======================
    # step 1: DECONVOLUTION
    #======================

    # generating bateman function and tailored gsr
    bateman, t_bat, gsr_in = gen_bateman(MX, T1, T2, FS, gsr)
    L = len(bateman[0:np.argmax(bateman)])
    # deconvolution
    driver, residuals = spy.deconvolve(gsr_in, bateman)
    driver = driver * FS
    # gaussian smoothing (window of s seconds, 200 ms by default)
    degree = int(np.ceil(s * FS))
    driver = smoothGaussian(driver, degree)

    # generating times (one sample per driver sample, avoiding float drift)
    t_driver = np.arange(len(driver)) / FS - L / FS + t_gsr[0]

    driver = driver[L * 2:]
    t_driver = t_driver[L * 2:]

    #======================
    # step 2: IDENTIFICATION OF INTER IMPULSE SECTIONS
    #======================
    # peak detection
    # we will use "our" peakdet algorithm
#    DELTA_PEAK = np.median(abs(np.diff(driver, n=2)))
    max_driv, min_driv_temp = peakdet(driver, DELTA_PEAK)

    DELTA_MIN = DELTA_PEAK / 4
    max_driv_temp, min_driv = peakdet(driver, DELTA_MIN)

    # check alternance algorithm based on mean > 0
    maxmin = np.zeros(len(driver))
    maxmin[(max_driv[:, 0]).astype(np.uint32)] = 1
    maxmin[(min_driv[:, 0]).astype(np.uint32)] = -1

    '''
                OLD CODE
    index=2
    prev=1
    markers= np.zeros(len(driver))
    while (index<len(maxmin)):
        if maxmin[index]==-1:
            portion = maxmin[prev+1: index-1]
            if np.mean(portion)<=0: # there is not a maximum between two mins
                markers[prev] = markers[prev] + 1
                markers[index] = markers[index] - 1
            prev=index
        index +=1

    start = np.arange(len(markers))[markers==1]
    end = np.arange(len(markers))[markers==-1]

    # create array of interimpulse indexes
    inter_impulse_indexes = np.array([0])
    for i in range(len(start)):
        inter_impulse_indexes = np.r_[inter_impulse_indexes, range(start[i], end[i]+1)]
    inter_impulse_indexes = np.r_[inter_impulse_indexes, len(markers)-1]
    '''
    inter_impulse_indexes, min_idx = get_interpolation_indexes(max_driv, driver, n=k_near)

    inter_impulse = driver[inter_impulse_indexes.astype(int)]
    t_inter_impulse = t_driver[inter_impulse_indexes.astype(int)]

    #======================
    # ESTIMATION OF THE TONIC DRIVER
    #======================
    # interpolation on a time grid of grid_size seconds (5 s by default)
    t_inter_impulse_grid = np.arange(t_driver[0], t_driver[-1], grid_size)

    # estimating values on the time-grid
    inter_impulse_10 = np.array([driver[0]])

    ind_end = 0
    for index in range(1, len(t_inter_impulse_grid) - 1):
        ind_start = np.argmin(abs(t_inter_impulse - t_inter_impulse_grid[index - 1]))
        ind_end = np.argmin(abs(t_inter_impulse - t_inter_impulse_grid[index + 1]))

        if ind_end > ind_start:
            value = np.mean(inter_impulse[ind_start:ind_end])
        else:
            value = inter_impulse[ind_start]
        inter_impulse_10 = np.r_[inter_impulse_10, value]

    inter_impulse_10 = np.r_[inter_impulse_10, np.mean(inter_impulse[ind_end:])]

    t_inter_impulse_grid = np.r_[t_inter_impulse_grid, t_driver[-1]]
    inter_impulse_10 = np.r_[inter_impulse_10, driver[-1]]


    f = interp1d(t_inter_impulse_grid, inter_impulse_10, kind='cubic')

    tonic_driver = f(t_driver)

    phasic_driver = driver - tonic_driver

    return t_driver, driver, phasic_driver, tonic_driver
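
A minimal call sketch for this variant (t_gsr and gsr are assumed to hold a recorded GSR session; FS=32 is a placeholder sampling rate, and the module helpers used above, gen_bateman, peakdet, smoothGaussian and get_interpolation_indexes, must be in scope):

t_drv, driver, phasic_drv, tonic_drv = estimate_drivers(t_gsr, gsr, FS=32)

# the decomposition is additive: np.allclose(driver, tonic_drv + phasic_drv)
# holds up to numerical error, since the phasic driver is driver minus tonic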