Example #1
def getSG(y, filtwidth=21, filtorder=2, deriv=1):
    if filtwidth < filtorder + 2:
        filtwidth = filtorder + 3
        if filtwidth % 2 == 0:
            filtwidth += 1
        print('WARN: window size reset to {0}'.format(filtwidth))
    return sg.savitzky_golay(y, filtwidth, filtorder, deriv=deriv)
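
All of these snippets call a Savitzky-Golay helper from a local module (imported variously as sg or savitzky_golay) with the classic SciPy-cookbook signature savitzky_golay(y, window_size, order, deriv=0, rate=1). As a rough stand-in, one could wrap scipy.signal.savgol_filter; this is a minimal sketch, not the cookbook code itself:

import numpy as np
from scipy.signal import savgol_filter

def savitzky_golay(y, window_size, order, deriv=0, rate=1):
    # Sketch of the assumed cookbook helper, delegating to SciPy.
    window_size = int(window_size)
    if window_size % 2 == 0:  # savgol_filter needs an odd window length
        window_size += 1
    return savgol_filter(np.asarray(y, dtype=float), window_size, order,
                         deriv=deriv, delta=1.0 / rate)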
Example #2
    def update(self):
        if self._running:
            try:
                img = np.rot90(self._cam.image, 3).astype(
                    np.int16) - self._bckg
                self._viewImg.setImage(img)
                """Construct x and y arrays"""
                crop = self._roi.getArrayRegion(img, self._viewImg)
                horMax, verMax = np.unravel_index(crop.argmax(), crop.shape)
                arr = np.mean(crop, axis=1)

                reg = [
                    self._roi.pos()[0],
                    self._roi.pos()[0] + self._roi.size()[0]
                ]
                xf = np.arange(reg[0], reg[1])
                """Smooth the signal"""
                if self._smoothorder != 0:
                    arr = savitzky_golay(arr, self._smoothwindow,
                                         self._smoothorder)
                """Initial guess for fit parameters"""
                off = np.mean(arr[0:25] + arr[-26:-1]) / 2
                c = xf[-1] - (xf[-1] - xf[0]) / 2
                s = 50
                p0 = [off, np.max(arr), c, s]

                coeff, var_matrix = curve_fit(self.gaus, xf, arr, p0=p0)
                fit = self.gaus(xf, *coeff)

                self._curveRaw.setData(arr)
                self._curveFit.setData(fit)

                # FWHM = 2 * sqrt( 2 * log(2) ) * sigma
                fwhm_f = 2.3548 * coeff[3]
                fwhm_d = len(arr[arr > np.max(arr) / 2])

                self.lab_fwhm_fit.setText(format(fwhm_f, '.4f') + ' px')
                self.lab_fwhm_data.setText(str(fwhm_d) + ' px')
                self.lab_bl_fwhm_fit.setText(
                    format((1e12 * self._mm_pix * fwhm_f * 1e-3) /
                           self._C, '.4f') + ' ps')
                self.lab_bl_fwhm_data.setText(
                    format((1e12 * self._mm_pix * fwhm_d * 1e-3) /
                           self._C, '.4f') + ' ps')
                self.lab_bl_sigma_fit.setText(
                    format((1e12 * self._mm_pix * coeff[3] * 1e-3) /
                           self._C, '.4f') + ' ps')
                self.lab_bl_sigma_data.setText(
                    format(np.std(arr), '.4f') + ' px')

                self._output.setText('')

            except Exception as e:
                logging.error('Frame update failed: ' + str(e))
                self._output.setText(str(e))
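
The fit above relies on self.gaus, which is not shown. Given the initial guess p0 = [offset, amplitude, centre, sigma], it is presumably a Gaussian on a constant offset; a standalone sketch with the parameter order assumed from p0:

import numpy as np

def gaus(x, off, amp, c, s):
    # Gaussian plus constant offset; parameter order matches p0 above.
    return off + amp * np.exp(-(x - c) ** 2 / (2.0 * s ** 2))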
Example #3
def call_peaks(scores, min_dist, iters, window, order):
    peaks = []
    for i in range(iters):
        scores = savitzky_golay(scores, window, order, deriv=0, rate=1)
    med_score = np.median(scores)
    if max(scores) < 6 * med_score:
        return peaks
    peaks, _ = find_peaks(scores, distance=min_dist, height=med_score * 3)
    return peaks
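
A quick way to exercise call_peaks on synthetic data (assuming the savitzky_golay helper above and scipy.signal.find_peaks are in scope):

import numpy as np

x = np.linspace(0, 10, 500)
scores = np.exp(-(x - 3) ** 2 / 0.05) + np.exp(-(x - 7) ** 2 / 0.05)
scores += 0.02 * np.random.rand(x.size)  # small noise floor
peaks = call_peaks(scores, min_dist=50, iters=2, window=11, order=3)
print(x[peaks])  # approximately [3.0, 7.0]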
def create_data(n_per=4, noise=0.15, obs_off=0.33,
                window_size=0, order=4):
    """
    Create synthetic "NDVI-like" data for a fictitious time series. We return
    the original data, noisy data (using IID Gaussian noise), the QA flag as well
    as the time axis.
    
    Missing observations are simulated by drawing a random number between 0 and 1
    and checking against obs_off.
    
    Parameters
    ----------
    n_per : integer
        Observation periodicity in days. By default, every 4 days.

    noise : float
        The noise standard deviation. By default, 0.15.

    obs_off : float
        The threshold to decide on missing observations in the time series.

    window_size : integer, odd
        Window size for savitzky_golay filtering. A large window size will
        lead to larger data gaps by correlating the noise. Set to zero by
        default, which applies no smoothing.

    order : integer
        Order of the savitzky_golay filter. By default, 4.
    
    """
    from savitzky_golay import savitzky_golay
    import numpy as np

    doys = np.arange(1, 365 + 1, n_per)
    ndvi_clean = np.clip(np.sin((doys - 1) / 72.), 0, 1)
    ndvi = np.clip(np.sin(doys / 72.), 0, 1)
    # add Gaussian noise of sd noise
    ndvi = np.random.normal(ndvi, noise, ndvi.shape[0])

    # set the qa flags for each sample to 1 (good data)
    qa_flag = np.ones_like(ndvi).astype(np.int32)
    passer = np.random.rand(ndvi.shape[0])
    if window_size > 0:
        # force odd
        window_size = 2 * (window_size // 2) + 1
        passer = savitzky_golay(passer, window_size=window_size, order=order)
    # assign a proportion of the qa to 0 from an ordering of the smoothed
    # random numbers
    qa_flag[np.argsort(passer)[:int(passer.size * obs_off)]] = 0

    return (doys, ndvi_clean, ndvi, qa_flag)
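
A short usage sketch of create_data (matplotlib assumed available), plotting the clean curve and marking which noisy samples survived the QA screening:

import matplotlib.pyplot as plt

doys, ndvi_clean, ndvi, qa = create_data(n_per=8, noise=0.1, window_size=7)
plt.plot(doys, ndvi_clean, 'k-', label='clean')
plt.plot(doys[qa == 1], ndvi[qa == 1], 'g.', label='kept')
plt.plot(doys[qa == 0], ndvi[qa == 0], 'rx', label='flagged missing')
plt.legend()
plt.show()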
Example #7
def get_split_prep_data(train_start, train_end, test_start, test_end):
    data = np.loadtxt(path_to_dataset)
    data = savitzky_golay(data[:, 1], 11, 3)  # smoothed version
    print("Length of Data", len(data))

    # train data
    print("Creating train data...")

    result = []
    for index in range(train_start, train_end - sequence_length):
        result.append(data[index:index + sequence_length])
    result = np.array(result)  # shape (samples, sequence_length)
    result, result_mean = z_norm(result)

    print("Mean of train data : ", result_mean)
    print("Train data shape  : ", result.shape)

    train = result[train_start:train_end, :]
    np.random.shuffle(train)  # shuffles in-place
    X_train = train[:, :-1]
    y_train = train[:, -1]
    X_train, y_train = dropin(X_train, y_train)

    # test data
    print("Creating test data...")

    result = []
    for index in range(test_start, test_end - sequence_length):
        result.append(data[index:index + sequence_length])
    result = np.array(result)  # shape (samples, sequence_length)
    result, result_mean = z_norm(result)

    print("Mean of test data : ", result_mean)
    print("Test data shape  : ", result.shape)

    X_test = result[:, :-1]
    y_test = result[:, -1]

    print("Shape X_train", np.shape(X_train))
    print("Shape X_test", np.shape(X_test))

    X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 1))
    X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))

    return X_train, y_train, X_test, y_test
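
z_norm and dropin are helpers defined elsewhere in this project. Judging only by how their return values are used here, minimal stand-ins might look like this sketch (the duplication factor in dropin is an assumption):

import numpy as np

def z_norm(result):
    # Normalise the window matrix; return the data and the mean used.
    result_mean = result.mean()
    result = (result - result_mean) / result.std()
    return result, result_mean

def dropin(X, y, n_dup=2):
    # Hypothetical augmentation: sample rows with replacement to
    # enlarge the training set n_dup-fold.
    idx = np.random.randint(0, len(X), size=n_dup * len(X))
    return X[idx], y[idx]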
    def getArgMin(self, Vpp=1, offset=0., samples=1000, readChan="ai0",
                  writeChan="ao0", rate=1000, fig=None, ax=None, filter=51):
        inData, outData = self.syncRamp(Vpp, offset, samples, readChan,
                                        writeChan, rate)
        filteredData = savitzky_golay(outData, filter, 3)
        minimum = min(filteredData)
        inVoltageAtMin = inData[filteredData.argmin()]
        if fig is not None:
            plt.figure(fig)
            ax = plt.gca()
        if fig or ax:
            print("Plotting    ", end="")
            ax.plot(inData, outData, 'k')
            ax.plot(inData, filteredData, 'g')
            ax.plot(inVoltageAtMin, minimum, 'r+')
            plt.draw()
            plt.show(block=False)
            print("[DONE]")

        return minimum, inVoltageAtMin
def getRGBThreshold(im):
    y = np.zeros((3, 256))
    for j in range(3):
        hist = skimage.exposure.histogram(im[:, :, j])
        y[j, hist[1]] = hist[0]

    ymin = np.min(y, axis=0)
    ymin = savitzky_golay(ymin, 11, 3)
    m = np.max(ymin)
    ym = np.argmax(ymin)
    coeff, _ = curve_fit(gaussian, range(256), ymin, [90.0, ym, m])

    ## MONITORING ###
    rg = range(256)
    MONITORING(3, 1, plotCurve, rg, ymin)
    MONITORING(3, 2, plotCurve, rg, gaussian(rg, *coeff), clear=False)
    ##            ###

    return coeff[1] - abs(coeff[0]) * 2.7
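
The gaussian model fitted here is not shown. From the initial guess [90.0, ym, m] and the returned threshold coeff[1] - abs(coeff[0]) * 2.7, the parameter order appears to be (sigma, mu, amplitude); a sketch under that assumption:

import numpy as np

def gaussian(x, sigma, mu, amp):
    # Parameter order (sigma, mu, amp) is inferred from how the
    # coefficients are used above; treat it as an assumption.
    return amp * np.exp(-(np.asarray(x) - mu) ** 2 / (2.0 * sigma ** 2))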
Example #10
def find_histogram_peaks(ndata, num_peaks=5):

    hist, bins = np.histogram(ndata, bins=256)
    np.save("hist.npy", hist)

    # zero pad the end so clipped white will show up as a peak
    tmp = np.zeros(257)
    tmp[0:256] = hist
    hist = tmp
    del tmp

    if True:
        # smoothed 1st derivative of the signal
        svg = savitzky_golay.savitzky_golay(hist, 21, 5, 1)
        np.save("svg.npy", svg)

        peaks_list = find_peaks(svg)

        # look for zero crossing
        zeros_list = find_zeros(svg)
        print("zeros_list=", zeros_list)

        highest_peaks = find_highest_peaks(zeros_list, hist, num_peaks)
        print("highest_peaks=", highest_peaks)
    else:
        # nothing fancy; just use argmax repeatedly
        highest_peaks = simple_highest_peaks(ndata, hist, num_peaks)
        svg = None
#        svg = np.diff(hist)
        # 2nd derivative
#        svg = np.diff(np.diff(hist))
#        highest_peaks = np.argsort(svg)[0:num_peaks]
#        highest_peaks = np.argsort(svg)[-num_peaks:]

    print(highest_peaks)

    save_gray_histogram_img(hist, svg, highest_peaks)

    sorted_peaks = sorted(highest_peaks)
    histo_values = [hist[p] for p in sorted_peaks]

    # return the pixel values of the peaks and the histogram counts
    return sorted_peaks, histo_values
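
find_peaks, find_zeros, find_highest_peaks and simple_highest_peaks are project helpers that are not shown here. The zero-crossing step is the load-bearing one (extrema of the histogram are zero crossings of its smoothed derivative); a minimal find_zeros might look like:

import numpy as np

def find_zeros(sig):
    # Indices where the sign of the smoothed derivative flips,
    # i.e. candidate peaks/valleys of the underlying histogram.
    sig = np.asarray(sig)
    return np.where(np.diff(np.signbit(sig)))[0].tolist()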
Example #11
def plot_savitzky_golay(ndata):
    # kill the clipped-to-white pixels
#    ndata = ndata[:-1]

    smoothed = savitzky_golay.savitzky_golay(ndata, 21, 5, 1)
#    print "smoothed=", smoothed

    fig = Figure()

    ax = fig.add_subplot(111)
    ax.grid()
    ax.plot(ndata)

#    ax = fig.add_subplot(212)
#    ax.grid()
    ax.plot(smoothed)

    outfilename = "svgy.pdf"
    canvas = FigureCanvasAgg(fig)
    canvas.print_figure(outfilename)
    print "wrote",outfilename

    return smoothed
Example #12
def get_split_prep_data(train_ratio):
    records = [  # MIT-BIH record IDs
        100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 111, 112, 113, 114,
        115, 116, 117, 118, 119, 121, 122, 123, 124, 200, 201, 202, 203, 205,
        207, 208, 209, 210, 212, 213, 214, 215, 217, 219, 220, 221, 222, 223,
        228, 230, 231, 232, 233, 234
    ]
    records = [str(i) for i in records]
    normal = []
    fusion = []
    atrial = []
    ventri = []
    for f in records:

        nor = np.load('mitdb/N_' + f + '.npy')
        atr = np.load('mitdb/A_' + f + '.npy')
        fus = np.load('mitdb/F_' + f + '.npy')
        ven = np.load('mitdb/V_' + f + '.npy')

        nor = np.reshape(nor, (nor.shape[0], 2))
        fus = np.reshape(fus, (fus.shape[0], 2))
        atr = np.reshape(atr, (atr.shape[0], 2))
        ven = np.reshape(ven, (ven.shape[0], 2))

        print('Ori_Shape:', nor.shape, fus.shape, atr.shape, ven.shape)

        if nor.shape[0] > 0:
            nor[:, 0] = savitzky_golay(nor[:, 0], 11, 3)
            nor[:, 1] = savitzky_golay(nor[:, 1], 11, 3)
        if atr.shape[0] > 0:
            atr[:, 0] = savitzky_golay(atr[:, 0], 11, 3)
            atr[:, 1] = savitzky_golay(atr[:, 1], 11, 3)
        if fus.shape[0] > 0:
            fus[:, 0] = savitzky_golay(fus[:, 0], 11, 3)
            fus[:, 1] = savitzky_golay(fus[:, 1], 11, 3)
        if ven.shape[0] > 0:
            ven[:, 0] = savitzky_golay(ven[:, 0], 11, 3)
            ven[:, 1] = savitzky_golay(ven[:, 1], 11, 3)

        normal = ladd(normal, nor, [1, 0, 0, 0])
        atrial = ladd(atrial, atr, [0, 1, 0, 0])
        fusion = ladd(fusion, fus, [0, 0, 1, 0])
        ventri = ladd(ventri, ven, [0, 0, 0, 1])

    normal = np.array(normal)[:class_size]
    atrial = np.array(atrial)[:class_size]
    fusion = np.array(fusion)[:class_size]
    ventri = np.array(ventri)[:class_size]

    normal_size = len(normal)
    fusion_size = len(fusion)
    atrial_size = len(atrial)
    ventri_size = len(ventri)

    print('Normal_size', normal_size)
    print('Fusion_size', fusion_size)
    print('Atrial_size', atrial_size)
    print('Ventri_size', ventri_size)

    result = np.concatenate((normal, atrial, fusion, ventri))
    np.random.shuffle(result)

    label = np.array(result[:, -1].tolist())
    data = np.array(result[:, :-1].tolist())
    result = data
    print('Result shape', result.shape, label.shape)
    result, result_mean = z_norm(result)
    print('Result, Result_mean', result.shape, result_mean)
    print "Mean of train data : ", result_mean
    print "Train data shape  : ", result.shape

    # label = []
    # idx = 0
    # while idx < normal_size:
    #     label.append([1,0,0,0])
    #     idx += 1
    # idx = 0
    # while idx < atrial_size:
    #     label.append([0,1,0,0])
    #     idx += 1
    # idx = 0
    # while idx < fusion_size:
    #     label.append([0,0,1,0])
    #     idx += 1
    # idx = 0
    # while idx < ventri_size:
    #     label.append([0,0,0,1])
    #     idx += 1

    print('Result Label Shape', result.shape, label.shape)

    train_size = int(result.shape[0] * train_ratio)
    X_train, y_train = result[:train_size], label[:train_size]
    X_test, y_test = result[train_size:], label[train_size:]

    print('X_shape', X_train.shape, X_test.shape)
    print('y_shape', y_train.shape, y_test.shape)
    X_train = np.reshape(X_train, (X_train.shape[0], X_train.shape[1], 2))
    X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 2))

    return X_train, y_train, X_test, y_test
Example #13
def smooth_chromatogram_using_Savitzky_Golay(i_list):

    i_list2 = sg.savitzky_golay(
        np.array(i_list), 7,
        3)  # window size 7, polynomial order 3. Optimized for chrom
    return i_list2.tolist()
T = data[:,spec['Frame']]/fps - data[0,spec['Frame']]/fps
X = (data[:,spec['X']]-Xo)*scale
Y = (data[:,spec['Y']]-Yo)*scale
#Y = -data[:,spec['Y']]/9.67 + 90.
Rad = np.pi*data[:,spec['Phi']]/180.
Angle = Rad #data[:,spec['Phi']]
#print(T.shape, X.shape, Y.shape, Angle.shape)

"""
Calculate forward, sideward, angular and translational speed 
"""
Vf = np.multiply( (X[1:]-X[:-1]), np.cos(Rad[:-1]) ) + np.multiply( Y[1:]-Y[:-1], np.sin(Rad[:-1])) 
Vf = Vf * fps
Vs = np.multiply( -(X[1:]-X[:-1]), np.sin(Rad[:-1]) ) + np.multiply( Y[1:]-Y[:-1], np.cos(Rad[:-1]))
Vs = Vs * fps
AngleSav = savitzky_golay(Angle, 51, 3) # window size 51, polynomial order 3
Vr = fps*(np.diff(AngleSav))
Vt = np.sqrt(np.multiply(Vf, Vf) + np.multiply(Vs, Vs))
aVt = np.abs(np.diff(Vt))

"""
Clear jumps
"""
peaks = detect_peaks(aVt, mph=10, mpd=5)
#peaks = peakutils.peak.indexes(aVt, thres=0.3, min_dist=2)

Vf_tmp = Vf
peaks_tmp = np.hstack( (np.zeros(1), peaks, (np.ones(1)*Vf.size)) ) 
numjumps = peaks_tmp.size - 1
print("#Jump events:", numjumps)
for i in range(numjumps):
    ...  # loop body truncated in the source

def apply_savitzky_golay(series):
    result = savitzky_golay.savitzky_golay(np.array(series), 31, 3)
    return result.tolist()
Example #17
    # 9 rows
    p = '/mnt/hit4/hit4user/PycharmProjects/my_pytorch_lstm/YNDX_191211_191223.csv'
    data = augum(p, [7])
    data = data[:500, :]  # limit
    # replace date
    dat = list(range(data.shape[0]))
    data[:, 0] = scaler_simple(dat)
    # test
    # data[:, 1] = scaler_simple(dat)
    # print(data[1101, 0])

    # SCALING
    data = my_scaler(data)  # (0,1)
    # SMOOTHING
    s = data[:, 1].copy()
    data[:, 1] = savitzky_golay(data[:, 1], 87, 7)
    # save original
    torch.save(data, open('traindata_ya_orig.pt', 'wb'))

    # PRINT origin

    plt.plot(np.arange(data.shape[0]), s, 'r', linewidth=2.0)
    plt.plot(np.arange(data.shape[0]), data[:, 1], 'b', linewidth=2.0)
    plt.show()

    # BATCHING

    # 1) strict windows
    # for di in range(0, len(data), batch_num):
    #     window = train_data[di:di + batch_num]
    #     if len(window) < 1000:  # last short window
    # fragment: inside an outer loop; on bad data, pad the outputs and skip
    for yr in range(start_year, end_year + 1):
        SOS_dbl_2rd.append(-9999)
        EOS_dbl_2rd.append(-9999)
        SOS_Gmax20.append(-9999)
        EOS_Gmax20.append(-9999)
        SOS_Gmax50.append(-9999)
        EOS_Gmax50.append(-9999)
        SOS_poly.append(-9999)
        EOS_poly.append(-9999)
        SOS_dbl_1st.append(-9999)
        EOS_dbl_1st.append(-9999)
    continue

# replace negative NDVIs
multi_year_vi[ind] = np.min(multi_year_vi[~ind])
multi_year_vi_sg = savitzky_golay.savitzky_golay(multi_year_vi, 15, 4)

# multiyear average NDVI and gs NDVI / nongs NDVI
avg_multi = np.nanmean(
    np.reshape(multi_year_vi_sg, [len(multi_year_vi_sg) // 24, 24]), axis=0)
avg_vi = np.nanmean(avg_multi)
gs_avg_vi = np.nanmean(avg_multi[6:18])
nongs_avg_vi = (np.nanmean(avg_multi[0:6]) + np.nanmean(avg_multi[18:24])) / 2
if (avg_vi > 0.1) and (gs_avg_vi > (nongs_avg_vi + 0.1)):
    if not np.isnan(np.sum(multi_year_vi_sg)):
        [onset_ndvi, dormacy_ndvi] = Phen_Est.get_onset_dormancy_ndvi(
            multi_year_vi_sg, ndata_year)
    else:
        onset_ndvi = np.nan
        dormacy_ndvi = np.nan

    multi_year_vi_sg = np.squeeze(multi_year_vi_sg)
Example #19
def execution():

    pz = []
    ln = []
    dz = []
    cvi = 0
    mainth = 1.0
    thresh = 100.0
    vth = 0.0
    filtwidth = 51
    filtorder = 2
    for c in cv:
        if len(c) > 0:
            cvi += 1
            x = c[2].z
            y = c[2].f

            ifrom = np.argmax(y)

            x = x[ifrom:]
            y = y[ifrom:]

            pylab.subplot(2, 2, cvi)
            pylab.plot(x, y, 'k.')
            pylab.title(c.basename)
            der = sg.savitzky_golay(y, filtwidth, filtorder, deriv=1)
            xi = np.where(der > mainth)
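            # Indices where the smoothed derivative exceeds mainth mark jump
            # edges; the stretches between them become flat segments below.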

            segments = []

            js = []
            trovati = xi[0]
            for j in range(len(y)):
                if j in trovati:
                    if len(js) > 0:
                        xx = x[js]
                        yy = y[js]
                        segments.append(seg(xx, yy))
                    js = []
                else:
                    js.append(j)
            if len(js) > 0:
                xx = x[js]
                yy = y[js]
                segments.append(seg(xx, yy))

            i = 1

            for s in segments:
                if s.len() > vth:
                    s.plot(i, False)
                    i = i + 1
                if s.len() > thresh:
                    pz.append(s.val())
                    ln.append(s.len())

            prev = segments[-1].val()
            for s in segments[::-1]:
                if s.len() > thresh:
                    if s.val() != prev:
                        dz.append(s.val() - prev)
                        prev = s.val()
y_filtered['tv'] = y_tv * y_max

#----------------------------
# LOWESS (locally weighted scatter plot smooth)
#----------------------------
import statsmodels.api as sm
y_filtered['lowess'] = sm.nonparametric.lowess(np.copy(y), np.copy(x), frac=0.05)[:, 1]

#----------------------------
# Savitzky Golay filter
#----------------------------
y_filtered['savitzky_golay'] = SG1.savitzky_golay(np.copy(y), window_size=5, order=2, deriv=0)

#----------------------------
# Derivatives
#----------------------------
y_filtered['savitzky_golay_1'] = SG1.savitzky_golay(y_filtered['gaussian'], window_size=5, order=2, deriv=1)
y_filtered['savitzky_golay_2'] = SG1.savitzky_golay(y_filtered['gaussian'], window_size=5, order=2, deriv=2)

#-------------------------------------------
# Principal Component Analysis (time series)
Example #21
def DigitizePeaklist(peaklist,
                     resolution=0.5,
                     debug=False,
                     sg=False,
                     inputtype='tuple',
                     returntype='tuple'):
    """
    Bin the list of peaks so that resolution decreases
    --------

    Keyword arguments:
        peaklist -- input peaklist as list of tuples
        resolution -- size of the bins, default 0.5
        debug -- True activates verbose output
        sg -- apply savitzky golay filtering
        inputtype -- 'tuple' or 'list'
        returntype -- 'tuple' outputs two lists of mz and intensities;
        'list' outputs one list of (mz, intensity) tuples
    """
    import numpy as np
    import sys

    # import under its full name: "as sg" would shadow the sg flag argument
    import savitzky_golay

    if inputtype == 'tuple':
        mz_in, intens_in = zip(*peaklist)

        mz = np.array(mz_in)
        counts_double = np.array(intens_in)
    elif inputtype == 'list':
        mz = np.array(peaklist[0])
        counts_double = np.array(peaklist[1])
    else:
        sys.exit('You must provide a valid inputtype (list or tuple)')

    bins = np.arange(0, max(mz) + 1, resolution)

    inds = np.digitize(mz, bins)

    if debug:
        print(mz)
        print(bins)
        for n in range(round(mz.size / resolution)):
            print(bins[inds[n] - 1], "<=", mz[n], "<", bins[inds[n]])

    mz_bins_double = []
    for m in range(mz.size):
        mz_bins_double.append(bins[inds[m] - 1])

    mz_bins_double = np.array(mz_bins_double)

    mz_counts_tuple = {}
    for index, mass in enumerate(mz_bins_double):
        if mass in mz_counts_tuple.keys():
            mz_counts_tuple[mass] += counts_double[index]
        else:
            mz_counts_tuple[mass] = counts_double[index]

    mz_bins = []
    counts = []

    for element in sorted(list(mz_counts_tuple.items())):
        mz_bins.append(element[0])
        counts.append(element[1])

    if sg:
        counts = savitzky_golay.savitzky_golay(counts, 49, 3)

    if returntype == 'tuple':
        return mz_bins, counts

    if returntype == 'list':
        return list(zip(mz_bins, counts))
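
A small usage sketch (the mz values and intensities are made up for illustration): peaks falling into the same 0.5-wide bin have their intensities summed, and mz values are snapped to the lower bin edge.

peaks = [(100.07, 1500.0), (100.43, 900.0), (251.16, 4200.0)]
mz_bins, counts = DigitizePeaklist(peaks, resolution=0.5)
# mz_bins -> [100.0, 251.0]; counts -> [2400.0, 4200.0]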
Example #22
def plotv_slice(arr,model=None,s=None,dq=None,tau=1.,r=3,boxtext=None,show=True,rf=10,tight=True):
	'''
	Plots vertical slices of arr at y=cst and x=cst respectively.
	Depending on optional parameters, also plots tau=tau (=1 by
	default) isosurface. Marks in red the "eye" of the nMBP.
	'''
	if not model: model=_model
	if dq is None: dq = bool(_parfile)
	if dq:
		tau1_level=np.array(pybold.level(model.dq.tau,tau),dtype=int)
		tau1=int(round(np.mean(tau1_level)))
	else: tau1=int(np.size(model.z.xc3)/2.)
	if s is None: s=snake.snake_from_box(model.z.rho,radius=r,start=tau1)
	sx, sy, sz = np.shape(arr)
	x, y, z = np.array(s, dtype=int).T
	xc, yc, zc = model.z.xc1[:,0,0]/1.e5, model.z.xc2[0,:,0]/1.e5, model.z.xc3[0,0,:]/1.e5
	if dq:
		tau1_sindex = int(list(s[:,2]).index(tau1))
		xmean = int(s[tau1_sindex,0])
		ymean = int(s[tau1_sindex,1])
	else:
		xmean = int(round(np.mean(x)))
		ymean = int(round(np.mean(y)))
		tau1 = 0
	sharey = False
	if tight: z0 = z
	else:
		if dq:
			xp,yp,z1p,z2p = xc, yc, zc[tau1_level[:,ymean]]-zc[tau1],zc[tau1_level[xmean,:]]-zc[tau1]
			z0min = z[0]
			z0max = z[-1]+1
			z0min = min(z0min,np.min(tau1_level[:,ymean]))
			z0max = max(z0max,1+np.max(tau1_level[:,ymean]))
			z0min = min(z0min,np.min(tau1_level[xmean,:]))
			z0max = max(z0max, 1 + np.max(tau1_level[xmean, :]))
			delta = z0max - z0min
			z0min = max(0, int(z0min-.1*delta))
			z0max = min(sz, int(z0max+.1*delta))
			z0 = np.arange(z0min, z0max)
			sharey=True
		else:
			z0 = range(sz)
	sliceY=arr[np.ix_(range(sx),[ymean],z0)][:,0,:].T
	sliceX=arr[np.ix_([xmean],range(sy),z0)][0,:,:].T
	vY=model.z.v2[np.ix_(range(sx),[ymean],z0)][:,0,:].T
	vX=model.z.v1[np.ix_([xmean],range(sy),z0)][0,:,:].T
	if rf > 1:	# Refine factor
		vY = refine(vY,8)
		vX = refine(vX,8)
	vY=(vY>=0.)
	vX=(vX>=0.)
	extY=np.array([xc[0],xc[sx-1],zc[z0[0]]-zc[tau1],zc[z0[-1]]-zc[tau1]])
	extX=np.array([yc[0],yc[sy-1],zc[z0[0]]-zc[tau1],zc[z0[-1]]-zc[tau1]])
	f, (ax1, ax2) = plt.subplots(1, 2, figsize=(16,5), sharey=sharey)
	ax1.set_aspect('equal')
	ax2.set_aspect('equal')
	plt.tight_layout(pad=4)
	if _modelfile:
		title = _modelfile.split('.')
		title = '.'.join(title[:max(1,len(title)-1)])
		plt.suptitle(title,fontsize=16)
	vmin = min(np.min(sliceY), np.min(sliceX))
	vmax = max(np.max(sliceY), np.max(sliceX))
	im1 = ax1.imshow(sliceY, origin='lower', alpha=1., cmap=None, extent=extY, vmin=vmin, vmax=vmax)
	im2 = ax2.imshow(sliceX, origin='lower', alpha=1., cmap=None, extent=extX, vmin=vmin, vmax=vmax)
	f.subplots_adjust(right=0.8)
	cbar_ax = f.add_axes([0.83, 0.15, 0.02, 0.69])
	cbar = f.colorbar(im2, cax=cbar_ax)
	cbar.set_label(r'Density [g$\,$cm$^{-3}$]', rotation=270, labelpad=20)
	ax1.imshow(-vY, origin='lower', alpha=.4, cmap=cm.gray, extent=extY)
	ax2.imshow(vX, origin='lower', alpha=.4, cmap=cm.gray, extent=extX)
	xp,yp,zp = xc[x],yc[y],zc[z]-zc[tau1]
	if rf > 1:
		xp,yp,zp = refine(xp,rf), refine(yp,rf), refine(zp,rf)
		xp = savitzky_golay(xp, rf**2+1, 3)
		yp = savitzky_golay(yp, rf**2+1, 3)
		zp = savitzky_golay(zp, rf**2+1, 3)
	ax1.plot(xp,zp,'r')
	ax2.plot(yp,zp,'r')
	#ax1.plot(xc[x],zc[z]-zc[tau1],'r')
	#ax2.plot(yc[y],zc[z]-zc[tau1],'r')
	if dq:
		xp,yp,z1p,z2p = xc, yc, zc[tau1_level[:,ymean]]-zc[tau1],zc[tau1_level[xmean,:]]-zc[tau1]
		if rf > 1:
			xp,yp,z1p,z2p = refine(xp,rf), refine(yp,rf), refine(z1p,rf), refine(z2p,rf)
			xp = savitzky_golay(xp, rf*(rf-1)+1, 3)
			yp = savitzky_golay(yp, rf*(rf-1)+1, 3)
			z1p = savitzky_golay(z1p, rf*(rf-1)+1, 3)
			z2p = savitzky_golay(z2p, rf*(rf-1)+1, 3)
		ax1.plot(xp,z1p,'b')
		ax2.plot(yp,z2p,'b')
	#	ax1.plot(xc,zc[tau1_level[:,ymean]]-zc[tau1],'b')
	#	ax2.plot(yc,zc[tau1_level[xmean,:]]-zc[tau1],'b')
	ax1.set_xlim((extY[0],extY[1]))
	ax1.set_ylim((extY[2],extY[3]))
	ax2.set_xlim((extX[0],extX[1]))
	ax2.set_ylim((extX[2],extX[3]))
	ax1.set_xlabel("Spatial horizontal position (X axis) [km]")
	ax2.set_xlabel("Spatial horizontal position (Y axis) [km]")
	ax1.set_ylabel("Height [km]")
	if boxtext:
		props = dict(boxstyle='round, pad=.7, rounding_size=.2', facecolor='white', edgecolor='black', alpha=.8)
		ax1.text(0.05, 0.9, boxtext, size='smaller', ha='left', va='top', transform=ax1.transAxes, bbox=props)
	if show: plt.show()
	return sliceY, sliceX
#%% Normalize to number of reads/variant, peak detect on raw and smoothed data

fivedata123 = forexpressionanalysis.NormalizeToReadNumbers(fivecov123fnorm)

fivedata123.to_pickle(homedir + 'MappingProtein/fivedata123.pkl')

from peakdetect import peakdet
from savitzky_golay import savitzky_golay


fivedata123smoothed = fivedata123.copy()
peaks123=pd.DataFrame()
peaksinfo123=[]
for var,group in fivedata123.groupby(level=0):
    peaks123.loc[int(var), 'rawnumberofpeaks'] = len(peakdet(list(group.summedreads), 0.02, list(xval))[0])
    smoothed = savitzky_golay(list(group.summedreads), 3, 1)
    for binn in np.arange(1,17):
        fivedata123smoothed.loc[(int(var), binn)] = smoothed[binn - 1]
    peaks123.loc[int(var), 'smoothednumberofpeaks'] = len(peakdet(smoothed, 0.02, list(xval))[0])
    peaksinfo123.append(peakdet(smoothed, 0.02, list(xval)))

peakspositions123=pd.DataFrame(peaksinfo123)
peakspositions123.columns=['maxima','minima']
peakspositions123.index = peaks123.index

for var in peakspositions123.index:
    if (peaks123.loc[var,'smoothednumberofpeaks']>0):
        peaks123.loc[var, 'xpeak1'] = peakspositions123.loc[var, 'maxima'][0][0]
        peaks123.loc[var, 'ypeak1'] = peakspositions123.loc[var, 'maxima'][0][1]
    if (peaks123.loc[var, 'smoothednumberofpeaks'] > 1):
        peaks123.loc[var, 'xpeak2'] = peakspositions123.loc[var, 'maxima'][1][0]
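
peakdet here is presumably the classic Billauer peak detector, returning lists of (x, y) maxima and minima; a compact Python port for reference (the exact signature is an assumption):

def peakdet(v, delta, x=None):
    # A point counts as a maximum once the signal has dropped by at
    # least `delta` after it (and symmetrically for minima).
    maxtab, mintab = [], []
    if x is None:
        x = list(range(len(v)))
    mn, mx = float('inf'), float('-inf')
    mnpos = mxpos = None
    lookformax = True
    for i, this in enumerate(v):
        if this > mx:
            mx, mxpos = this, x[i]
        if this < mn:
            mn, mnpos = this, x[i]
        if lookformax and this < mx - delta:
            maxtab.append((mxpos, mx))
            mn, mnpos = this, x[i]
            lookformax = False
        elif not lookformax and this > mn + delta:
            mintab.append((mnpos, mn))
            mx, mxpos = this, x[i]
            lookformax = True
    return maxtab, mintab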
def calc_ld(data, W=85., plotfits=True):

    W *= lbs2N
    m = W / g
    t = data["timeelapsed"]["values"] - data["timeelapsed"]["values"][0]
    ti = np.linspace(t[0], t[-1], 100)
    dt = np.diff(ti)

    h = data["altitude"]["values"] * ft2m
    # use a distinct name for the slope so it does not clobber the mass m
    slope, b, rval, _, _ = stats.linregress(t, h)
    hi = slope * ti + b
    dh = hi[:-1] - hi[1:]
    if plotfits:
        fig, ax = plt.subplots()
        ax.plot(ti, hi)
        ax.grid()
        ax.scatter(t, h, facecolor="None")
        ax.set_xlabel("Time [sec]")
        ax.set_ylabel("Altitude [m]")
        ax.set_title("R-squared value: %.3f" % (rval**2))
        fig.savefig("htest.pdf")

    r = data["roll"]["values"]
    fr = interp1d(t, r, "cubic")
    ri = fr(ti)
    from savitzky_golay import savitzky_golay
    ri = savitzky_golay(ri, window_size=41, order=2)
    if plotfits:
        fig, ax = plt.subplots()
        ax.plot(ti, ri)
        ax.grid()
        ax.scatter(t, r, facecolor="None")
        ax.set_xlabel("Time [sec]")
        ax.set_ylabel("Roll [radians]")
        fig.savefig("rtest.pdf")
    ri = 0.5 * (ri[1:] + ri[:-1])
    cosr = np.cos(ri)

    V = np.average(data["speed"]["values"]) * kts2ms

    LD = V / cosr / dh * dt
    if plotfits:
        fig, ax = plt.subplots()
        ax.plot(ti[:-1], LD)
        ax.grid()
        ax.set_xlabel("Time [sec]")
        ax.set_ylabel("L/D")
        fig.savefig("LD_constantV.pdf")

    V = data["speed"]["values"] * kts2ms
    fv = interp1d(t, V, "cubic")
    Vi = fv(ti)
    Vi = savitzky_golay(Vi, window_size=31, order=4)
    if plotfits:
        fig, ax = plt.subplots()
        ax.plot(ti, Vi)
        ax.grid()
        ax.scatter(t, V, facecolor="None")
        ax.set_xlabel("Time [sec]")
        ax.set_ylabel("Speed [m/s]")
        fig.savefig("Vtest.pdf")

    dKE = 0.5 * m * (Vi[1:]**2 - Vi[:-1]**2)
    V = 0.5 * (Vi[1:] + Vi[:-1])
    LD = 1. / ((m * g * dh / dt + dKE) / m / g * cosr / V)
    if plotfits:
        fig, ax = plt.subplots()
        ax.plot(ti[:-1], LD)
        ax.grid()
        ax.set_xlabel("Time [sec]")
        ax.set_ylabel("L/D")
        # ax.set_ylim([0, 70])
        fig.savefig("LD.pdf")
    return LD
Example #25
    print(data.shape)
    # replace date
    dat = list(range(data.shape[0]))
    data[:, 0] = scaler_simple(dat)
    # test
    # data[:, 1] = scaler_simple(dat)
    # print(data[1101, 0])

    # SCALING
    # data = my_scaler(data)  # (0,1)
    data[:, 1] = scaler_simple(data[:, 1])  # price
    data[:, 2] = scaler_simple(data[:, 2])  # value

    # SMOOTHING
    s = data[:, 1].copy()
    data[:, 1] = savitzky_golay(data[:, 1], 41, 7)
    data[:, 2] = savitzky_golay(data[:, 2], 41, 7)
    # PLOT
    plt.plot(np.arange(data.shape[0]), s, 'r', linewidth=2.0)
    plt.plot(np.arange(data.shape[0]), data[:, 1], 'b', linewidth=2.0)
    plt.show()
    # save original
    data = data[:, 1:3]  # remove date
    data = data.transpose((1, 0))
    # print(data)
    print(data.shape)  # 2, 500 #price/volume, step
    np.save('123', data)
    torch.save(data, open('traindata.pt', 'wb'))

    window_size = 7
    order = 3

    print("Savitzky Golay hyperaparameters --> Window size: %d ; Order: %d" %
          (window_size, order))
    data_rgb_smooth = np.zeros((3, 300, 25, 2), dtype=np.float32)

    print(data_rgb.shape)
    for per in range(data_rgb.shape[3]):
        for joint in range(data_rgb.shape[2]):
            x_coord = data_rgb[0, :, joint, per]
            y_coord = data_rgb[1, :, joint, per]
            z_coord = data_rgb[2, :, joint, per]
            #print (x_coord)
            x_coord_smooth = savitzky_golay(x_coord,
                                            window_size,
                                            order,
                                            deriv=0)
            #print (x_coord_smooth)
            y_coord_smooth = savitzky_golay(y_coord,
                                            window_size,
                                            order,
                                            deriv=0)
            z_coord_smooth = savitzky_golay(z_coord,
                                            window_size,
                                            order,
                                            deriv=0)

            data_rgb_smooth[0, :, joint, per] = x_coord_smooth
            data_rgb_smooth[1, :, joint, per] = y_coord_smooth
            data_rgb_smooth[2, :, joint, per] = z_coord_smooth
seconds_arr = np.arange(0, clip_length_sec, w) * 1000
#####################################################
seconds_cuts = []
for cut in cuts:
    seconds_cuts.append(np.argmin(np.abs(seconds_arr - cut * 1000)))
    #print(second)
#%%
res2 = np.zeros((len(arr_test_subjects), len(seconds_arr)))
tmp_velocities = []
for cur_iter, user in enumerate(arr_test_subjects):
    tmp_clip = []
    tmp_arr = []
    current_sec_barrier = 0
    i = 0
    current_trajectory = savitzky_golay(np.array(data[user][clip]['yaw']),
                                        window_size=11,
                                        order=3,
                                        deriv=0)
    velocity = np.gradient(current_trajectory, data[user][clip]['time']) * 1000
    tmp_velocities.append(velocity)
    while (i < len(data[user][clip]['time'])
           and current_sec_barrier < len(seconds_arr)):
        if (data[user][clip]['time'][i] >= seconds_arr[current_sec_barrier]
                and tmp_clip != []):
            tmp_arr.append(np.mean(np.abs(tmp_clip)))
            current_sec_barrier += 1
            tmp_clip = []
#           if current_sec_barrier >= 159:
#               break
        tmp_clip.append(velocity[i])
        if current_sec_barrier == 0:
            tmp_clip[-1] = 0