Example 1

import csv
import os
import warnings

from scipy.signal import savgol_filter as sgf


def sgf_calc(args_list):
    folder_to_save, raw_y, raw_ybg, raw_x = args_list
    # suppress scipy's harmless "internal gelsd" warning triggered by the filter
    warnings.filterwarnings(action="ignore",
                            module="scipy",
                            message="^internal gelsd")
    window_param = int(input('Input window box size (must be odd number)\n:'))
    poly_param = int(input('Input polynomial order for smoothing\n:'))
    # saving parameters chosen for future inspection
    os.chdir(folder_to_save)
    with open("sgf_params.txt", "w") as sgf_file:
        sgf_file.write("Window parameter used: {} \n".format(window_param))
        sgf_file.write("Polynomial paramter used: {}".format(poly_param))
    #global norm_smooth
    smoothed_y = sgf(raw_y,
                     window_param,
                     poly_param,
                     delta=abs(raw_y[1] - raw_y[0]))  # delta only matters when deriv > 0
    smoothed_ybg = sgf(raw_ybg,
                       window_param,
                       poly_param,
                       delta=abs(raw_ybg[1] - raw_ybg[0]))
    # divide the filtered y data by the filtered background data
    norm_smooth = smoothed_y / smoothed_ybg
    rows = list(zip(raw_x, norm_smooth))
    with open("sgf_data.csv", "w") as f:
        writer = csv.writer(f)
        writer.writerow(["window", "polynomail order"])
        writer.writerow([window_param, poly_param])
        writer.writerow(["raw_x", "sgf_filt"])
        writer.writerows(rows)
    os.chdir('..')
    return raw_x, norm_smooth
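A minimal usage sketch for sgf_calc, with hypothetical demo data and output folder; note that the function prompts interactively for the window size and polynomial order:

import os

import numpy as np

# hypothetical demo data: a noisy peak over a flat background
x = np.linspace(0, 10, 500)
y = np.exp(-(x - 5) ** 2) + np.random.normal(0, 0.05, x.size)
ybg = 1.0 + np.random.normal(0, 0.05, x.size)

os.makedirs("output", exist_ok=True)  # sgf_calc chdirs into this folder
raw_x, norm_smooth = sgf_calc(["output", y, ybg, x])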
Example 2

def calculate_savgol(fill_y):
    # fill_ratio is assumed to be a module-level global; the window is
    # three times it, bumped to the next odd number if needed
    test_win = fill_ratio * 3
    if test_win % 2 == 0:
        window = test_win + 1
    else:
        window = test_win
    return sgf(fill_y, window, 2)
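A minimal usage sketch, assuming fill_ratio is the module-level global the function reads (the value here is hypothetical):

import numpy as np

fill_ratio = 10  # hypothetical; gives a window of 31 (3 * 10, rounded up to odd)
fill_y = np.linspace(0, 1, 200) + np.random.normal(0, 0.02, 200)
smoothed = calculate_savgol(fill_y)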
Example 3

def applyFilter(self):
    try:
        windowSize = int(self.windowLineEdit.text())
        order = int(self.orderLineEdit.text())
        self.y_ = sgf(self.y_, windowSize, order, mode="nearest")
        self.updateGraph()
    except Exception as err:
        print("An error occurred!")
        print(err)
Example 4

def derivative(X, deriv=2, window_length=13, polyorder=3):
    # row-wise Savitzky-Golay derivative of a 2D array (one signal per row)
    X_dev = sgf(X,
                window_length=window_length,
                polyorder=polyorder,
                deriv=deriv,
                delta=1.0,
                axis=1,
                mode='nearest')
    return X_dev
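A quick sketch of derivative on synthetic row-wise signals; the defaults give a second derivative and preserve the input shape:

import numpy as np

X = np.tile(np.sin(np.linspace(0, 4 * np.pi, 200)), (3, 1))  # one signal per row
X += np.random.normal(0, 0.01, X.shape)
X_dev = derivative(X)          # second derivative along axis=1
assert X_dev.shape == X.shape  # shape is preserved by the filter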
Example 5

    def gen_savgol_interp(self, numpoints=None, window_ratio=None, poly=None):
        if window_ratio is None:
            window_ratio = 3
        if poly is None:
            poly = 2

        linear_interp = self.gen_linear_interp(numpoints)[1]
        segment_count = self.selector_count() - 1
        interp_ratio = len(linear_interp) // segment_count
        test_window = interp_ratio * window_ratio
        if test_window % 2 == 0:
            window = test_window + 1
        else:
            window = test_window

        return sgf(linear_interp, window, poly)
Example 6

import numpy as np
from scipy.signal import savgol_filter as sgf


def smooth_traj(traj, FPS, window=11, polyorder=2, n=2, cut_edges=True):
    '''
    This function's input is a flowtracks trajectory object; the output
    is a smoothed trajectory. For the smoothing, a Savitzky-Golay filter
    is applied n times to the position of the trajectory. The velocity
    and acceleration are then calculated by a simple derivative.

    If cut_edges==True, the returned trajectory will be shorter than the
    original by 2*window, since the beginning and the end of the
    trajectory are each trimmed by window samples.

    Returns:
    new_traj - a smoothed trajectory object
    delta - the mean squared translation introduced by the smoothing
    '''
    if cut_edges:
        if len(traj) < window * 2 + 1:
            raise ValueError(
                'trajectory too short for window size %d with cutting' %
                window)
    else:
        if len(traj) < window + 1:
            raise ValueError('trajectory too short for window size %d' %
                             window)

    p = traj.pos()
    p_ = p
    for i in range(n):
        p_ = sgf(p_, window, polyorder, axis=0)
    # differentiate the smoothed positions: velocity, then acceleration
    v_ = np.gradient(p_)[0] * FPS
    a_ = np.gradient(v_)[0] * FPS

    delta = np.mean((p - p_)**2, axis=0)

    if cut_edges:
        p_ = p_[window:-window, :]
        v_ = v_[window:-window, :]
        a_ = a_[window:-window, :]
        t_ = traj.time()[window:-window]
    else:
        t_ = traj.time()

    new_traj = Trajectory(p_, v_, t_, traj.trajid(), accel=a_)
    return new_traj, delta
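A minimal sketch of calling smooth_traj on a hypothetical random walk, assuming the flowtracks package provides the Trajectory class with the (pos, velocity, time, trajid) constructor the snippet itself uses:

import numpy as np
# assumed import path for the flowtracks Trajectory class
from flowtracks.trajectory import Trajectory

pos = np.cumsum(np.random.normal(0, 0.01, (100, 3)), axis=0)
vel = np.gradient(pos)[0]  # rough per-frame velocity for the demo
traj = Trajectory(pos, vel, np.arange(100), trajid=1)

smoothed, delta = smooth_traj(traj, FPS=500.0, window=11)
print('mean squared displacement per axis:', delta)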
Example 7

import cv2
import numpy as np
from scipy.signal import savgol_filter as sgf


def measureWeldBead(im):
    # LOW_THRESH and HIGH_THRESH are assumed module-level Canny thresholds
    edgeIm = cv2.Canny(im, LOW_THRESH, HIGH_THRESH)

    # convert to true binary and sum along rows
    edgeIm[edgeIm == 255] = 1
    rowSums = np.zeros(edgeIm.shape[0])
    for i in range(0, edgeIm.shape[0]):
        rowSums[i] = np.sum(edgeIm[i])
    rowSums = sgf(rowSums, 5, 2)

    # split the dimension-reduced data in half and find the strongest
    # edge response in each half
    half = rowSums.shape[0] // 2

    row1 = np.where(rowSums == rowSums[:half].max())[0][0]
    row2 = half + np.where(rowSums[half:] == rowSums[half:].max())[0][0]

    #debug visuals
    #plt.plot(rowSums, np.arange(len(rowSums)))
    #plt.show()
    #plt.imshow(edgeIm)
    #plt.show()
    return np.abs(row1 - row2)
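A minimal usage sketch; the thresholds and the image path are hypothetical, and the constants must live at module level since measureWeldBead reads them as globals:

LOW_THRESH, HIGH_THRESH = 50, 150  # hypothetical Canny thresholds
im = cv2.imread('weld_bead.png', cv2.IMREAD_GRAYSCALE)
print('bead width: %d px' % measureWeldBead(im))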
Example 8

# A (an N x N energy deposition array), N, array_rvals, and array_zvals
# are assumed to be defined earlier in the original script
import numpy as np

# Now for each z, what radius corresponds to encompassing 50% of total?
radius_list = []
energy_list = []
for z_index in range(N):
    row_sum = sum(A[z_index,:])
    energy_list.append(row_sum)
    tot = 0
    for r_index in range(N):
        tot += A[z_index,r_index]
        if tot >= 0.5*row_sum:
            radius_list.append(array_rvals[r_index])
            break
        
# radius_list can be noisy; smooth it
radius_list_smooth = sgf(radius_list, 27, 3)
        
# Now we need to plot this surface (use cylindrical coordinates)
theta = np.linspace(0,2*np.pi,N)
Z,THETA = np.meshgrid(array_zvals,theta)
R = np.outer(np.ones(N),radius_list_smooth)

energy_min = min(energy_list)
energy_max = max(energy_list)
# normalize energies to [0, 1] for use as face colors
energy_list_colors = [(x - energy_min)/(energy_max - energy_min) for x in energy_list]
C = np.outer(np.ones(N),energy_list_colors)
X = R * np.cos(THETA)
Y = R * np.sin(THETA)

# Separately, we plot a sphere representing the target (centered at 6 cm, radius 2 cm)
# spherical coordinates
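The snippet is cut off before any drawing happens; a minimal sketch of how the prepared X, Y, Z, and C arrays could be rendered with matplotlib's 3D toolkit:

import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401, registers the 3d projection

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# color each facet by its normalized energy value
ax.plot_surface(X, Y, Z, facecolors=cm.viridis(C), rstride=1, cstride=1)
plt.show()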
Example 9

from scipy.signal import savgol_filter as sgf


def smartSgf(data, sgfWinPc, sgfDeg, sgfDerDeg=0):
    # window = sgfWinPc percent of the data length, forced odd; int() is
    # needed because true division returns a float in Python 3
    win = int(data.shape[0] * sgfWinPc / 100)
    sgfWin = win + 1 - win % 2
    filtered = sgf(data, sgfWin, sgfDeg, deriv=sgfDerDeg)
    
    return filtered
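A quick sketch: because the window is given as a percentage of the signal length, the same call scales across signals of different sizes:

import numpy as np

data = np.cumsum(np.random.normal(size=1000))
smooth = smartSgf(data, sgfWinPc=5, sgfDeg=3)                  # 51-point window
first_der = smartSgf(data, sgfWinPc=5, sgfDeg=3, sgfDerDeg=1)  # first derivative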
Example 10

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.signal import savgol_filter as sgf

from ephys_data import ephys_data
from baseline_divergence_funcs import firing_rates

file = 4
data_dir = '/media/bigdata/brads_data/file%i/' % file
data = ephys_data(data_dir, 1)
data.get_data()
spikes = data.spikes
spikes = spikes[0][:, 0, 0:20000]
spikes = spikes[:, np.newaxis, :]

firing, normal_firing = firing_rates(spikes, 25, 250)

fig, (ax0, ax1) = plt.subplots(nrows=2, ncols=1)
ax0.plot(np.mean(normal_firing[:, 0, :], axis=0))
filt_normal = sgf(np.mean(normal_firing[:, 0, :], axis=0), 51, 2)
ax0.plot(filt_normal)
plt.xlabel('20 seconds')
ax0.legend(['Mean Normalized Firing', 'SG Filtered Mean Normal'])
#plt.figure()
ax1.imshow(normal_firing[:, 0, :], interpolation='hermite', aspect='auto')
plt.suptitle('Brad File %i' % file)
plt.savefig(
    '/media/bigdata/firing_space_plot/plots/baseline_activity/brad_file%i_small'
    % file)
plt.close(fig)

time_series = pd.DataFrame(data=normal_firing[:, 0, :].T,
                           index=range(normal_firing.shape[-1]))
time_val = pd.DataFrame(data={'time': range(normal_firing.shape[-1])})
time_series = pd.concat([time_series, time_val], axis=1)
Example 11

import numpy as np
import pandas as pd
from scipy.signal import savgol_filter as sgf
from tkinter import Tk, filedialog


def ToA2ACF(ToA_st=1, lag_time_min=3e3, sav_gol_param=None):
    '''
    This function takes several ToA files interactively chosen from a folder,
    transforms them to ACFs via the TTTCU algorithm, cuts the ACFs' baselines,
    and averages them (with optional filtering).
    
    Inputs:
        ToA_st - sampling time for ToA (it is usually equal to 1 ns) [ns]
        lag_time_min - minimal lag time for the correlation function [ns]
        sav_gol_param - Savitzky-Golay filter parameters dictionary (see below)
    
    Outputs: 
        ACF - output averaged correlation function
        data - ToAs dictionary
    
    Importantly, ToAs should be stored in a txt file WITHOUT any header.

    Savitzky-Golay filter parameters should be stored in a dictionary as
    follows:
        {'poly': polynomial_order, 'window': window_size}

    The exp ** -1 decay for the ACF of 1 nm particles starts at lag times
    greater than 4 milliseconds. Hence, it is important to find the baseline
    only for these lag times (more than 4 ms). We use 1 ms as a safety factor.
    '''
    
    # open a dialog window and select some text files with ToAs
    root = Tk()
    root.fileNames = filedialog.askopenfilenames(
        initialdir="/",
        title="Select file",
        filetypes=(("ToA text files", "*.txt"),
                   ("all files", "*.*")))  # get the file names
    fileNames = root.fileNames  # store the file names
    root.destroy()  # close the dialog window
    
    # convert fileNames to a list of file names
    if type(fileNames) == str:
        fileNames = [fileNames]  # list(str) would split it into characters
    elif type(fileNames) != list and type(fileNames) != tuple:
        raise TypeError('"fileNames" must be a list of file names or a string')
    
    # data storages
    data = {}
    ACF = {}
    ACF_cut = {}
    ACF_fil_der = {}
    ACF_base_der = {}
    ACF_base = {}
    ACF_res_y = 0
    
    # apply the algorithm to each of the files
    for i, fname in enumerate(fileNames):
        
        # read the data from the current file
        data[i] = pd.read_csv(fname, names=['ToA'])
        
        # run the algorithm: the single ACF calculation (TTTCU is assumed
        # to be defined elsewhere in the original module)
        ACF[i] = TTTCU(data[i]['ToA'].to_numpy(),
                       ToA_st=ToA_st,
                       lag_time_min=lag_time_min)
        
        # cut the values with lag times less than 3000 ns
        ACF_cut[i] = ACF[i][np.where(ACF[i][:, 0] > 3000)]
        
        # find the first derivatives of the ACF curves
        ACF_fil_der[i] = np.gradient(ACF_cut[i][:, 1])
        
        # compute the baseline: average the lag times where the derivative
        # changes sign (restricted to lag times above 1 ms, since the
        # baseline cannot be found below that), then take the ACF value at
        # the point closest to that average lag time
        sign_change = np.where(np.diff(np.sign(ACF_fil_der[i])))[0]
        sign_change = sign_change[ACF_cut[i][sign_change, 0] > 1e6]
        target = np.mean(ACF_cut[i][sign_change, 0])
        ACF_base_der[i] = ACF_cut[i][np.argmin((target - ACF_cut[i][:, 0]) ** 2), 1]
        
        # cut the baselines 
        ACF_base[i] = ACF_cut[i]
        ACF_base[i][:, 1] = ACF_base[i][:, 1] / ACF_base_der[i] - 1

        # for the future averaging
        if i != 0:
            indmax = np.min([ACF_res_y.shape[0],ACF_base[i].shape[0]])
            ACF_res_y = ACF_base[i][:indmax, 1] + ACF_res_y[:indmax]
        else:
            ACF_res_y = ACF_base[i][:, 1]
    
    # average the ACF by dividing by the number of files
    ACF_res = np.c_[ACF_base[0][:ACF_res_y.shape[0], 0], ACF_res_y / (i + 1)]

    if sav_gol_param is not None:
        if type(sav_gol_param) != dict or set(sav_gol_param.keys()) != {'window', 'poly'}:
            raise TypeError("sav_gol_param should be a dict with keys 'window' and 'poly'")
        ACF_res[:, 1] = sgf(ACF_res[:, 1],
                            window_length=sav_gol_param["window"],
                            polyorder=sav_gol_param["poly"]
                            )
        
    return ACF_res, data
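A minimal usage sketch; the call opens a file dialog for picking the ToA .txt files, and the filter parameters here are hypothetical:

params = {'window': 51, 'poly': 3}
ACF_res, data = ToA2ACF(ToA_st=1, lag_time_min=3e3, sav_gol_param=params)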