Plot peaks in 3D space. 2 axes for spatial dimensions and one for Q space.
"""

#3D plot import
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 unused import
import matplotlib.pyplot as plt
import numpy as np
from data_loading.data_grid_TiNiSn import DataGrid, DataGrid_TiNiSn_500C, DataGrid_TiNiSn_600C

# Per-location curve-fit parameter files, one CSV per grid position
# (filenames match params_<num>.csv).
data_dir = "/home/sasha/Desktop/iterative_curve_fitting_save_test/"
regex = """params_(?P<num>.*?).csv"""
peakGrid = DataGrid(data_dir, regex)

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# One red marker per retained peak: (x, y) is the wafer grid position,
# z is the peak's Q value (column 1 of the per-location data).
for loc in peakGrid.data.keys():
    x, y = peakGrid.coord(loc)
    for i, peak in enumerate(peakGrid.data_at_loc(loc)[:, 1]):
        # Column 0 is thresholded at 10 -- presumably a peak amplitude /
        # intensity filter; TODO confirm column meaning against the CSV writer.
        if peakGrid.data_at_loc(loc)[i, 0] > 10:
            ax.scatter([x], [y], [peak], marker='o', color='red')
    #xs = [x for p in peaks]
    #ys = [y for p in peaks]

ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Q')
import numpy as np
import math
import statistics

"""
######################
Plotting found peaks on the diffraction patterns
######################
"""

"""
Load Data and Peak Data
"""
dataGrid = DataGrid_TiNiSn_500C()

# BBA peak-fit parameters, one CSV per grid location.
data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv"""
peakBBAGrid = DataGrid(data_dir, regex)

# BBA curve-fit parameters for the same grid.
data_dir = "/home/sasha/Desktop/peakTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkg_curveParams.csv"""
curveBBAGrid = DataGrid(data_dir, regex)

# Toggles for which peak sources get drawn on the plots.
draw_peaks = False
draw_curves = True
draw_min_peaks = False

#load Min block peak data
# NOTE(review): peakMinGrid is constructed from the curve-params dir/regex
# above, and data_dir is reassigned only AFTER the DataGrid call -- looks
# like the reassignment was meant to come first; confirm intent.
peakMinGrid = DataGrid(data_dir, regex)
data_dir = "/home/sasha/Desktop/MinBlockCurveParams/"
for loc in range(1, dataGrid.size + 1):
    file = data_dir + str(loc) + ".txt"
    #peaks = eval(open(file).read())
    # (loop body continues beyond this chunk)
from data_loading.data_grid_TiNiSn import DataGrid, DataGrid_TiNiSn_500C, DataGrid_TiNiSn_600C
import matplotlib.pyplot as plt
import numpy as np
from utils.utils import dict_to_csv
from scipy.signal import find_peaks

"""
Load Data and Peak Data
"""
dataGrid = DataGrid_TiNiSn_500C()
data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/"
#data_dir = "/home/sasha/Desktop/peakTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv"""
peakGrid = DataGrid(data_dir, regex)

# Output CSV for the manually-annotated peak errors.
save_path = "/home/sasha/Desktop/python/peak_error/peak_errors.csv"


def selectPeak(event):
    """
    print('%s click: button=%d, x=%d, y=%d, xdata=%f, ydata=%f' %
          ('double' if event.dblclick else 'single', event.button,
           event.x, event.y, event.xdata, event.ydata))
    """
    # Matplotlib mouse-click callback: remember the data index closest to
    # the clicked x position. nearest_index and cur are defined elsewhere
    # in this file (not visible in this chunk).
    global cur
    cur = nearest_index(event.xdata)


def key_press(event):
    # (definition continues beyond this chunk)
################################ Perform clustering using peak based dimension reduction (DBSCAN), then PCA reduction, and then L2 based agglomerative clustering. ################################ """ """ Load Data and Peak Data """ dataGrid = DataGrid_TiNiSn_500C() ####################################################### data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/" regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv""" peakGrid = DataGrid(data_dir, regex) #TODO: BETTER WAY OF DOING THIS # WITHOUT EXCESSIVE LOADING peakMode = 3 #2,3 # 1 - minblock peak fitting # 2 - BBA peak fitting # 3 - BBA curve fitting #move values to right column when curve params used #easier that duplicating code below if peakMode == 3: #Reload peak grid with curve data #NOTE: NOT EFFICIENT data_dir = "/home/sasha/Desktop/peakTest/"
from mpl_toolkits.mplot3d import Axes3D # noqa: F401 unused import import matplotlib.pyplot as plt import colorsys import numpy as np from data_loading.data_grid_TiNiSn import DataGrid, DataGrid_TiNiSn_500C, DataGrid_TiNiSn_600C """ Load Data and Peak Data """ dataGrid = DataGrid_TiNiSn_500C() data_dir = "/home/sasha/Desktop/iterative_curve_fitting_save_test/" regex = """params_(?P<num>.*?).csv""" peakGrid = DataGrid(data_dir, regex) used_points = set() #dictionary of used points total_peaks = 0 for loc in peakGrid.data.keys(): total_peaks += len(peakGrid.data_at_loc(loc)) """ Get the adjacent peaks that "connect" to a given peak """ def get_adjacent_points(x, y, Q_i, is_vert): def dir(dx, dy): if not peakGrid.in_grid(x + dx, y + dy): return None Q = peakGrid.data_at(x, y)[Q_i, 1]
# Command-line interface: --delta controls how far a peak may shift between
# neighboring grid locations and still be treated as the same peak.
parser = argparse.ArgumentParser(description='Run Peak Clustering')
parser.add_argument('-d', '--delta', type=float, default=.1, help='peak shift allowance')
args = parser.parse_args()

#folder with data files
dataGrid = DataGrid_TiNiSn_500C()
data_dir = "/home/sasha/Desktop/saveTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv"""
peakGrid = DataGrid(data_dir, regex)

# dataGrid.data[0][1:,1] is peak locations
# dataGrid.data[0][1:,3] is peak intensity

##################################
# LOAD PEAK DATA
# NOTE(review): the triple-quoted string below is a commented-out plotting
# experiment; it is not closed within this chunk.
'''
k1 = 97
k2 = 134
X = dataGrid.data[k1][:,0]
Y = dataGrid.data[k1][:,1]
plt.plot(X,Y)
################################ Perform clustering using peak based dimension reduction and then L1 similarity in the agglomerative clustering algorithm. ################################ """ """ Load Data and Peak Data """ dataGrid = DataGrid_TiNiSn_500C() data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/" regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv""" peakGrid = DataGrid(data_dir,regex) """ Create a list of peaks in the form [x,y,p] """ SCALE = 100 def to_point(x,y,p): return [(x-1)/15.,(y-1)/15.,SCALE*float(p)/5] peaks = [] for k in peakGrid.data.keys(): x,y = peakGrid.coord(k) [peaks.append(to_point(x,y,p)) for p in peakGrid.data_at_loc(k)[:,1]] """
import imageio
import os

"""
######################
Plotting found peaks on the diffraction patterns
######################
"""

"""
Load Data and Peak Data
"""
dataGrid = DataGrid_TiNiSn_500C()
data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/"
#data_dir = "/home/sasha/Desktop/peakTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv"""
peakGrid = DataGrid(data_dir, regex)

# Cache of detected peak indices, one text file per grid location.
# Missing/unreadable cache files are regenerated from the raw spectra.
data_dir = "/home/sasha/Desktop/peakData_temp/"
for loc in range(1, dataGrid.size + 1):
    file = data_dir + str(loc) + ".txt"
    try:
        # SECURITY: eval() on file contents executes arbitrary code if the
        # cache file is tampered with -- acceptable only for this local,
        # self-written cache; consider ast.literal_eval instead.
        peaks = eval(open(file).read())
    except:
        # Bare except doubles as the cache-miss path (FileNotFoundError,
        # parse errors, ...); any failure triggers regeneration.
        print("Generating " + str(loc))
        X = dataGrid.data_at_loc(loc)[:, 0]
        Y = dataGrid.data_at_loc(loc)[:, 1]
        #curve_params = fit_curves_to_data(X,Y)
        peaks = get_peak_indices(X, Y)
        open(file, 'w+').write(str(peaks))
    peakGrid.data[loc] = peaks
################################ """ """ Load Data and Peak Data """ dataGrid = DataGrid_TiNiSn_500C() #data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/" #data_dir = "/home/sasha/Desktop/peakTest/" data_dir = "/home/sasha/Desktop/peakParamsOnMinBlocks/" regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv""" #regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkg_curveParams.csv""" peakGrid = DataGrid(data_dir,regex) ShowBBA = True isCurveParams = False #move values to right column when curve params used #easier that duplicating code below if isCurveParams: for loc in range(1,dataGrid.size+1): peakGrid.data[loc][:,1] = peakGrid.data[loc][:,2] if not ShowBBA: data_dir = "/home/sasha/Desktop/peakData_temp/" for loc in range(1,dataGrid.size+1):
Script for plotting spectra at several data values to visually compare differences.
'''
from data_loading.data_grid_TiNiSn import DataGrid, DataGrid_TiNiSn_500C, DataGrid_TiNiSn_600C
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
import math

dataGrid = DataGrid_TiNiSn_500C()
data_dir = "/home/sasha/Desktop/iterative_curve_fitting_save_test/"
regex = """params_(?P<num>.*?).csv"""
peakGrid = DataGrid(data_dir, regex)

# grid locations to plot
locations = [152, 151, 150, 149, 148, 147, 137, 136, 123]

#how much to shift each grid location vertically
#(makes it easier to see peaks)
#shifts = [0,100,200,300,400]
shifts = [100 * i for i in range(len(locations))]

colors = cm.get_cmap("viridis")

# Plot each selected spectrum, offset vertically so curves don't overlap.
for i, k in enumerate(locations):
    y = dataGrid.data[k][:, 1]
    # Shifts are applied only when one was provided per location.
    if len(shifts) == len(locations):
        y = y + shifts[i]
        # (loop body continues beyond this chunk)
import colorsys
from sklearn.cluster import AgglomerativeClustering
from sklearn.decomposition import PCA
import numpy as np
from data_loading.data_grid_TiNiSn import DataGrid

"""
Load Data and Peak Data
"""
# NOTE(review): the first data_dir assignment is immediately overwritten --
# looks like a leftover alternative path; confirm before deleting.
data_dir = "/home/sasha/Desktop/iterative_curve_fitting_save_test/"
data_dir = "/home/sasha/Desktop/TiNiSn_500C-20190604T152446Z-001/TiNiSn_500C/params/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSub_1D_params.csv"""
peakGrid = DataGrid(data_dir, regex)

used_points = set()  #set of points already consumed by a connection

# Total number of fitted peaks across every grid location.
total_peaks = 0
for loc in peakGrid.data.keys():
    total_peaks += len(peakGrid.data_at_loc(loc))

"""
Get the adjacent peaks that "connect" to a given peak
"""
def get_adjacent_points(x, y, Q_i):
    # Inner helper: probe the neighbor at offset (dx, dy); returns None when
    # that neighbor falls outside the wafer grid.
    # NOTE: `dir` shadows the builtin of the same name (local scope only).
    def dir(dx, dy):
        if not peakGrid.in_grid(x + dx, y + dy):
            return None
            # (definition continues beyond this chunk)
from data_loading.data_grid_TiNiSn import DataGrid, DataGrid_TiNiSn_500C, DataGrid_TiNiSn_600C
import matplotlib.pyplot as plt
import numpy as np
from utils.utils import csv_to_dict
from scipy.signal import find_peaks

"""
Load Data and Peak Data
"""
dataGrid = DataGrid_TiNiSn_500C()
data_dir = "/home/sasha/Desktop/TiNiSn_500C_PeakData_0.5/"
#data_dir = "/home/sasha/Desktop/peakTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkgdSu_peakParams.csv"""
peakGrid = DataGrid(data_dir, regex)

data_dir = "/home/sasha/Desktop/peakTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkg_curveParams.csv"""
curveGrid = DataGrid(data_dir, regex)

"""
Smoothing function (not used)
"""
def smooth(list, k):
    # Centered moving average with window size k.
    # NOTE: the parameter `list` shadows the builtin, and the local `smooth`
    # shadows the function's own name -- both local-scope only.
    smooth = []
    for i in range(len(list)):
        # Clamp the window to the sequence bounds at both edges.
        a = max(i - int(k / 2), 0)
        b = min(i + int(k / 2), len(list) - 1)
        smooth.append(sum(list[a:b + 1]) / (b - a))
        # (definition continues beyond this chunk)
from peak_fitting.fit_data_sample import fit_curves_to_data, get_peak_indices
from scipy.optimize import curve_fit
from scipy.signal import find_peaks
from scipy.special import wofz
import matplotlib.pyplot as plt
import numpy as np

"""
Load Data
"""
# NOTE(review): DataGrid / DataGrid_TiNiSn_500C are used without a visible
# import -- presumably imported earlier in this file, outside this chunk.
dataGrid = DataGrid_TiNiSn_500C()
data_dir = "/home/sasha/Desktop/peakTest/"
regex = """TiNiSn_500C_Y20190218_14x14_t60_(?P<num>.*?)_bkg_curveParams.csv"""
peakGrid = DataGrid(data_dir, regex)


def line(x, slope, shift):
    # Linear background model.
    return slope * x + shift


def gaussian(x, amp, cen, sig):
    # Normalized Gaussian scaled by amp: area under the curve equals amp.
    return amp * np.exp(-(x - cen)**2 / (2 * sig**2)) / (sig * np.sqrt(2 * np.pi))


def gaussian_shift(x, amp, cen, sig, shift, slope):
    # Gaussian peak on a linear background (gaussian + line, inlined so it
    # can be handed to curve_fit as a single model with 5 free parameters).
    return amp * np.exp(
        -(x - cen)**2 / (2 * sig**2)) / (sig * np.sqrt(2 * np.pi)) + slope * x + shift