def test_find_experimental_edge_energies_re_analyze_false(self):
    # Test the keyword re_analyze. The function keeps all edge data from the previous analysis. If re_analyze = False,
    # the function only changes the filtering options to select more or fewer edges. The filtering should be very
    # fast.
    file_name = os.path.join('Test_Data', 'DectrisSI_callibrated.dat')
    energies, eels_spectrum = numpy.loadtxt(file_name, delimiter=',', unpack=True)
    energy_step = (energies[-1] - energies[0]) / energies.size
    energy_range_ev = numpy.array([energies[0], energies[-1] + energy_step])
    itest = 0
    print("Start 10 iterations with analysis.")
    while itest < 10:
        edge_energies, q_factors = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev)
        print(itest)
        itest += 1
    print('Done.')
    itest = 0
    print("Start 10 iterations without analysis.")
    while itest < 10:
        edge_energies2, q_factors2 = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev, re_analyze=False)
        # The re-filtered results must match the full analysis exactly.
        assert numpy.all(edge_energies == edge_energies2)
        assert numpy.all(q_factors == q_factors2)
        print(itest)
        itest += 1
    print('Done.')
def test_power_law_background_1d_with_negative_values(self):
    # tests only that it can handle negative values; not that it is accurate
    shape = 1000
    background = 10000 * numpy.power(numpy.linspace(1, 10, shape), -4)  # 0 -> 10000
    background = background + numpy.random.poisson(numpy.full((shape,), 100)) - 100
    fit_range = range(400, 500)
    self.assertLess(numpy.amin(background[fit_range]), 0)
    spectral_range = numpy.array([0, shape])
    edge_onset = fit_range.stop
    edge_delta = 100.0
    bkgd_range = numpy.array([fit_range.start, fit_range.stop])
    analyzer.core_loss_edge(background, spectral_range, edge_onset, edge_delta, bkgd_range)
def test_core_loss_edge_1d(self):
    # this edge in Swift:
    #   10 * pow(linspace(1,10,1000), -4) + 0.01 * rescale(gammapdf(linspace(0, 1, 1000), 1.3, 0.5, 0.01))
    scale = 1E4
    background = scale * numpy.power(numpy.linspace(1, 10, 1000), -4)
    raw_signal = scipy.stats.gamma(a=1.3, loc=0.5, scale=0.01).pdf(numpy.linspace(0, 1, 1000))
    signal = scale / 1000 * (raw_signal - numpy.amin(raw_signal)) / numpy.ptp(raw_signal)
    spectrum = background + signal
    self.assertGreaterEqual(numpy.amin(spectrum), 1)
    spectral_range = numpy.array([0, 1000])
    edge_onset = 500.0
    edge_delta = 100.0
    bkgd_range = numpy.array([400.0, 500.0])
    edge_map, edge_profile, total_integral, bkgd_model, profile_range = analyzer.core_loss_edge(spectrum, spectral_range, edge_onset, edge_delta, bkgd_range)
    self.assertEqual(tuple(profile_range), (400, 600))
    self.assertEqual(edge_map.shape, (1, ))
    signal_slice = signal[400:600]
    expected_edge_map = numpy.trapz(signal_slice)
    log10_scale = int(-math.log10(scale) + 1)  # 1/10 of scale
    self.assertAlmostEqual(edge_map[0], expected_edge_map, 1 + log10_scale)  # within 1/10
    self.assertEqual(edge_profile.shape, (1, 200))
    # J Kas - 200 was failing, I think because of my change to denominator in finding step size in
    # CurveFittingAndAnalysis.py
    self.assertAlmostEqual(numpy.amin(signal_slice), numpy.amin(edge_profile), 2 + log10_scale)  # within 1/100
    self.assertAlmostEqual(numpy.amax(signal_slice), numpy.amax(edge_profile), 2 + log10_scale)  # within 1/100
    self.assertAlmostEqual(numpy.average(signal_slice), numpy.average(edge_profile), 4 + log10_scale)  # within 1/10000
def test_stoichiometry_found_from_experimental_eels(self):
    # Read EELS data from file (BN). This is data from Tracy, taken from a thin part of the sample,
    # and represents to some extent a best-case scenario.
    test_data_dir = Path(__file__).parent / 'Test_Data'
    data_files = [test_data_dir / 'BN_0-0910eV_.msa', test_data_dir / 'CaCO3.msa', test_data_dir / 'CuO.msa']
    labels = ['BN', 'CaCO_3', 'CuO']
    #data_file = Path('./Test_Data/EELS_Thick.csv')
    #data_file = Path('./Test_Data/EELS_Thin.csv')
    atomic_number_arrays = [[7, 5], [8, 20, 6], [29, 8]]
    beam_energies = [200.0, 200.0, 200.0]
    convergence_angles = [0.0, 0.0, 0.0]
    collection_angles = [100.0, 100.0, 100.0]
    edge_onset_arrays = [[401.0, 188.0], [532.0, 346.0, 284.0], [931.0, 532.0]]
    edge_delta_arrays = [[25.0, 25.0], [40.0, 25.0, 25.0], [40.0, 40.0]]
    background_arrays = [[numpy.array([358.0, 393.0]), numpy.array([167.0, 183.0])],
                         [numpy.array([474.0, 521.0]), numpy.array([308.0, 339.0]), numpy.array([253.0, 278.0])],
                         [numpy.array([831.0, 912.0]), numpy.array([474.0, 521.0])]]
    DM_Stoichiometries = [[1.0, 0.83], [1.0, 0.76, 0.27], [1.0, 0.09]]
    True_Stoichiometries = [[1.0, 1.0], [1.0, 0.3333, 0.3333], [1.0, 1.0]]
    iData = 0
    for data_file in data_files:
        energy_grid, spectrum = numpy.loadtxt(data_file, delimiter=',', unpack=True)
        # Set up input to stoichiometry quantification. All settings are hard coded to match DM 2.32.888.
        beam_energy_keV = beam_energies[iData]
        atomic_numbers = atomic_number_arrays[iData]
        convergence_angle_mrad = convergence_angles[iData]
        collection_angle_mrad = collection_angles[iData]
        edge_onsets = edge_onset_arrays[iData]
        edge_deltas = edge_delta_arrays[iData]
        background_ranges = background_arrays[iData]
        erange = numpy.zeros(2)
        erange[0] = energy_grid[0]
        erange[1] = energy_grid[-1]
        stoich_data = EELS_DataAnalysis.stoichiometry_from_eels(spectrum, erange, background_ranges, atomic_numbers,
                                                                edge_onsets, edge_deltas, beam_energy_keV * 1000.0,
                                                                convergence_angle_mrad / 1000.0, collection_angle_mrad / 1000.0)
        stoich = stoich_data[0]
        error_in_stoich = stoich_data[1]
        iAtom = 0
        DM_Stoichiometry = DM_Stoichiometries[iData]
        True_Stoichiometry = True_Stoichiometries[iData]
        print("----------------------------------------------------------------------------\n\n\n")
        print('Stoichiometry from experimental EELS data from the EELS Atlas: ' + labels[iData])
        print('atomic#, N, N from DM, True N')
        for atomic_number in atomic_numbers:
            print(atomic_number, stoich[iAtom][0], '+/-', error_in_stoich[iAtom][0], DM_Stoichiometry[iAtom], True_Stoichiometry[iAtom])
            iAtom += 1
        print("----------------------------------------------------------------------------")
        iData += 1
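# Illustrative sketch (not the library's implementation; stoichiometry_from_eels does the real work): the relative
# abundances printed above follow the standard core-loss quantification relation
#     N_A / N_B = (I_A / sigma_A) / (I_B / sigma_B),
# where I is the background-subtracted edge intensity integrated over the edge delta window and sigma is the partial
# cross section computed for the same window and collection conditions. The helper name below is hypothetical.
def _relative_stoichiometry_sketch(edge_integrals, partial_cross_sections):
    """Return abundances relative to the first element from integrated edge intensities (illustration only)."""
    areal_densities = [intensity / sigma for intensity, sigma in zip(edge_integrals, partial_cross_sections)]
    return [n / areal_densities[0] for n in areal_densities]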
def test_stoichiometry_found_from_theoretical_EELS(self):
    if True:
        # This test is currently disabled; it requires the atomic_eels module.
        return
    from atomic_eels import atomic_diff_cross_section
    # Make BN EELS data using FEFF.
    atomic_numbers = [5, 7]
    amps = [1.0, 1.0]
    edge_label = 'K'
    beam_energy_keV = 200.0
    convergence_angle_mrad = 1.5
    collection_angle_mrad = 1.5
    egrid_eV = numpy.arange(0.0, 1000.0, 0.5)  # Define energy grid.
    energyDiffSigma_total = numpy.zeros_like(egrid_eV)  # Initialize total cross section.
    edge_onsets = [0.0, 0.0]
    edge_deltas = [0.0, 0.0]
    background_ranges = [numpy.zeros(2), numpy.zeros(2)]
    iEdge = 0
    for atomic_number in atomic_numbers:
        #print(atomic_number, edge_label, beam_energy_keV, convergence_angle_mrad, collection_angle_mrad)
        energyDiffSigma, edge_onsets[iEdge] = atomic_diff_cross_section(atomic_number, edge_label, beam_energy_keV,
                                                                        convergence_angle_mrad, collection_angle_mrad, egrid_eV)
        energyDiffSigma_total = numpy.add(energyDiffSigma_total, energyDiffSigma * amps[iEdge])
        # Set background ranges and offsets, etc.
        background_ranges[iEdge][0] = max(edge_onsets[iEdge] - 30.0, 0.0)
        background_ranges[iEdge][1] = max(edge_onsets[iEdge] - 5.0, 0.0)
        edge_deltas[iEdge] = 30.0
        iEdge += 1
    #print('bgr', background_ranges, edge_onsets)
    bgfunc = lambda x: 1.0e-3 / (x + 10.0)**3
    background = numpy.vectorize(bgfunc)
    energyDiffSigma_total = numpy.add(energyDiffSigma_total, background(egrid_eV)) * 10.0e12
    #plt.plot(egrid_eV, energyDiffSigma_total)
    #plt.show()
    #noise = numpy.random.normal(0.0, max(energyDiffSigma_total)/100.0, energyDiffSigma_total.size)
    #energyDiffSigma_total = numpy.add(noise, energyDiffSigma_total)
    erange = numpy.zeros(2)
    estep = (egrid_eV[-1] - egrid_eV[0]) / (egrid_eV.size - 1)
    print(estep)
    erange[0] = egrid_eV[0]
    erange[1] = egrid_eV[-1] + estep
    stoich_data = EELS_DataAnalysis.stoichiometry_from_eels(energyDiffSigma_total, erange, background_ranges, atomic_numbers,
                                                            edge_onsets, edge_deltas, beam_energy_keV * 1000.0,
                                                            convergence_angle_mrad / 1000.0, collection_angle_mrad / 1000.0)
    stoichiometry = stoich_data[0]
    error_in_stoichiometry = stoich_data[1]
    iAtom = 0
    print("Stoichiometry from theoretical EELS signal of BN:")
    for atomic_number in atomic_numbers:
        print(atomic_number, stoichiometry[iAtom][0], '+/-', error_in_stoichiometry[iAtom][0])
        assert abs(stoichiometry[iAtom][0] / amps[iAtom] * amps[0] - 0.5) < 0.01  # Test that the stoichiometry found is within 1%.
        iAtom += 1
def test_find_experimental_edge_energies_sensitivity(self):
    # Test changes to the sensitivity, which will include fewer poles as it decreases from 1 to 0.
    file_name = os.path.join('Test_Data', 'DectrisSI_callibrated.dat')
    energies, eels_spectrum = numpy.loadtxt(file_name, delimiter=',', unpack=True)
    energy_step = (energies[-1] - energies[0]) / energies.size
    energy_range_ev = numpy.array([energies[0], energies[-1] + energy_step])
    print("Do initial analysis.")
    # First set the sensitivity to 1.0; this will find many edges.
    edge_energies, q_factors = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev,
                                                                                 correlation_cutoff_scale=0.0, debug_plotting=False)
    print("Number of edges found = ", edge_energies.size)
    scale = 0.0
    # Now change the sensitivity parameter from 0 to 1 in steps of 0.1. The number of edges should range from 1 up to
    # some maximum that depends on the data and the parameters used (14 in this case).
    print("Start 10 iterations with varying sensitivity.")
    while scale <= 1.0:
        edge_energies, q_factors = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev, re_analyze=False,
                                                                                     correlation_cutoff_scale=scale, debug_plotting=False)
        print("Sensitivity: ", 1.0 - scale)
        print("Number of edges found = ", edge_energies.size)
        print(" ")
        scale = scale + 0.1
def test_power_law_background_1d(self):
    # this edge in Swift:
    #   10 * pow(linspace(1,10,1000), -4)
    scale = 1E4
    background = scale * numpy.power(numpy.linspace(1, 10, 1000), -4)
    self.assertGreaterEqual(numpy.amin(background), 1)
    spectral_range = numpy.array([0, 1000])
    edge_onset = 500.0
    edge_delta = 100.0
    bkgd_range = numpy.array([400.0, 500.0])
    signal_background = background[400:600]
    edge_map, edge_profile, total_integral, bkgd_model, profile_range = analyzer.core_loss_edge(background, spectral_range, edge_onset, edge_delta, bkgd_range)
    self.assertEqual(bkgd_model.shape, (1, 200))
    # J Kas - 200 was failing, I think because of my change to denominator in finding step size in
    # CurveFittingAndAnalysis.py
    self.assertLess(abs(numpy.amin(bkgd_model) - numpy.amin(signal_background)), numpy.ptp(signal_background) / 100.0)
    self.assertLess(abs(numpy.amax(bkgd_model) - numpy.amax(signal_background)), numpy.ptp(signal_background) / 100.0)
    self.assertLess(numpy.average(bkgd_model - signal_background), scale * 0.0001)
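# Conceptual sketch (assumption: this is NOT the routine used by analyzer.core_loss_edge; it only illustrates the idea
# the test above checks): a power-law background A * E**(-r) can be estimated over a fit window by linear least squares
# in log-log space and then extrapolated under the edge. For a pure power-law spectrum like the one above,
# _power_law_background_sketch(numpy.linspace(1, 10, 1000), background, slice(400, 500)) recovers the model essentially exactly.
def _power_law_background_sketch(energies_ev, counts, fit_slice):
    """Fit counts ~ A * E**(-r) over fit_slice and return the model evaluated on the full energy axis."""
    log_e = numpy.log(energies_ev[fit_slice])
    log_c = numpy.log(counts[fit_slice])
    slope, intercept = numpy.polyfit(log_e, log_c, 1)  # log(counts) = intercept + slope * log(E); slope ~ -r
    return numpy.exp(intercept) * numpy.power(energies_ev, slope)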
def test_find_experimental_edge_energies_with_defaults(self):
    # Tests the function without any optional parameters.
    # Load data from file.
    file_name = os.path.join('Test_Data', 'DectrisSI_callibrated.dat')
    energies, eels_spectrum = numpy.loadtxt(file_name, delimiter=',', unpack=True)
    # Set energy range (goes from the first energy to the last energy + one step).
    energy_step = (energies[-1] - energies[0]) / energies.size
    energy_range_ev = numpy.array([energies[0], energies[-1] + energy_step])
    # Find edges in the spectrum.
    edge_energies, q_factors = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev, debug_plotting=False)
    # Print the edges found, and the edges found by visual inspection for comparison.
    # The edge finder will not necessarily find all edges, and might find extra edges.
    print("Number of edges found = ", edge_energies.size)
    print("Edges found:")
    print(numpy.sort(edge_energies).astype(int))
    print("Edges in spectrum:")
    print(114, 287, 460, 537, 564, 649, 804, 821, 858, 875)
def map_background_subtracted_signal(data_and_metadata: DataAndMetadata.DataAndMetadata, electron_shell: typing.Optional[PeriodicTable.ElectronShell], fit_ranges, signal_range) -> DataAndMetadata.DataAndMetadata:
    """Map the background-subtracted edge signal for data and metadata with the signal in the last index."""
    signal_index = -1
    signal_length = data_and_metadata.dimensional_shape[signal_index]
    signal_range = (numpy.asarray(signal_range) * signal_length).astype(float)
    signal_calibration = data_and_metadata.dimensional_calibrations[signal_index]
    spectral_range = numpy.array([signal_calibration.convert_to_calibrated_value(0), signal_calibration.convert_to_calibrated_value(data_and_metadata.dimensional_shape[signal_index])])
    edge_onset = signal_calibration.convert_to_calibrated_value(signal_range[0])
    edge_delta = signal_calibration.convert_to_calibrated_value(signal_range[1]) - edge_onset
    bkgd_ranges = numpy.array([numpy.array([signal_calibration.convert_to_calibrated_value(fit_range[0] * signal_length), signal_calibration.convert_to_calibrated_value(fit_range[1] * signal_length)]) for fit_range in fit_ranges])
    cross_section = None
    if electron_shell is not None:
        beam_energy_ev = data_and_metadata.metadata.get("beam_energy_eV")
        beam_convergence_angle_rad = data_and_metadata.metadata.get("beam_convergence_angle_rad")
        beam_collection_angle_rad = data_and_metadata.metadata.get("beam_collection_angle_rad")
        if beam_energy_ev is not None and beam_convergence_angle_rad is not None and beam_collection_angle_rad is not None:
            cross_section_data = partial_cross_section_nm2(electron_shell.atomic_number, electron_shell.shell_number, electron_shell.subshell_index, edge_onset, edge_delta, beam_energy_ev, beam_convergence_angle_rad, beam_collection_angle_rad)
            cross_section = cross_section_data[0]
    data = data_and_metadata.data
    # Fit within fit_range; calculate background within signal_range; subtract from source signal range.
    edge_map, edge_profile, total_integral, bkgd_model, profile_range = EELS_DataAnalysis.core_loss_edge(data, spectral_range, edge_onset, edge_delta, bkgd_ranges)
    result = edge_map if cross_section is None else edge_map / cross_section
    dimensional_calibrations = data_and_metadata.dimensional_calibrations[0:-1]
    intensity_calibration = copy.deepcopy(data_and_metadata.intensity_calibration)
    if cross_section is not None:
        intensity_calibration.units = "~"
    return DataAndMetadata.new_data_and_metadata(result, intensity_calibration, dimensional_calibrations)
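# Usage sketch with hypothetical values (the spectrum-image shape, calibrations, and metadata numbers below are
# illustrative assumptions, not taken from the tests): fit_ranges and signal_range are fractions of the spectral
# axis, and passing electron_shell=None skips the cross-section scaling so the raw edge map is returned.
def _map_background_subtracted_signal_usage_sketch(si_data: numpy.ndarray) -> DataAndMetadata.DataAndMetadata:
    """Build a calibrated spectrum image and map a background-subtracted edge (illustration only)."""
    from nion.data import Calibration  # assumed to be available alongside DataAndMetadata
    dimensional_calibrations = [Calibration.Calibration(units="nm"), Calibration.Calibration(units="nm"),
                                Calibration.Calibration(offset=100.0, scale=0.5, units="eV")]
    metadata = {"beam_energy_eV": 200000.0, "beam_convergence_angle_rad": 0.0015, "beam_collection_angle_rad": 0.015}
    xdata = DataAndMetadata.new_data_and_metadata(si_data, dimensional_calibrations=dimensional_calibrations, metadata=metadata)
    # Fit the background over 40-50% of the axis and integrate the signal over 50-60%.
    return map_background_subtracted_signal(xdata, None, [(0.40, 0.50)], (0.50, 0.60))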
def calculate_background_signal(data_and_metadata: DataAndMetadata.DataAndMetadata, fit_ranges, signal_range) -> DataAndMetadata.DataAndMetadata:
    """Calculate the background from data and metadata with the signal in the last index."""
    signal_index = -1
    signal_length = data_and_metadata.dimensional_shape[signal_index]
    signal_range = (numpy.asarray(signal_range) * signal_length).astype(float)
    data = data_and_metadata.data
    if len(data_and_metadata.dimensional_calibrations) == 0:
        return None
    # Fit within fit_range; calculate background within signal_range; subtract from source signal range.
    signal_calibration = data_and_metadata.dimensional_calibrations[signal_index]
    spectral_range = numpy.array([signal_calibration.convert_to_calibrated_value(0), signal_calibration.convert_to_calibrated_value(signal_length)])
    edge_onset = signal_calibration.convert_to_calibrated_value(signal_range[0])
    edge_delta = signal_calibration.convert_to_calibrated_value(signal_range[1]) - edge_onset
    # bkgd_range = numpy.array([signal_calibration.convert_to_calibrated_value(fit_range0[0]), signal_calibration.convert_to_calibrated_value(fit_range0[1])])
    bkgd_ranges = numpy.array([numpy.array([signal_calibration.convert_to_calibrated_value(fit_range[0] * signal_length), signal_calibration.convert_to_calibrated_value(fit_range[1] * signal_length)]) for fit_range in fit_ranges])
    # print("d {} s {} e {} d {} b {}".format(data.shape if data is not None else None, spectral_range, edge_onset, edge_delta, bkgd_range))
    edge_map, edge_profile, total_integral, bkgd_model, profile_range = EELS_DataAnalysis.core_loss_edge(data, spectral_range, edge_onset, edge_delta, bkgd_ranges)
    # Squeeze the result.
    result = numpy.squeeze(bkgd_model)
    max_channel = int(round(max([fit_range[1] * signal_length for fit_range in fit_ranges] + [signal_range[1]])))
    min_channel = int(round(min([fit_range[0] * signal_length for fit_range in fit_ranges] + [signal_range[0]])))
    data_shape = list(data_and_metadata.data_shape)
    data_shape[signal_index] = max_channel - min_channel
    data_shape = tuple(data_shape)
    dimensional_calibrations = copy.deepcopy(data_and_metadata.dimensional_calibrations)
    dimensional_calibrations[signal_index].offset = signal_calibration.convert_to_calibrated_value(min_channel)
    dimensional_calibrations[signal_index].scale = (signal_calibration.convert_to_calibrated_value(max_channel) - dimensional_calibrations[signal_index].offset) / data_shape[signal_index]
    return DataAndMetadata.new_data_and_metadata(result, data_and_metadata.intensity_calibration, dimensional_calibrations)
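# Usage sketch with hypothetical calibration values: for a single 1D spectrum the call mirrors the spectrum-image
# sketch above, but what comes back is only the fitted background model, recalibrated so its energy axis starts at
# the lowest channel used by the fit and signal ranges.
def _calculate_background_signal_usage_sketch(spectrum: numpy.ndarray) -> DataAndMetadata.DataAndMetadata:
    """Fit a background over 40-50% of the energy axis and return the model (illustration only)."""
    from nion.data import Calibration  # assumed to be available alongside DataAndMetadata
    energy_calibration = Calibration.Calibration(offset=100.0, scale=0.5, units="eV")
    xdata = DataAndMetadata.new_data_and_metadata(spectrum, dimensional_calibrations=[energy_calibration])
    return calculate_background_signal(xdata, [(0.40, 0.50)], (0.50, 0.60))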
def test_stoichiometry_from_multidimensional_EELS(self):
    from atomic_eels import atomic_diff_cross_section
    # Make BN EELS data using FEFF.
    atomic_numbers = [5, 7]
    nSpectra = 100
    amps = [1.0, 1.0]
    edge_label = 'K'
    beam_energy_keV = 200.0
    convergence_angle_mrad = 1.5
    collection_angle_mrad = 1.5
    egrid_eV = numpy.arange(0.0, 1000.0, 0.5)  # Define energy grid.
    energyDiffSigma_total = numpy.array([numpy.zeros_like(egrid_eV)] * nSpectra)  # Initialize total cross sections.
    energyDiffSigma = numpy.array([numpy.zeros_like(egrid_eV)] * 2)  # Initialize per-edge cross sections.
    edge_onsets = [0.0, 0.0]
    edge_deltas = [0.0, 0.0]
    background_ranges = [numpy.zeros(2), numpy.zeros(2)]
    iEdge = 0
    for atomic_number in atomic_numbers:
        energyDiffSigma[iEdge], edge_onsets[iEdge] = atomic_diff_cross_section(atomic_number, edge_label, beam_energy_keV,
                                                                               convergence_angle_mrad, collection_angle_mrad, egrid_eV)
        iEdge += 1
    iEdge = 0
    for atomic_number in atomic_numbers:
        iSpectrum = 0
        while iSpectrum < nSpectra:
            if iEdge == 0:
                amps[iEdge] = numpy.sin(float(iSpectrum) / float(nSpectra) * numpy.pi / 2.0)**2
            else:
                amps[iEdge] = numpy.cos(float(iSpectrum) / float(nSpectra) * numpy.pi / 2.0)**2
            energyDiffSigma_total[iSpectrum] = numpy.add(energyDiffSigma_total[iSpectrum], energyDiffSigma[iEdge] * amps[iEdge])
            iSpectrum += 1
        # Set background ranges and offsets, etc.
        background_ranges[iEdge][0] = max(edge_onsets[iEdge] - 30.0, 0.0)
        background_ranges[iEdge][1] = max(edge_onsets[iEdge] - 5.0, 0.0)
        edge_deltas[iEdge] = 30.0
        iEdge += 1
    iSpectrum = 0
    # Add background.
    bgfunc = lambda x: 1.0e-3 / (x + 10.0)**3
    background = numpy.vectorize(bgfunc)
    while iSpectrum < nSpectra:
        energyDiffSigma_total[iSpectrum] = numpy.add(energyDiffSigma_total[iSpectrum], background(egrid_eV)) * 10.0e12
        iSpectrum += 1
    #print('bgr', background_ranges, edge_onsets)
    #plt.plot(egrid_eV, energyDiffSigma_total)
    #plt.show()
    #noise = numpy.random.normal(0.0, max(energyDiffSigma_total)/100.0, energyDiffSigma_total.size)
    #energyDiffSigma_total = numpy.add(noise, energyDiffSigma_total)
    erange = numpy.zeros(2)
    erange[0] = egrid_eV[0]
    erange[1] = egrid_eV[-1]
    stoich_data = EELS_DataAnalysis.stoichiometry_from_eels(energyDiffSigma_total, erange, background_ranges, atomic_numbers,
                                                            edge_onsets, edge_deltas, beam_energy_keV * 1000.0,
                                                            convergence_angle_mrad / 1000.0, collection_angle_mrad / 1000.0)
    stoichiometry = stoich_data[0]
    error_in_stoich = stoich_data[1]
    iAtom = 0
    print("Stoichiometry from multidimensional spectrum array.")
    for atomic_number in atomic_numbers:
        print(atomic_number)
        print(stoichiometry[iAtom])
        iAtom += 1
def test_eels_quantification(self):
    if True:
        # For now turn this test off. I will keep the directory of all EELS Atlas data on hand for more testing.
        return
    import glob
    import pandas
    df = pandas.read_csv("EELS_Atlas_Major/files_HE.dat", delim_whitespace=True, header=None)
    file_names, tmp1, efin, tmp2, estart = df.to_numpy().T
    i_search = 0
    total_extra_found = 0
    total_edges_edb = 0
    total_edges_matched = 0
    mintol = 0.1
    nfail = 0
    ntot = 0
    ptable = PeriodicTable.PeriodicTable()
    for file_name in file_names:
        # Load data from text file.
        print('\n\n\n')
        data_file = glob.glob('./EELS_Atlas_Major/**/' + file_name, recursive=True)[0]
        edge_file = glob.glob('./EELS_Atlas_Major/**/' + 'edges_' + os.path.splitext(file_name)[0] + '.dat', recursive=True)[0]
        energies, eels_spectrum = numpy.loadtxt(data_file, delimiter=',', unpack=True)
        energy_step = (energies[-1] - energies[0]) / energies.size
        energy_range_ev = numpy.array([energies[0], energies[-1] + energy_step])
        if estart[i_search] < 0:
            estart[i_search] = tmp1[i_search]
        search_range = [estart[i_search], efin[i_search]]
        chem_formula = file_name.split('_')[0]
        elements_exp = [elem.strip(string.digits) for elem in re.findall('[A-Z][^A-Z]*', chem_formula)
                        if str(ptable.atomic_number(elem.strip(string.digits))) in ptable.find_elements_in_energy_interval(search_range)]
        print(file_name, ':')
        experimental_edge_data = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev, search_range, debug_plotting=False)
        df = pandas.read_csv(edge_file, delim_whitespace=True, header=None)
        edge_data = EELS_DataAnalysis.find_species_from_experimental_edge_data(eels_spectrum, energy_range_ev, experimental_edge_data,
                                                                               search_range_ev=search_range, only_major_edges=True)
        elements_found = [ed[0] for ed in edge_data]
        edge_data = EELS_DataAnalysis.find_species_from_experimental_edge_data(eels_spectrum, energy_range_ev, experimental_edge_data,
                                                                               search_range_ev=search_range, only_major_edges=False,
                                                                               element_list=elements_found)
        edge_energies = experimental_edge_data[0]
        # The edge finder will not necessarily find all edges, and might find extra edges.
        elements_found = [ptable.element_symbol(ed[0]) for ed in edge_data]
        ntot += 1
        if all(x in elements_found for x in elements_exp):
            missing = False
        else:
            print(chem_formula, ": Failed to find all elements")
            i_search += 1
            continue
        edge_data = [ed for ed in edge_data if ptable.element_symbol(ed[0]) in elements_exp]
        #print('edge_data', edge_data)
        i_search += 1
        # We now have the elements and the edges we want to analyze; let's do the quantification.
        # Set microscope parameters.
        beam_energy_keV = 200.0
        convergence_angle_mrad = 0.0
        collection_angle_mrad = 100.0
        # Set up the atomic numbers, edge onsets, and background ranges.
        atomic_numbers = []
        background_ranges = []
        edge_onsets = []
        edge_deltas = []
        iElement = 0
        for ed in edge_data:
            iEdge = 0
            edge_onset = []
            while iEdge < len(ed[1]):
                edge_onsets = edge_onsets + [ed[1][iEdge][2]]
                atomic_numbers = atomic_numbers + [ed[0]]
                iEdge += 1
        #print('Atoms in system:', atomic_numbers)
        deltas = 30.0
        for i, onset in enumerate(edge_onsets):
            if i + 1 < len(edge_onsets) and atomic_numbers[i] == atomic_numbers[i + 1]:
                if edge_onsets[i + 1] - onset > deltas:
                    edge_deltas = edge_deltas + [deltas]
                else:
                    edge_deltas = edge_deltas + [deltas]  #[edge_onsets[i+1] - onset]
                    #print(edge_deltas[-1])
            else:
                edge_deltas = edge_deltas + [min(deltas, energy_range_ev[1] - onset)]
            if i > 0:
                if atomic_numbers[i] == atomic_numbers[i - 1]:
                    #background_ranges = background_ranges + [numpy.array([max(onset - 30.0, edge_onsets[i-1]), onset - 10.0])]
                    background_ranges = background_ranges + [numpy.array([max(onset - 30.0, energy_range_ev[0]), onset - 10.0])]
                else:
                    background_ranges = background_ranges + [numpy.array([max(onset - 30.0, energy_range_ev[0]), onset - 10.0])]
            else:
                background_ranges = background_ranges + [numpy.array([max(onset - 30.0, energy_range_ev[0]), onset - 10.0])]
        #print(edge_onsets)
        #print(edge_deltas)
        #print(background_ranges)
        stoich, error_in_stoich, quant_data, diff_cross, egrid_ev = EELS_DataAnalysis.stoichiometry_from_eels(
            eels_spectrum, energy_range_ev, background_ranges, atomic_numbers, edge_onsets, edge_deltas,
            beam_energy_keV * 1000.0, convergence_angle_mrad / 1000.0, collection_angle_mrad / 1000.0)
        for iat, atm in enumerate(atomic_numbers):
            erange = (edge_onsets[iat] - 50.0, edge_onsets[iat] + edge_deltas[iat] + 50)
            edges = ptable.find_all_edges_in_energy_interval(erange, atm)
            print(ptable.element_symbol(atm), ':')
            print('Energy Range: ', edge_onsets[iat], edge_onsets[iat] + edge_deltas[iat])
            for edg in edges:
                edgestr = edg.get_shell_str_in_eels_notation(include_subshell=True)
                print('\t', edgestr)
            print('\t', stoich[iat], error_in_stoich[iat])
        print('\n\n###############################################')
        if False:
            import matplotlib.pyplot as plt
            e_step = (quant_data[iat][4][1] - quant_data[iat][4][0]) / quant_data[iat][1][0].size
            profile_grid = numpy.arange(quant_data[iat][4][0], quant_data[iat][4][1], e_step)
            plt.plot(profile_grid, quant_data[iat][1][0])
            plt.plot(profile_grid, quant_data[iat][3][0])
            plt.plot(energies, eels_spectrum)
            plt.xlim(quant_data[iat][4][0] - 50, quant_data[iat][4][1] + 50)
            plt.plot(egrid_ev[iat], diff_cross[iat])
            plt.show()
def test_find_experimental_edge_energies_EELS_Atlas(self):
    if True:
        # For now turn this test off. I will keep the directory of all EELS Atlas data on hand for more testing.
        return
    import glob
    import pandas
    import matplotlib.pyplot as plt
    df = pandas.read_csv("EELS_Atlas_Major/files_HE.dat", delim_whitespace=True, header=None)
    file_names, tmp1, efin, tmp2, estart = df.to_numpy().T
    i_search = 0
    total_extra_found = 0
    total_edges_edb = 0
    total_edges_matched = 0
    mintol = 0.1
    for file_name in file_names:
        # Load data from text file.
        data_file = glob.glob('./EELS_Atlas_Major/**/' + file_name, recursive=True)[0]
        edge_file = glob.glob('./EELS_Atlas_Major/**/' + 'edges_' + os.path.splitext(file_name)[0] + '.dat', recursive=True)[0]
        energies, eels_spectrum = numpy.loadtxt(data_file, delimiter=',', unpack=True)
        energy_step = (energies[-1] - energies[0]) / energies.size
        energy_range_ev = numpy.array([energies[0], energies[-1] + energy_step])
        if estart[i_search] < 0:
            estart[i_search] = tmp1[i_search]
        search_range = [estart[i_search], efin[i_search]]
        print(file_name, ':')
        edge_energies, q_factors = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev, search_range, debug_plotting=True)
        df = pandas.read_csv(edge_file, delim_whitespace=True, header=None)
        edge_names, edb_edge_energies = df.to_numpy().T
        edge_names = edge_names.tolist()
        new_q_factors = q_factors.tolist()
        match = [False] * edb_edge_energies.size
        matched = [False] * edge_energies.size
        i_edb = 0
        all_matched = True
        new_edge_energies = edge_energies[:].tolist()
        new_edb_edge_energies = edb_edge_energies[:].tolist()
        matched = []
        qfs = []
        match_found = True
        print(new_edb_edge_energies)
        while len(new_edb_edge_energies) > 0 and len(new_edge_energies) > 0 and match_found:
            # Search through all (edb_edge_energy, edge_energy) pairs to find the minimum difference in energy.
            min_diff = 1e10
            min_i = -1
            min_j = -1
            match_found = False
            for i, edb_edge_energy in enumerate(new_edb_edge_energies):
                for j, energy in enumerate(new_edge_energies):
                    if min_diff > abs(energy - edb_edge_energy):
                        min_diff = abs(energy - edb_edge_energy)
                        min_i = i
                        min_j = j
            if min_diff / new_edb_edge_energies[min_i] < 0.03 or min_diff < 10.0:
                # Set matched and remove elements from lists.
                qfs = qfs + [new_q_factors.pop(min_j)]
                matched = matched + [[edge_names.pop(min_i), new_edb_edge_energies.pop(min_i), new_edge_energies.pop(min_j)]]
                match_found = True
        print("Edges matched:")
        print("Label Atlas Found")
        for m in matched:
            print(m)
        print("")
        print("Edges not matched:")
        i = 0
        while i < len(new_edb_edge_energies):
            en = new_edb_edge_energies[i]
            if en > search_range[0] and en < search_range[1]:
                print(edge_names[i], en)
                i += 1
            else:
                tmp = new_edb_edge_energies.pop(i)
                tmp = edge_names.pop(i)
        print("")
        print("Extra edges found:")
        for en in new_edge_energies:
            print(en)
        total_edges_matched += len(matched)
        total_edges_edb += len(new_edb_edge_energies) + len(matched)
        total_extra_found += len(new_edge_energies)
        if len(new_edb_edge_energies) + len(matched) > 0:
            print("Percentage of edges matched:", float(len(matched)) / float(len(matched) + len(new_edb_edge_energies)) * 100.0)
            print("Percentage extra edges:", float(len(new_edge_energies)) / float(len(matched) + len(new_edb_edge_energies)) * 100.0)
            print("")
            print("")
            print("")
            if False:
                ens = numpy.array([m[2] for m in matched])
                qfs = numpy.array(qfs)
                plt.stem(ens, qfs / numpy.amax(qfs), label='matched', use_line_collection=True)
                plt.stem(numpy.array(edge_energies), numpy.array(q_factors) / numpy.amax(qfs), label='found', use_line_collection=True)
                plt.plot(energies, eels_spectrum * energies**2 / numpy.amax(eels_spectrum * energies**2))
                plt.show()
        else:
            print("No edges in this energy range.")
            print("")
            print("")
            print("")
        i_search = i_search + 1
    if total_edges_edb > 0:
        print("Total percentage of edges found: ", float(total_edges_matched) / float(total_edges_edb) * 100.0)
        print("Total percentage of extra edges found: ", float(total_extra_found) / float(total_edges_edb) * 100.0)
        qfs = numpy.array(qfs)
        print("Statistics of q_factors")
        print("Average: ", numpy.average(qfs))
        print("Median: ", numpy.median(qfs))
        print("Stdev: ", numpy.std(qfs))
        print("Min: ", numpy.amin(qfs))
    else:
        print("No edges to match for this set of files.")
def test_find_species_from_experimental_edge_data(self):
    if True:
        # For now turn this test off. I will keep the directory of all EELS Atlas data on hand for more testing.
        return
    import glob
    import pandas
    df = pandas.read_csv("EELS_Atlas_Major/files_HE.dat", delim_whitespace=True, header=None)
    file_names, tmp1, efin, tmp2, estart = df.to_numpy().T
    i_search = 0
    total_extra_found = 0
    total_edges_edb = 0
    total_edges_matched = 0
    mintol = 0.1
    nfail = 0
    ntot = 0
    ptable = PeriodicTable.PeriodicTable()
    for file_name in file_names:
        # Load data from text file.
        print('\n\n\n')
        data_file = glob.glob('./EELS_Atlas_Major/**/' + file_name, recursive=True)[0]
        edge_file = glob.glob('./EELS_Atlas_Major/**/' + 'edges_' + os.path.splitext(file_name)[0] + '.dat', recursive=True)[0]
        energies, eels_spectrum = numpy.loadtxt(data_file, delimiter=',', unpack=True)
        energy_step = (energies[-1] - energies[0]) / energies.size
        energy_range_ev = numpy.array([energies[0], energies[-1] + energy_step])
        if estart[i_search] < 0:
            estart[i_search] = tmp1[i_search]
        search_range = [estart[i_search], efin[i_search]]
        chem_formula = file_name.split('_')[0]
        elements_exp = [elem.strip(string.digits) for elem in re.findall('[A-Z][^A-Z]*', chem_formula)
                        if str(ptable.atomic_number(elem.strip(string.digits))) in ptable.find_elements_in_energy_interval(search_range)]
        print(file_name, ':')
        experimental_edge_data = EELS_DataAnalysis.find_experimental_edge_energies(eels_spectrum, energy_range_ev, search_range, debug_plotting=False)
        df = pandas.read_csv(edge_file, delim_whitespace=True, header=None)
        edge_data = EELS_DataAnalysis.find_species_from_experimental_edge_data(eels_spectrum, energy_range_ev, experimental_edge_data,
                                                                               search_range_ev=search_range, only_major_edges=True)
        elements_found = [ed[0] for ed in edge_data]
        edge_data = EELS_DataAnalysis.find_species_from_experimental_edge_data(eels_spectrum, energy_range_ev, experimental_edge_data,
                                                                               search_range_ev=search_range, only_major_edges=False,
                                                                               element_list=elements_found)
        edge_energies = experimental_edge_data[0]
        # Print the edges found, and the edges found by visual inspection for comparison.
        # The edge finder will not necessarily find all edges, and might find extra edges.
        print("Number of edges found = ", edge_energies.size)
        print("Edges found:")
        print(numpy.sort(edge_energies).astype(int))
        elements_found = [ptable.element_symbol(ed[0]) for ed in edge_data]
        ntot += 1
        if all(x in elements_found for x in elements_exp):
            print(chem_formula, ": PASS")
        else:
            print(chem_formula, ": FAIL")
            print("Missing elements:")
            for x in elements_exp:
                if x not in elements_found:
                    print(x)
            nfail += 1
        for ed in sorted(edge_data, key=lambda x: x[0]):
            print(PeriodicTable.PeriodicTable().element_symbol(ed[0]), ed)
        i_search += 1
    print('Percentage failure:', float(nfail) / float(ntot) * 100.0)