def plot_components_PLSR(event):
    """Plot the latent variables (x weights) from the last PLSR run."""
    run = PLSR.run
    if not run.ui['reg_type'] == 'PLSR':
        return
    ui = run.ui
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    common_variables = run.common_variables
    reg_module = run.last_reg_module
    # x_weights_ has shape (n_features, n_components); swap to iterate per component
    latent_variables = np.swapaxes(reg_module.x_weights_, 0, 1)
    wavenum = run.last_complete_case.wavenumbers
    if ui['save_check_var']:
        tempax = common_variables.tempax
        tempfig = common_variables.tempfig
    for i, latent_variable in enumerate(latent_variables):
        ax = fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots'])
        yax_label = 'Latent variable ' + str(i + 1)
        PLSRsave.plot_component(ax, ui, wavenum, yax_label, latent_variable)
        run.draw()
        if ui['save_check_var']:
            tempax.cla()
            PLSRsave.plot_component(tempax, ui, wavenum, yax_label, latent_variable)
            plotFileName = run.filename + '/PLSR latent variable ' + str(i + 1)
            tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
    return

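# Hedged sketch (not part of the original module): illustrates where x_weights_
# comes from, assuming last_reg_module wraps sklearn's PLSRegression. The data
# and function name below are made up for illustration only.
def _example_pls_latent_variables():
    """Fit a PLS model on synthetic spectra and return the latent-variable
    weight vectors in the (component, wavenumber) orientation used above."""
    import numpy as np
    from sklearn.cross_decomposition import PLSRegression

    rng = np.random.default_rng(0)
    X = rng.random((20, 50))   # 20 spectra, 50 wavenumber points
    y = rng.random(20)
    pls = PLSRegression(n_components=3).fit(X, y)
    # x_weights_ has shape (n_features, n_components); swap to iterate per component
    return np.swapaxes(pls.x_weights_, 0, 1)
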
def run(self):
    self.fig.clf()
    ax = fns.add_axis(self.fig, 1)
    numPulses = 100
    #self.average_pulses=[]
    self.ms = [255]
    for i, n in enumerate(self.ms):
        average_2_pulses = averageN(self.pulseIntensity, n)
        self.plot_me(ax, numPulses)
    if self.frame.save_check_var.get():
        tempfig = self.frame.hidden_figure
        tempfig.set_size_inches(4 * 1.2, 3 * 1.2)
        tempfig.set_dpi(300)
        tempfig.clf()
        tempax = tempfig.add_subplot(1, 1, 1)
        tempfig.subplots_adjust(bottom=0.17, left=0.16, right=0.97, top=0.97)
        self.plot_me(tempax, numPulses)
        filename = self.frame.name_field_string.get()
        tempfig.savefig(filename + '.png')
        tempfig.savefig(filename + '.svg')
    #ax2=fns.add_axis(self.fig,1)
    #average_2_pulses=averageN(self.pulseIntensity,8)
    #ax2.scatter(range(len(average_2_pulses)),average_2_pulses)
    #if self.ui['save_check']:
    #    self.fig.savefig(self.ui['save_filename']+'.png')
    return

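# Hedged sketch (assumption): averageN is called above but not defined in this
# section. The helper below assumes it block-averages every n consecutive
# samples of a 1-D signal; the real implementation may differ.
def _example_averageN(signal, n):
    """Average non-overlapping blocks of n consecutive samples."""
    import numpy as np
    signal = np.asarray(signal, dtype=float)
    usable = (len(signal) // n) * n          # drop the incomplete trailing block
    return signal[:usable].reshape(-1, n).mean(axis=1)
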
def run(self):
    self.fig.clf()
    ax = fns.add_axis(self.fig, 1)
    StartWL = 1200
    EndWL = 925
    # trim all scans to the shortest one, then block-average each scan
    minscanlength = np.inf
    for scan in self.scans:
        if len(scan) < minscanlength:
            minscanlength = len(scan)
    for i, scan in enumerate(self.scans):
        self.scans[i] = scan[0:minscanlength]
        n = 255
        self.scans[i] = averageN(self.scans[i], n)
    self.scans = np.array(self.scans)
    # normalise each scan by the average scan
    self.averagescans = np.average(self.scans, axis=0)
    for i, scan in enumerate(self.scans):
        self.scans[i] = scan / self.averagescans
    # align every scan to the first one
    for i, scan in enumerate(self.scans):
        if i > 0:
            s = signal_alignment.chisqr_align(self.scans[0], scan, [0, 20000], init=0, bound=50)
            print(s)
            self.scans[i] = signal_alignment.shift(scan, s, mode='nearest')
    #StartWL=1200
    #EndWL=925
    #self.wavenumbers=StartWL+(EndWL-StartWL)*np.arange(minscanlength)/minscanlength
    StartWL = 1200
    EndWL = 925
    self.wavenumbers = StartWL + (EndWL - StartWL) * np.arange(minscanlength) / minscanlength
    numPulses = 1000
    step = 100
    self.ms = [1, 2, 5, 10]
    self.averaged_scans = []
    for i, m in enumerate(self.ms):
        self.averaged_scans.append(copy.deepcopy(self.scans))
        self.averaged_scans[-1] = averageM(self.averaged_scans[-1], m)
    self.plot_me(ax, step, numPulses, EndWL, StartWL)
    if self.frame.save_check_var.get():
        tempfig = self.frame.hidden_figure
        tempfig.set_size_inches(4 * 1.2, 3 * 1.2)
        tempfig.set_dpi(300)
        tempfig.clf()
        tempax = tempfig.add_subplot(1, 1, 1)
        tempfig.subplots_adjust(bottom=0.17, left=0.16, right=0.97, top=0.97)
        self.plot_me(tempax, step, numPulses, EndWL, StartWL)
        filename = self.frame.name_field_string.get()
        tempfig.savefig(filename + '.png')
        tempfig.savefig(filename + '.svg')
    return

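# Hedged sketch (assumption): averageM is also undefined in this section. This
# helper assumes it averages groups of m consecutive scans (rows) of a 2-D
# array, analogous to _example_averageN but along axis 0.
def _example_averageM(scans, m):
    """Average non-overlapping groups of m consecutive scans."""
    import numpy as np
    scans = np.asarray(scans, dtype=float)
    usable = (scans.shape[0] // m) * m       # drop the incomplete trailing group
    return scans[:usable].reshape(-1, m, scans.shape[1]).mean(axis=1)
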
def get_or_make_absorbance_ax(run):
    ui = run.ui
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    wavenumbers = run.original_wavenumbers
    common_variables = run.common_variables
    fig = common_variables.fig
    for ax in fig.axes:
        if hasattr(ax, 'plot_type') and ax.plot_type == 'absorbance':
            return ax
    ax = fns.add_axis(fig, ui['fig_per_row'], ui['max_plots'])
    datapoints = common_variables.datapoints
    PlotAbsorbance(ax, fig, datapoints, ui, wavenumbers,
                   common_variables.original_T.X, common_variables.original_V.X)
    ax.plot_type = 'absorbance'
    return ax

def plot_feature_importance(event):
    """Plot the feature importances from the Random Forest Regressor.

    Feature importance is shown overlaid on the data plot, and is also saved
    in a separate plot if the "Save" option is selected."""
    run = PLSR.run
    if not run.ui['reg_type'] == 'Tree':
        return
    feature_importance = run.last_reg_module.regr.feature_importances_
    common_variables = run.common_variables
    wavenum = run.last_complete_case.wavenumbers
    ui = run.ui
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    # the original referenced undefined names 'xax' and 'wavenumbers';
    # both are assumed here to be the wavenumber axis of the last case
    xax = wavenum
    ax = PLSRsave.get_or_make_absorbance_ax(run)
    PLSRsave.add_feature_importance_twinx(ax, common_variables, ui, xax, feature_importance)
    ax = fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots'])
    PLSRsave.plot_feature_importance(ax, common_variables, ui, xax, feature_importance)
    if ui['save_check_var']:
        tempax = common_variables.tempax
        tempfig = common_variables.tempfig
        common_variables.tempfig.subplots_adjust(bottom=0.13, left=0.15, right=0.85, top=0.97)
        PLSRsave.PlotAbsorbance(tempax, tempfig, run.last_complete_case.active_wavenumers, ui,
                                wavenum, common_variables.original_T.X,
                                common_variables.original_V.X)
        twinx = PLSRsave.add_feature_importance_twinx(tempax, common_variables, run.ui, xax,
                                                      feature_importance)
        plotFileName = run.filename + '/transmissionFullAndFeatureImportance'
        tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
        tempax.cla()
        twinx.remove()
        common_variables.tempfig.subplots_adjust(bottom=0.13, left=0.15, right=0.97, top=0.97)
        PLSRsave.plot_feature_importance(tempax, common_variables, ui, xax, feature_importance)
        plotFileName = run.filename + '/FeatureImportance'
        tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
        tempax.cla()
    run.draw()

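# Hedged sketch (not part of the original module): shows where
# regr.feature_importances_ comes from, assuming the 'Tree' regression module
# wraps sklearn's RandomForestRegressor. Synthetic data, illustration only.
def _example_feature_importance():
    """Fit a random forest on synthetic spectra and return per-wavenumber
    importances (shape: n_features,)."""
    import numpy as np
    from sklearn.ensemble import RandomForestRegressor

    rng = np.random.default_rng(0)
    X = rng.random((40, 60))    # 40 spectra, 60 wavenumber points
    y = 2.0 * X[:, 10] + rng.normal(scale=0.05, size=40)   # one informative band
    regr = RandomForestRegressor(n_estimators=100, random_state=0).fit(X, y)
    return regr.feature_importances_
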
def plot_node_correlations(event):
    run = PLSR.run
    if not run.ui['reg_type'] == 'NeuralNet':
        return
    print(run.complete_cases[-1].keywords['NN_type'])
    if run.complete_cases[-1].keywords['NN_type'] == 'Convolutional':
        return stability_selection(event)
    if not hasattr(run, 'last_Xval_case'):
        print('Not done running')
        return
    ui = run.ui
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    ax = fns.add_axis(run.common_variables.fig, ui['fig_per_row'], ui['max_plots'])
    ax.plot_type = 'NN node map'
    V = run.last_Xval_case.V
    if len(V.X) == 0:
        V = run.last_Xval_case.T
    transformedDataset = run.last_reg_module.scaler.transform(V.X)
    values = run.last_reg_module.neural_net.get_values(transformedDataset)
    run.NNvalues = values
    y_midpoint = run.last_reg_module.neural_net.layer_size / 2
    X = []
    Y = []
    corr = []
    y_rot = np.rot90(np.atleast_2d(V.Y), -1)
    corr_param = run.last_reg_module.neural_net.y_scaler.transform(y_rot).reshape(-1)
    # every third entry of 'values' is a layer's post-activation output;
    # compute r^2 of each node's activation against the scaled target
    for j in range((len(values) + 2) // 3):
        layer = values[j * 3]
        shape = layer.shape
        corr.append([])
        for i in range(shape[1]):
            slope, intercept, r_value, p_value, std_err = scipy.stats.linregress(
                layer[:, i], corr_param)
            corr[-1].append(r_value**2)
            X.append(j)
            Y.append(i)
    run.corr = corr
    flat_corr = [item for sublist in corr for item in sublist]
    sc = ax.scatter(X, Y, c=flat_corr, cmap='viridis')
    cbar = run.fig.colorbar(sc, ax=ax)
    cbar.set_label(r'r$^2$')
    run.draw()

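# Hedged sketch (not part of the original module): the r^2 map above is a
# per-node linear regression of node activations against the scaled target.
# This stand-alone helper reproduces that computation for a single layer.
def _example_node_r2(layer_activations, target):
    """Return r^2 of each node's activation (columns) against the target.

    layer_activations: array of shape (n_samples, n_nodes); target: (n_samples,)."""
    import scipy.stats
    r2 = []
    for i in range(layer_activations.shape[1]):
        result = scipy.stats.linregress(layer_activations[:, i], target)
        r2.append(result.rvalue ** 2)
    return r2
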
def do_stability_selection(self, run):
    positively_changed_weights = self.stability_selection_pass(run.last_Xval_case.T)
    for i in range(100 - 1):
        positively_changed_weights += self.stability_selection_pass(run.last_Xval_case.T)
        print(str(i + 1) + ' of 100')
        run.draw()
    ui = run.ui
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    import fns
    ax = fns.add_axis(run.common_variables.fig, ui['fig_per_row'], ui['max_plots'])
    positively_changed_weights = positively_changed_weights.reshape((-1, self.layer_size))
    wavenum = run.last_complete_case.wavenumbers
    dl = (len(wavenum) - len(positively_changed_weights)) // 2
    wavenum = wavenum[dl:-dl]
    for i, a in enumerate(positively_changed_weights[0]):
        ax.plot(wavenum, positively_changed_weights[:, i])  #,label=i)
    ax.invert_xaxis()
    #ax.legend()
    #ax.plot(positively_changed_weights)
    run.draw()

def run_wrapper_case(self):
    fig = self.fig
    locations = self.locations
    frame = self.frame
    ui = self.ui
    eprint('running')
    self.fig = fig
    fig.clf()
    self.frame = frame
    # get variables from buttons
    common_variables = types.SimpleNamespace()
    common_variables.draw = self.draw
    self.common_variables = common_variables
    common_variables.keyword_lists = {}
    PLSRregressionMethods.get_relevant_keywords(common_variables, ui)
    ui['multiprocessing'] = 1 - (ui['no_multiprocessing'])
    save_check_var = frame.save_check_var.get()
    ui['save_check_var'] = save_check_var
    filename = frame.name_field_string.get()
    self.filename = filename
    # prepare figures for display (set correct number of axes, each pointing to the next axis)
    ######################### if crossval and moving window -> stop ###########
    if ui['is_validation'] == 'X-val on training' and ui['regression_wavelength_selection'] == 'Moving window':
        print("Use of x-validation with moving window is not supported")
        return
    ######################### if RMSEP and no validation -> stop ##############
    if ui['is_validation'] == 'Training' and ui['RMS_type'] == 'RMSEP':
        print("Unable to calculate RMSEP with only training set")
        return
    #################### if RMSEP and RMSEC and no validation -> only RMSEP ###
    if ui['is_validation'] == 'Training':
        ui['RMS_type'] = 'RMSEC'
        if ui['RMS_type'] == 'Default':
            ui['RMS_type'] = 'RMSEC'
    else:
        if ui['RMS_type'] == 'Default':
            ui['RMS_type'] = 'RMSEP'
    common_variables.frame = frame
    common_variables.fig = fig
    ################################################################################################
    ######################### Load data as training or validation #################################
    ################################################################################################
    T = types.SimpleNamespace()
    V = types.SimpleNamespace()
    if len(frame.training_files) == 0:
        print('training set required')
        return
    # load training set
    T.X, T.Y, common_variables.trainingfiles, self.wavenumbers, self.regressionCurControlTypes = \
        PLSR_file_import.get_files(frame.training_files, ui['max_range'])
    self.original_wavenumbers = self.wavenumbers
    for i, contrltytpe in enumerate(self.regressionCurControlTypes):
        frame.button_handles['cur_col'][i]["text"] = contrltytpe
    if ui['is_validation'] == 'Training' or ui['is_validation'] == 'X-val on training':
        # if training or crossval -> deselect validation
        frame.nav.deselect()
        #frame.nav.clear_color('color3')
        #frame.validation_files=frame.nav.get_paths_of_selected_items()
        V.X = np.array([])  # set empty validation set
        V.Y = np.array([])
    elif ui['is_validation'] == 'Training and Validation':
        if len(frame.validation_files) == 0:
            print('training and validation set selected, but no validation set in input')
            return
        # load validation set
        V.X, V.Y, common_variables.validationfiles, _, _2 = PLSR_file_import.get_files(
            frame.validation_files, ui['max_range'])
    common_variables.original_T = copy.deepcopy(T)
    common_variables.original_V = copy.deepcopy(V)
    ################################################################################################
    ################################## load reference spectra #####################################
    ################################################################################################
    if ui['reference_spectra'] == '':
        self.reference_spectra = None
    else:
        try:
            temp, _1, _2, _3, _4 = PLSR_file_import.get_files([ui['reference_spectra']], np.inf)
            if len(temp) > 0:
                print('first spectrum in list selected as reference spectrum')
                self.reference_spectra = np.array(temp[0])
        except Exception as e:
            self.reference_spectra = None
            print(e)
            print('error importing reference spectra -> ignoring')
    if ui['background_spectra'] == '':
        self.background_spectra = None
    else:
        try:
            temp, _1, _2, _3, _4 = PLSR_file_import.get_files([ui['background_spectra']], np.inf)
            if len(temp) > 0:
                print('first spectrum in list selected as background spectrum')
                self.background_spectra = np.array(temp[0])
        except Exception as e:
            self.background_spectra = None
            print(e)
            print('error importing background spectra -> ignoring')
    ################################################################################################
    ################# set up folder, save log and temporary figure for saving #####################
    ################################################################################################
    if save_check_var:
        if not os.path.exists(filename):
            os.makedirs(filename)
        PLSRsave.SaveLogFile(filename, ui, common_variables)
        common_variables.tempfig, common_variables.tempax = PLSRsave.make_tempfig(ui, frame)
    ################################################################################################
    ############################## calculate window ranges ########################################
    ################################################################################################
    common_variables.datapoints = np.arange(len(self.wavenumbers))
    #common_variables.datapointlists=[common_variables.datapoints] # declare this for get_or_make_absorbance_ax
    #common_variables.datapoints, common_variables.datapointlists=PLSRpreprocessing.GetDatapoints(self.wavenumbers, ui)
    ################################################################################################
    ################################### save unprocessed spectra ##################################
    ################################################################################################
    if ui['plot_spectra_before_preprocessing']:
        eprint('plot abs')
        if ui['save_check_var']:
            PLSRsave.PlotAbsorbance(common_variables.tempax, common_variables.tempfig,
                                    common_variables.datapoints, ui, self.wavenumbers, T.X, V.X)
            plotFileName = filename + '/SpectraPrePreprocessing'
            common_variables.tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
            common_variables.tempax.cla()
        ax = PLSRsave.get_or_make_absorbance_ax(self)
        self.draw()
    ################################################################################################
    ################################### make pychem input file ####################################
    ################################################################################################
    if int(ui['make_pyChem_input_file']):
        if ui['is_validation'] == 'Training and Validation':
            # the original passed undefined names 'validation' and 'validationtruevalues';
            # the loaded validation set V is assumed here
            PLSRsave.writePyChemFile(T.X, T.Y, V.X, V.Y)
        else:
            PLSRsave.writePyChemFile(T.X, T.Y, [], [])
    ################################################################################################
    ################## set current control and remove data higher than maxrange ###################
    ################################################################################################
    datasets = [T]
    if ui['is_validation'] == 'Training and Validation':
        datasets.append(V)
    for E in datasets:
        keepsamples = []
        for i, _ in enumerate(E.Y):
            if not E.Y[i, ui['cur_col']] > ui['max_range']:
                keepsamples.append(i)
        E.X = E.X[keepsamples, :]
        E.Y = E.Y[keepsamples, ui['cur_col']]
    ui['cur_control_string'] = self.regressionCurControlTypes[ui['cur_col']]
    PLSRpreprocessing.do_preprocessing(self, T, V)
    if ui['plot_fourier']:
        if hasattr(T, 'X_fft'):
            ax = fns.add_axis(fig, ui['fig_per_row'], ui['max_plots'])
            PLSRsave.plot_fourier(ax, fig, T, V, ui)
    self.complete_cases = []
    for _ in [1]:  # is a loop so that you can use 'break'
        for i, dercase in enumerate(self.preprocessed_cases):
            # need to set data range in case of derivative; rerun in all cases anyway
            datapoints = PLSRpreprocessing.GetDatapoints(dercase.wavenumbers, ui)
            #common_variables.datapoints=datapoints
            #common_variables.datapointlists=datapointlists
            if ui['plot_spectra_after_preprocessing']:
                ax = fns.add_axis(fig, ui['fig_per_row'], ui['max_plots'])
                PLSRsave.PlotAbsorbance(ax, fig, datapoints, ui, dercase.wavenumbers,
                                        dercase.T.X, dercase.V.X, dercase=dercase)
                self.draw()
                if ui['save_check_var']:
                    PLSRsave.PlotAbsorbance(common_variables.tempax, common_variables.tempfig,
                                            datapoints, ui, dercase.wavenumbers,
                                            dercase.T.X, dercase.V.X, dercase=dercase)
                    plotFileName = dercase.folder + '/SpectraPostPreprocessing'
                    common_variables.tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
                    common_variables.tempax.cla()
            for E in [dercase.T, dercase.V]:
                if len(E.Y) > 0:
                    E.X = E.X[:, datapoints]
            dercase.wavenumbers = dercase.wavenumbers[datapoints]
            # create complete cases for all permutations of keyword values in keyword_lists
            for keyword_case in PLSRregressionMethods.generate_keyword_cases(common_variables.keyword_lists):
                self.complete_cases.append(types.SimpleNamespace())
                self.complete_cases[-1].wavenumbers = dercase.wavenumbers
                self.complete_cases[-1].folder = dercase.folder
                self.complete_cases[-1].sg_config = dercase.sg_config
                self.complete_cases[-1].derrivative = dercase.derrivative
                self.complete_cases[-1].T = dercase.T
                self.complete_cases[-1].V = dercase.V
                self.complete_cases[-1].preprocessing_done = dercase.preprocessing_done
                self.complete_cases[-1].keywords = keyword_case
        if ui['reg_type'] == 'None':
            break
        for case in self.complete_cases:
            case.XvalRMSEs = []
            case.XvalCorrClass = []
            common_variables.keywords = case.keywords
            #GeneticAlgorithm(ui,T,V,datapoints,components)
            if ui['regression_wavelength_selection'] == 'No wavelength selection':
                active_wavenumers = np.ones(len(case.wavenumbers), dtype=bool)
            else:
                # report to user regarding split module
                if self.ui['WS_loss_type'] == 'X-validation on training':
                    if self.ui['WS_cross_val_N'] == 1 and self.ui['WS_cross_val_max_cases'] == -1:
                        print('Using sklearn.LeaveOneOut on ' + str(len(case.T.Y)) +
                              ' measurements. Maxcases set to ' + str(len(case.T.Y)))
                    else:
                        if self.ui['WS_cross_val_max_cases'] == -1:
                            print('WS_cross_val_max_cases set to -1, GA_cross_val_N not set to 1. '
                                  'Setting GA_cross_val_max_cases to default (20)')
                            self.ui['WS_cross_val_max_cases'] = 20
                if ui['regression_wavelength_selection'] == 'Genetic Algorithm':
                    GAobject = PLSRGeneticAlgorithm.GeneticAlgorithm(common_variables, ui, case)
                    active_wavenumers = GAobject.run(
                        fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots']),
                        case.wavenumbers, case.folder, self.draw)
                elif ui['regression_wavelength_selection'] == 'Moving Window':
                    active_wavenumers = PLSRwavelengthSelection.MW(case, ui, common_variables)
                elif ui['regression_wavelength_selection'] == 'Sequential Feature Selector':
                    FSobject = PLSRsequential_feature_selectors.sequentialFeatureSelector(
                        common_variables, ui, case, self.draw)
                    active_wavenumers = FSobject.run()
            # returns [T],[V] if not crossval, otherwise makes cases from the validation dataset
            Xval_cases = crossval(case.T, case.V, ui, case)
            for Xval_case in Xval_cases:
                #ui.datapoints=runGeneticAlgorithm(dercase[0],dercase[1],dercase[2],dercase[3],dercase[4],dercase[5],dercase[6],dercase[7])
                #def MW(T,V,wavenumbers, folder,ui,sg_config,curDerivative,supressplot):
                if ui['save_check_var'] and not ui['do_not_save_plots']:
                    active_wavenumbers_file = (case.folder + ui['reg_type'] +
                        PLSRsave.get_unique_keywords_formatted(
                            common_variables.keyword_lists, case.keywords).replace('.', 'p') +
                        'active_wavenumers.dpb')
                    PLSRsave.save_active_wavenumbers(active_wavenumbers_file,
                                                     case.wavenumbers, active_wavenumers)
                case.active_wavenumers = active_wavenumers
                self.draw()
                self.last_reg_module, RMSe = run_reg_module(
                    Xval_case, case, ui, common_variables, active_wavenumers,
                    self.filename + '/results_table', keywords={})
                self.draw()
                self.last_complete_case = case
                self.last_Xval_case = Xval_case
                if Xval_case.supressplot == 0:
                    if ui['is_validation'] == 'X-val on training':
                        #if ui['RMS_type']=='Combined RMSEP+RMSEC':
                        #    print('RMSEC+RMSEP = '+PLSRsave.custom_round(case.xvalRMSE,3)+' '+ui['unit'])
                        if not 'classifier_type' in case.keywords:
                            # pool the per-fold RMSEs into one cross-validated RMSE
                            case.xvalRMSE = np.sqrt(
                                np.sum(np.array(case.XvalRMSEs)**2) / len(case.XvalRMSEs))
                            if ui['RMS_type'] == 'RMSEC':
                                print('RMSEC = ' + PLSRsave.custom_round(case.xvalRMSE, 3) + ' ' + ui['unit'])
                            elif ui['RMS_type'] == 'RMSEP':
                                print('RMSEP = ' + PLSRsave.custom_round(case.xvalRMSE, 3) + ' ' + ui['unit'])
                        else:
                            print(case.XvalCorrClass)
                            case.xvalCorrClas = np.average(case.XvalCorrClass)
                            print(case.xvalCorrClas)
                            if ui['RMS_type'] == 'RMSEC':
                                print('x-val correctly classified training = ' +
                                      str(round(case.xvalCorrClas * 100, 3)) + ' %')
                            elif ui['RMS_type'] == 'RMSEP':
                                print('x-val correctly classified prediction = ' +
                                      str(round(case.xvalCorrClas * 100, 3)) + ' %')
            case.XvalRMSEs = []
    eprint('done')

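# Hedged sketch (not part of the original module): the cross-validated RMSE
# above pools per-fold RMSEs as the square root of their mean squared value.
# This is exact when all folds contain the same number of samples.
def _example_pooled_xval_rmse(fold_rmses):
    """Combine per-fold RMSEs into a single cross-validated RMSE."""
    import numpy as np
    fold_rmses = np.asarray(fold_rmses, dtype=float)
    return np.sqrt(np.sum(fold_rmses ** 2) / len(fold_rmses))
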
                elif ui['coeff_det_type'] == 'R':
                    coeff_det = Xval_case.R_not_squared
                if reg_module.type == 'classifier':  #'classifier_type' in keywords:
                    frac_cor_lab = PLSRclassifiers.get_correct_categorized(
                        case.X_val_Y[-1], case.X_val_pred[-1])
                    case.XvalCorrClass.append(frac_cor_lab)
                else:
                    frac_cor_lab = -1
                # plot
                if not supressplot:
                    if not ui['do_not_save_plots']:
                        PLSRsave.plot_regression(Xval_case, case, ui,
                                                 fns.add_axis(common_variables.fig,
                                                              ui['fig_per_row'], ui['max_plots']),
                                                 keywords, RMSe, coeff_det,
                                                 frac_cor_lab=frac_cor_lab)
                    if ui['save_check_var']:
                        if not ui['do_not_save_plots']:
                            PLSRsave.plot_regression(Xval_case, case, ui, common_variables.tempax,
                                                     keywords, RMSe, coeff_det,
                                                     frac_cor_lab=frac_cor_lab)

def plot_components_PCR(event):
    """Plot the principal components from the last PCR run."""
    run = PLSR.run
    if not run.ui['reg_type'] == 'PCR':
        return
    ui = run.ui
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    common_variables = run.common_variables
    reg_module = run.last_reg_module
    components = reg_module.pca.components_[:reg_module.components]
    #print(components)
    wavenum = run.last_complete_case.wavenumbers
    if ui['save_check_var']:
        tempax = common_variables.tempax
        tempfig = common_variables.tempfig
    # plot each principal component
    for i, component in enumerate(components):
        ax = fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots'])
        yax_label = 'Component ' + str(i + 1)
        PLSRsave.plot_component(ax, ui, wavenum, yax_label, component)
        run.draw()
        if ui['save_check_var']:
            tempax.cla()
            PLSRsave.plot_component(tempax, ui, wavenum, yax_label, component)
            plotFileName = run.filename + '/PCR component ' + str(i + 1)
            tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
    # plot the normalised regression weight of each component
    linreg_coef = reg_module.linreg.coef_
    linreg_coef = linreg_coef / sum(linreg_coef)
    ax = fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots'])
    PLSRsave.plot_component_weights(ax, ui, linreg_coef)
    if ui['save_check_var']:
        tempax.cla()
        PLSRsave.plot_component_weights(tempax, ui, linreg_coef)
        plotFileName = run.filename + '/PCR Weights'
        tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
    run.draw()
    # plot the effective regression vector (components weighted and summed)
    ax = fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots'])
    product = np.dot(np.transpose(components), linreg_coef)
    yax_label = r'Comps$\cdot$weights'
    PLSRsave.plot_component(ax, ui, wavenum, yax_label, product)
    if ui['save_check_var']:
        tempax.cla()
        PLSRsave.plot_component(tempax, ui, wavenum, yax_label, product)
        plotFileName = run.filename + '/PCR components times weights'
        tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
    run.draw()
    # overlay the effective regression vector on the absorbance plot
    ax = PLSRsave.get_or_make_absorbance_ax(run)
    PLSRsave.plot_component_weights_twinx(ax, ui, wavenum, yax_label, product)
    if ui['save_check_var']:
        tempax = common_variables.tempax
        tempfig = common_variables.tempfig
        common_variables.tempfig.subplots_adjust(bottom=0.13, left=0.15, right=0.85, top=0.97)
        PLSRsave.PlotAbsorbance(tempax, tempfig, run.last_complete_case.active_wavenumers, ui,
                                run.last_complete_case.wavenumbers,
                                common_variables.original_T.X, common_variables.original_V.X)
        twinx = PLSRsave.plot_component_weights_twinx(tempax, ui, wavenum, yax_label, product)
        plotFileName = run.filename + '/transmission and PCR components times weights '
        tempfig.savefig(plotFileName.replace('.', 'p') + ui['file_extension'])
        tempax.cla()
        twinx.remove()
        common_variables.tempfig.subplots_adjust(bottom=0.13, left=0.15, right=0.97, top=0.97)
    run.draw()
    return

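# Hedged sketch (not part of the original module): the "components times
# weights" curve above is the effective regression vector of a PCR model in
# wavenumber space. Assuming the module wraps sklearn's PCA + LinearRegression,
# the product below reproduces it on synthetic data; names are illustrative.
def _example_pcr_effective_vector(n_components=3):
    """Return components^T . coef_, i.e. one weight per wavenumber."""
    import numpy as np
    from sklearn.decomposition import PCA
    from sklearn.linear_model import LinearRegression

    rng = np.random.default_rng(0)
    X = rng.random((30, 80))             # 30 spectra, 80 wavenumber points
    y = rng.random(30)
    pca = PCA(n_components=n_components).fit(X)
    scores = pca.transform(X)
    linreg = LinearRegression().fit(scores, y)
    coef = linreg.coef_ / np.sum(linreg.coef_)   # mirrors the normalisation above
    return pca.components_.T @ coef              # shape (n_wavenumbers,)
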
def plot_node_activation_vector(event):
    run = PLSR.run
    if not run.ui['reg_type'] == 'NeuralNet':
        return
    ui = run.ui
    layer = int(event.xdata + 0.5)
    node = int(event.ydata + 0.5)
    try:
        r2 = run.corr[layer][node]
        print('node at x', layer, 'y', node, 'r**2', r2)
    except Exception:
        print('no node at x', layer, 'y', node)
        return
    ui['fig_per_row'] = int(run.frame.buttons['fig_per_row'].get())
    ui['max_plots'] = int(run.frame.buttons['max_plots'].get())
    ax = fns.add_axis(run.common_variables.fig, ui['fig_per_row'], ui['max_plots'])
    weights = run.last_reg_module.neural_net.get_weights()
    V = run.last_Xval_case.V
    if len(V.X) == 0:
        V = run.last_Xval_case.T
    data = run.last_reg_module.scaler.transform(V.X)
    input_dim = data.shape[1]
    num_inputs = data.shape[0]
    print(data.shape)
    # seed the sensitivities: one unit vector per input dimension
    sensitivity = [[]]
    for i in range(input_dim):
        sensitivity[-1].append(np.zeros(data.shape))
        sensitivity[-1][-1][:, i] = 1
    activation = run.last_reg_module.neural_net.activation
    # propagate the sensitivities (chain rule) layer by layer
    for ll in range((len(weights) + 2) // 3):
        sensitivity.append([])
        for i in range(input_dim):
            sensitivity[-1].append(sensitivity[-2][i] @ weights[ll * 3][0][:])
            #sensitivity[-1][-1] += weights[ll*3][1]  # do not include this; we are calculating derivatives, not responses
        data = data @ weights[ll * 3][0][:]
        data += weights[ll * 3][1]  # add bias
        if not ll == (len(weights) - 1) // 3:
            for i in range(input_dim):
                sensitivity[-1][i] = activation(sensitivity[-1][i], pivot=data)
                #print(np.sum(sensitivity[-1][-1]==0))
            data = activation(data)
            #for i in range(data.shape[1]):
            #    print(np.sum(data[:,i]==0))
    #sensitivity[layer][input][wavenumber,node]
    #sensitivity[layer+1]=np.array(sensitivity[layer+1])
    #node_sensitivity=sensitivity[layer+1][:,:,node]
    run.sensitivity = sensitivity
    node_sensitivity = []
    for i in range(input_dim):
        node_sensitivity.append(sensitivity[layer + 1][i][:, node])
    node_sensitivity = np.array(node_sensitivity)
    #print(len(sensitivity),sensitivity[layer+1].shape,node_sensitivity.shape)
    sense_vector = np.average(node_sensitivity, axis=1)
    sense_std = np.std(node_sensitivity, axis=1)
    #for s in sensitivity[-1]:
    #print(run.sense_vector[layer+1][node])
    wavenum = run.last_complete_case.wavenumbers
    wavenum = wavenum[run.last_complete_case.active_wavenumers]
    ax.errorbar(wavenum, sense_vector, yerr=sense_std, color=[1, 0, 0, 1], ecolor=[0, 0, 1, 1])
    ax.invert_xaxis()
    #ax.plot(wavenum,sense_vector)
    #ax=fns.add_axis(run.common_variables.fig,ui['fig_per_row'],ui['max_plots'])
    #ax.plot(wavenum,sense_std)
    '''ax=fns.add_axis(run.common_variables.fig,ui['fig_per_row'],ui['max_plots'])
    ax.plot(wavenum,node_sensitivity,'+')
    ax.invert_xaxis()
    ax=fns.add_axis(run.common_variables.fig,ui['fig_per_row'],ui['max_plots'])
    ax.plot(wavenum,run.last_reg_module.scaler.transform(V.X).swapaxes(0,1),'+')
    ax.invert_xaxis()'''
    '''ax=fns.add_axis(run.common_variables.fig,ui['fig_per_row'],ui['max_plots'])
    data=run.last_reg_module.neural_net.y_scaler.inverse_transform(data)
    ax.plot(V.Y,data,'o')'''
    #ax.plot(sense_std)
    run.draw()
    #ax.plot()
    #values=run.last_reg_module.neural_net.get_values(transformedDataset)
    '''inv_act=run.last_reg_module.neural_net.inv_activation
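# Hedged sketch (not part of the original module): the sensitivity computation
# above is a chain-rule propagation of each input dimension through the dense
# layers. A minimal NumPy version, assuming ReLU hidden layers, a linear output
# layer, and per-layer (W, b) weight pairs; names are illustrative only.
def _example_input_sensitivity(X, layers):
    """X: (n_samples, n_inputs); layers: list of (W, b) with W of shape (d_in, d_out).
    Returns one array per layer of shape (n_inputs, n_samples, d_out) holding
    d(node output)/d(input dimension)."""
    import numpy as np
    n_inputs = X.shape[1]
    # seed: the derivative of the input w.r.t. each input dimension is a unit vector
    sens = np.zeros((n_inputs, X.shape[0], n_inputs))
    for i in range(n_inputs):
        sens[i, :, i] = 1.0
    data, per_layer = X, []
    for li, (W, b) in enumerate(layers):
        pre = data @ W + b                 # bias shifts values but not derivatives
        sens = sens @ W                    # propagate derivatives through the weights
        if li < len(layers) - 1:           # hidden layers: ReLU and its derivative mask
            sens = sens * (pre > 0)[None, :, :]
            data = np.maximum(pre, 0.0)
        else:                              # linear output layer
            data = pre
        per_layer.append(sens.copy())
    return per_layer
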
        T, V, trail_active_wavenumbers, use_stored=True)  # done moving window
    Wresults = Wresults + (Wresults == 0) * np.max(Wresults)  # set empty datapoints to max value
    j, i = np.unravel_index(Wresults.argmin(), Wresults.shape)
    bestVal = Wresults[j, i]
    bestSize = Wsizes[i]
    bestStart = j - bestSize // 2
    # plot MW results
    Wresults = np.array(Wresults)
    # make plot
    Wwindowsize, Wwavenumbers = np.meshgrid(Wsizes * abs(dw), wavenumbers)
    unique_keywords = PLSRsave.get_unique_keywords_formatted(common_variables.keyword_lists, keywords)
    PLSRsave.PcolorMW(Wwavenumbers, Wwindowsize, Wresults,
                      fns.add_axis(common_variables.fig, ui['fig_per_row'], ui['max_plots']),
                      unique_keywords[1:], ui)
    if ui['save_check_var']:
        tempCbar = PLSRsave.PcolorMW(Wwavenumbers, Wwindowsize, Wresults,
                                     common_variables.tempax, unique_keywords[1:], ui)
        common_variables.tempfig.subplots_adjust(bottom=0.13, left=0.15, right=0.97, top=0.9)
        plotFileName = folder + ui['reg_type'] + unique_keywords.replace('.', 'p') + '_moving_window'
        common_variables.tempfig.savefig(plotFileName + ui['file_extension'])
        tempCbar.remove()
    # set results as keywords, so that they are saved
    bestEnd = bestStart + bestSize
    Wwidth = wavenumbers[bestStart] - wavenumbers[bestEnd - 1]  # cm-1
    Wcenter = 0.5 * (wavenumbers[bestStart] + wavenumbers[bestEnd - 1])  # cm-1
    keywords['MW width'] = str(round(Wwidth, 1)) + r' cm$^{-1}$'
    keywords['MW center'] = str(round(Wcenter, 1)) + r' cm$^{-1}$'
    # prepare return vector
    active_wavenumers = np.zeros(len(wavenumbers), dtype=bool)
    active_wavenumers[bestStart:bestEnd] = True

def run(self):
    '''if self.ui['SFS type']=='Forward':
        return self.forward_selection()
    if self.ui['SFS type']=='Backwards':
        return self.backwards_selection()
    def forward_selection(self):'''
    wavenumbers = self.case.wavenumbers
    if 'Forward' in self.ui['SFS type']:
        direction = 'Forward '
        current_active_wavenumbers = np.zeros(len(wavenumbers), dtype=bool)
    elif 'Backward' in self.ui['SFS type']:
        direction = 'Backward'
        current_active_wavenumbers = np.ones(len(wavenumbers), dtype=bool)
    if self.ui['SFS_floating']:
        floating = True
    else:
        floating = False
    ax = fns.add_axis(self.common_variables.fig, self.ui['fig_per_row'], self.ui['max_plots'])
    # calculate the needed X-val splits and store them
    PLSRwavelengthSelection.WS_getCrossvalSplits([0, 1], self.T, self.V, self.ui, use_stored=False)
    PLSRsave.PlotChromosomes(ax, wavenumbers, [], self.ui, ylabel='Iteration')
    if self.ui['SFS type'] == 'Forward':
        current_active_wavenumbers = np.zeros(len(wavenumbers), dtype=bool)
    elif self.ui['SFS type'] == 'Backwards':
        current_active_wavenumbers = np.ones(len(wavenumbers), dtype=bool)
    best_historic_active = []
    best_loss = []
    generation = 0
    while True:
        # main step
        if direction == 'Forward ':
            trail_active_wavenumbers = self.get_trails_forward(current_active_wavenumbers)
        else:  # direction == 'Backward'
            trail_active_wavenumbers = self.get_trails_backward(current_active_wavenumbers)
        if len(trail_active_wavenumbers) == 0:
            break
        trail_active_wavenumbers = cut_previous(trail_active_wavenumbers, best_historic_active)
        current_active_wavenumbers, l, out_str = self.do_pass(trail_active_wavenumbers, generation)
        print(direction + ' ' + out_str)
        best_loss.append(l)
        PLSRsave.PlotChromosome(ax, wavenumbers, current_active_wavenumbers, generation)
        self.draw_fun()
        best_historic_active.append(copy.copy(current_active_wavenumbers))
        best_historic_generation = np.argmin(best_loss)
        generation += 1
        if generation == self.ui['SFS_max_iterations']:
            break
        if floating:
            while True:
                if direction == 'Forward ':
                    if np.sum(current_active_wavenumbers) == 1:
                        break
                    else:
                        trail_active_wavenumbers = self.get_trails_backward(
                            current_active_wavenumbers)  # reverse of main loop
                else:  # direction == 'Backward'
                    if np.sum(current_active_wavenumbers) == len(current_active_wavenumbers):
                        break
                    trail_active_wavenumbers = self.get_trails_forward(
                        current_active_wavenumbers)  # reverse of main loop
                trail_active_wavenumbers = cut_previous(trail_active_wavenumbers, best_historic_active)
                if len(trail_active_wavenumbers) == 0:
                    break
                best_trail, l, out_str = self.do_pass(trail_active_wavenumbers, generation)
                if l < best_loss[-1]:
                    print('Floating' + ' ' + out_str)
                    current_active_wavenumbers = best_trail
                    best_loss.append(l)
                    PLSRsave.PlotChromosome(ax, wavenumbers, current_active_wavenumbers, generation)
                    self.draw_fun()
                    best_historic_active.append(copy.copy(current_active_wavenumbers))
                    best_historic_generation = np.argmin(best_loss)
                    generation += 1
                else:
                    break
        if (generation == self.ui['SFS_max_iterations']
                or best_historic_generation < len(best_loss) - self.ui['SFS_num_after_min']
                or np.sum(current_active_wavenumbers) == self.ui['SFS_target']):
            break
    print('best iteration ' + str(best_historic_generation + 1) + ', best ' + self.rmse_string +
          ' = ' + PLSRsave.custom_round(best_loss[best_historic_generation], 2))
    PLSRsave.PlotChromosome(ax, wavenumbers, best_historic_active[best_historic_generation],
                            best_historic_generation, color=[1, 0, 0, 1])
    if self.ui['save_check_var'] == 1:
        PLSRsave.PlotChromosomes(self.common_variables.tempax, wavenumbers, best_historic_active,
                                 self.ui, ylabel='Iteration')
        PLSRsave.PlotChromosome(self.common_variables.tempax, wavenumbers,
                                best_historic_active[best_historic_generation],
                                best_historic_generation, color=[1, 0, 0, 1])
        self.common_variables.tempfig.subplots_adjust(bottom=0.13, left=0.15, right=0.97, top=0.9)
        unique_keywords = PLSRsave.get_unique_keywords_formatted(
            self.common_variables.keyword_lists, self.case.keywords)
        # the original referenced an undefined 'case'; 'self.case' is assumed here
        plotFileName = self.case.folder + self.ui['reg_type'] + unique_keywords.replace('.', 'p') + 'SFS'
        self.common_variables.tempfig.savefig(
            plotFileName.replace('.', 'p') + self.ui['file_extension'])
    return best_historic_active[best_historic_generation]

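# Hedged sketch (not the author's implementation): scikit-learn ships a
# comparable sequential selector. This shows the API only; it lacks the
# floating step, live chromosome plotting and custom loss handling implemented
# above, and the Ridge estimator is just an illustrative stand-in.
def _example_sklearn_sfs(X, y, n_features=10, direction='forward'):
    """Return a boolean mask of selected wavenumbers using sklearn's SFS."""
    from sklearn.feature_selection import SequentialFeatureSelector
    from sklearn.linear_model import Ridge

    selector = SequentialFeatureSelector(
        Ridge(),
        n_features_to_select=n_features,
        direction=direction,      # 'forward' or 'backward'
        cv=5,
    )
    selector.fit(X, y)
    return selector.get_support()
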