def advection():
    """Load a saved advection marginal-PDF solution, visualize it, and fit
    a sparse second-order PDE to it.

    Reads the solution file named below via DataIO, plots the joint and
    marginal PDFs, then runs sparse (SINDy) regression with cubic
    variable coefficients.
    """
    # Candidate saved runs — exactly one should be active.
    # BUG FIX: the original assigned loadnamenpy four times in a row, so the
    # first three were dead stores; they are kept here as comments only.
    #loadnamenpy = 'advection_marginal_7397.npy'
    #loadnamenpy = 'advection_marginal_6328.npy'
    #loadnamenpy = 'advection_marginal_8028.npy'
    #loadnamenpy = 'advection_marginal_4527.npy'
    loadnamenpy = 'advection_marginal_5765.npy'

    # Case name = first two underscore-separated tokens ('advection_marginal').
    case = '_'.join(loadnamenpy.split('_')[:2])

    dataman = DataIO(case)
    fuk, fu, gridvars, ICparams = dataman.loadSolution(loadnamenpy)
    grid = PdfGrid(gridvars)

    # Visualize joint and marginal PDFs (5 slices along each of t and x).
    V = Visualize(grid)
    V.plot_fuk3D(fuk)
    V.plot_fu3D(fu)
    V.plot_fu(fu, dim='t', steps=5)
    V.plot_fu(fu, dim='x', steps=5)
    V.show()

    # Learn
    difflearn = PDElearn(fuk, grid, fu=fu, ICparams=ICparams, scase=case,
                         trainratio=0.8, debug=False, verbose=True)
    difflearn.fit_sparse(feature_opt='2ndorder', variableCoef=True,
                         variableCoefBasis='simple_polynomial',
                         variableCoefOrder=3, use_sindy=True, sindy_alpha=0.001)
def analyze(self, adjust=False, plot=False, learn=False, adjustparams=None,
            learnparams=None):
    """Load this case's marginal solution, optionally adjust/plot it, and
    optionally fit a sparse PDE.

    Args:
        adjust: if True, shrink/adjust ``fu`` via ``self.adjust`` using
            ``adjustparams``.
        plot: if True, show 3D and sliced plots of ``fu``.
        learn: if True, run sparse regression. In that case ``learnparams``
            must also contain 'sindy_alpha', 'RegCoef' and 'nzthresh'
            (the two-key default below is only sufficient when learn=False).
        adjustparams: dict forwarded to ``self.adjust`` (default: {}).
        learnparams: learning configuration dict
            (default: {'feature_opt': '1storder', 'coeforder': 1}).
    """
    # Avoid mutable default arguments (shared across calls); the effective
    # defaults are identical to the original signature's literals.
    if adjustparams is None:
        adjustparams = {}
    if learnparams is None:
        learnparams = {'feature_opt': '1storder', 'coeforder': 1}

    dataman = DataIO(self.case)
    fu, gridvars, ICparams = dataman.loadSolution(self.loadnamenpy,
                                                  array_opt='marginal')

    ## Make fu smaller (in time)
    if adjust:
        fu, gridvars = self.adjust(fu, gridvars, adjustparams)

    grid = PdfGrid(gridvars)

    if plot:
        V = Visualize(grid)
        V.plot_fu3D(fu)
        V.plot_fu(fu, dim='t', steps=5)
        V.plot_fu(fu, dim='x', steps=5)
        V.show()

    if learn:
        t0 = time.time()
        print('fu dimension: ', fu.shape)
        print('fu num elem.: ', np.prod(fu.shape))

        feature_opt = learnparams['feature_opt']
        coeforder = learnparams['coeforder']
        sindy_alpha = learnparams['sindy_alpha']
        RegCoef = learnparams['RegCoef']
        nzthresh = learnparams['nzthresh']

        # Learn
        difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams,
                             scase=self.case, trainratio=0.8, debug=False,
                             verbose=True)
        difflearn.fit_sparse(feature_opt=feature_opt, variableCoef=True,
                             variableCoefBasis='simple_polynomial',
                             variableCoefOrder=coeforder, use_sindy=True,
                             sindy_alpha=sindy_alpha, RegCoef=RegCoef,
                             nzthresh=nzthresh)

        # BUG FIX: original printed t0 - time.time(), a negative duration.
        print('learning took t = ', str(time.time() - t0))
def advection_reaction():
    """Fit a first-order sparse PDE to a saved advection-reaction PDF
    solution, after thinning the time axis by a fixed period."""
    loadnamenpy = 'advection_reaction_9987.npy'  # PDF - gaussians
    # Alternative run: 'advection_reaction_5739.npy'  # PDF - gaussians
    case = '_'.join(loadnamenpy.split('_')[:2])

    dataman = DataIO(case)
    fu, gridvars, ICparams = dataman.loadSolution(loadnamenpy)

    # Make fu smaller (in time): keep every `period`-th time slice and
    # update the time-grid metadata to match.
    t_lo, t_hi, t_step = gridvars['t'][0], gridvars['t'][1], gridvars['t'][2]
    tt = np.linspace(t_lo, t_hi, round((t_hi - t_lo) / t_step))
    period = 6
    keep = np.arange(len(tt) // period) * period
    ttnew = tt[keep]
    fu = fu[:, :, keep]
    gridvars['t'][1] = ttnew[-1]
    gridvars['t'][2] = (ttnew[-1] - ttnew[0]) / len(ttnew)
    grid = PdfGrid(gridvars)

    # Learn
    difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=case,
                         trainratio=0.8, debug=False, verbose=True)
    difflearn.fit_sparse(feature_opt='1storder', variableCoef=True,
                         variableCoefBasis='simple_polynomial',
                         variableCoefOrder=2, use_sindy=True,
                         sindy_alpha=0.005, shuffle=False)
def burgers():
    """Load a saved Burgers Monte-Carlo PDF/CDF solution and fit a
    first-order sparse PDE with linear variable coefficients."""
    # Candidate saved runs — exactly one should be active.
    # BUG FIX: the original assigned loadnamenpy three times in a row, so
    # only the last assignment took effect; the dead stores are comments now.
    #loadnamenpy = 'burgersMC_9601.npy'  # PDF - triangles
    #loadnamenpy = 'burgersMC_6095.npy'  # CDF - triangles
    #loadnamenpy = 'burgersMC_5042.npy'  # CDF - gaussians
    loadnamenpy = 'burgersMC_4147.npy'  # PDF - gaussians

    # Case name is the token before the first underscore ('burgersMC').
    case = loadnamenpy.split('_')[0]

    dataman = DataIO(case)
    fu, gridvars, ICparams = dataman.loadSolution(loadnamenpy)
    grid = PdfGrid(gridvars)

    # Learn
    difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=case,
                         trainratio=0.7, debug=False, verbose=True)
    difflearn.fit_sparse(feature_opt='1storder', variableCoef=True,
                         variableCoefBasis='simple_polynomial',
                         variableCoefOrder=1, use_sindy=True,
                         sindy_alpha=0.01, shuffle=False)
def reaction():
    """Fit a sparse first-order PDE to a saved linear-reaction
    marginal-PDF solution."""
    # Alternative saved runs:
    #   'reaction_linear_2204.npy'
    #   'reaction_linear_6632.npy'
    loadnamenpy = 'reaction_linear_5966.npy'

    # Case name = first two underscore-separated tokens ('reaction_linear').
    case = '_'.join(loadnamenpy.split('_')[:2])

    solution_io = DataIO(case)
    fu, gridvars, ICparams = solution_io.loadSolution(loadnamenpy)
    grid = PdfGrid(gridvars)

    # Learn: quadratic variable coefficients, SINDy thresholding.
    difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=case,
                         trainratio=0.8, debug=False, verbose=True)
    difflearn.fit_sparse(feature_opt='1storder', variableCoef=True,
                         variableCoefBasis='simple_polynomial',
                         variableCoefOrder=2, use_sindy=True, sindy_alpha=0.1)
def learn(self):
    """Load this case's marginal solution and fit a first-order sparse PDE
    with quadratic variable coefficients."""
    loader = DataIO(self.case)
    fu, gridvars, ICparams = loader.loadSolution(self.loadnamenpy,
                                                 array_opt='marginal')
    grid = PdfGrid(gridvars)

    # Fixed learning configuration for this case.
    feature_opt = '1storder'
    coeforder = 2

    # Learn
    model = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=self.case,
                     trainratio=0.8, debug=False, verbose=True)
    model.fit_sparse(feature_opt=feature_opt, variableCoef=True,
                     variableCoefBasis='simple_polynomial',
                     variableCoefOrder=coeforder, use_sindy=True,
                     sindy_alpha=0.001)
def multi_burgers(self, loadnamenpy):
    """Load one Burgers solution file, build its feature matrices, fit a
    SINDy model, print a report, and return the fit artifacts.

    Args:
        loadnamenpy: filename of the saved solution to load.

    Returns:
        (difflearn, featurenames, Xtrain, ytrain, Xtest, ytest)
    """
    # BUG FIX: removed fu_list / ICparams_list / grid_list — they were
    # created but never used. Also construct PdfGrid once instead of twice.
    fu, gridvars, ICparams = self.datamanager.loadSolution(loadnamenpy)
    grid = PdfGrid(gridvars)

    difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams,
                         trainratio=self.trainratio, debug=False,
                         verbose=False)
    F = Features(scase=self.case, option=self.feature_opt,
                 variableCoef=self.variableCoef,
                 variableCoefOrder=self.variableCoefOrder,
                 variableCoefBasis=self.variableCoefBasis)
    featurelist, labels, featurenames = F.makeFeatures(grid, fu, ICparams)
    Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(featurelist, labels,
                                                        shuffle=False)

    # Fit data
    lin, rem_feature_idx = difflearn.train_sindy(
        Xtrain, ytrain,
        RegCoef=self.RegCoef, maxiter=self.maxiter,
        tolerance=self.tolerance, sindy_iter=self.sindy_iter,
        sindy_alpha=self.sindy_alpha)

    difflearn.print_full_report(lin, Xtrain, ytrain, Xtest, ytest,
                                rem_feature_idx, featurenames)
    return difflearn, featurenames, Xtrain, ytrain, Xtest, ytest
def multiIC(self):
    """Concatenate train/test sets across up to ``self.numexamples``
    initial conditions, fit one SINDy model on the lumped data, and
    return the fit artifacts.

    Returns:
        (difflearn, featurenames, Xall_train, yall_train, Xall_test, yall_test)

    Raises:
        ValueError: if no examples were loaded (empty loadname_list or
            numexamples <= 0); the original code raised a confusing
            NameError in that case.
    """
    # BUG FIX: removed fuk_list / fu_list / ICparams_list / grid_list —
    # they were created but never used.
    Xall_train = None

    # Load simulation results and stack their train/test splits.
    for i in range(len(self.loadname_list)):
        if i >= self.numexamples:
            break

        fuk, fu, gridvars, ICparams = self.datamanager.loadSolution(
            self.loadname_list[i] + '.npy')

        difflearn = PDElearn(fuk=fuk, grid=PdfGrid(gridvars), fu=fu,
                             ICparams=ICparams, trainratio=self.trainratio,
                             debug=False, verbose=False)
        F = Features(scase=self.case, option=self.feature_opt,
                     variableCoef=self.variableCoef,
                     variableCoefOrder=self.variableCoefOrder,
                     variableCoefBasis=self.variableCoefBasis)
        featurelist, labels, featurenames = F.makeFeatures(
            PdfGrid(gridvars), fu, ICparams)
        Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(featurelist,
                                                            labels,
                                                            shuffle=False)

        if Xall_train is None:
            Xall_train = Xtrain
            Xall_test = Xtest
            yall_train = ytrain
            yall_test = ytest
        else:
            Xall_train = np.concatenate((Xall_train, Xtrain), axis=0)
            yall_train = np.concatenate((yall_train, ytrain), axis=0)
            Xall_test = np.concatenate((Xall_test, Xtest), axis=0)
            yall_test = np.concatenate((yall_test, ytest), axis=0)

    if Xall_train is None:
        raise ValueError('multiIC: no examples loaded '
                         '(check loadname_list and numexamples)')

    # Fit data — note: uses the learner built for the LAST loaded example,
    # trained on the lumped data (matches the original behavior).
    lin, rem_feature_idx = difflearn.train_sindy(
        Xall_train, yall_train,
        RegCoef=self.RegCoef, maxiter=self.maxiter,
        tolerance=self.tolerance, sindy_iter=self.sindy_iter,
        sindy_alpha=self.sindy_alpha)

    difflearn.print_full_report(lin, Xall_train, yall_train, Xall_test,
                                yall_test, rem_feature_idx, featurenames)
    return difflearn, featurenames, Xall_train, yall_train, Xall_test, yall_test
def runML(setting):
    """Sweep the regularization coefficient for L1/L2 regression on the
    saved IC solutions and plot error, sparsity, and coefficient paths.

    Args:
        setting: 'sepIC' fits each initial condition separately;
            'lumpIC' concatenates all ICs into one training set.
    """
    version = 1
    loadname = [makesavename(i, version) for i in IC]

    S1 = PdfSolver()
    fuk = []
    fu = []
    kmean = []
    for i in range(len(IC)):
        fuki, fui, kmeani, gridvarsi, ICparamsi = S1.loadSolution(loadname[i])
        fuk.append(fuki)
        fu.append(fui)
        kmean.append(kmeani)

    # NOTE(review): assumes every IC shares the same grid — the grid is
    # built from the last loaded solution (as in the original).
    uu, kk, xx, tt = gridvarsi
    muk, sigk, mink, maxk, sigu, a, b = ICparamsi
    grid = PdfGrid()
    grid.setGrid(xx, tt, uu, kk)
    grid.printDetails()

    # Regularization-coefficient sweep.
    lmnum = 40
    lmmin = 0.0000001
    lmmax = 0.00005
    lm = np.linspace(lmmin, lmmax, lmnum)
    options = ['linear', '2ndorder']

    if setting == 'sepIC':
        for opt in options:
            # Get number of maximum number of coefficients: maxncoef
            difflearn = PDElearn(fuk[0], grid, kmean[0], fu=fu[0],
                                 trainratio=0.8, debug=False)
            featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
            maxncoef = len(featurenames) - 1

            print('#################### %s ########################' % (opt))
            regopts = 2  # L1 and L2
            er = np.zeros((len(IC), regopts, len(lm)))
            coef = np.zeros((len(IC), regopts, len(lm), maxncoef))
            numcoefl1 = np.zeros((len(IC), len(lm)))

            for i in range(len(IC)):
                print('---- Initial Condition ----')
                print('u0: ' + IC[i]['u0'])
                print('fu0: ' + IC[i]['fu0'])
                print('fk: ' + IC[i]['fk'])
                print('---- ----- ----')

                difflearn = PDElearn(fuk[i], grid, kmean[i], fu=fu[i],
                                     trainratio=0.8, debug=False)
                featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
                Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(
                    featurelist, labels, shuffle=False)

                for j in range(len(lm)):
                    lin1 = difflearn.train(Xtrain, ytrain, RegType='L1',
                                           RegCoef=lm[j], maxiter=5000,
                                           tolerance=0.00001)
                    lin2 = difflearn.train(Xtrain, ytrain, RegType='L2',
                                           RegCoef=lm[j], maxiter=5000)
                    DL = [lin1, lin2]
                    for k in range(len(DL)):
                        er[i, k, j] = mean_squared_error(ytest, DL[k].predict(Xtest))
                        for l in range(maxncoef):
                            coef[i, k, j, l] = DL[k].coef_[l]
                    # Nonzero count of the L1 (sparse) model.
                    numcoefl1[i, j] = DL[0].sparse_coef_.getnnz()

            ## Plotting
            # Error as a function of lm
            fig = plt.figure()
            leg = []
            for i in range(len(IC)):
                plt.plot(lm, np.reshape(er[i, 0, :], (len(lm),)))
                leg.append(makesavename(IC[i], 1))
            figname = setting + ' reg coefficients L%d reg, %s' % (1, opt)
            plt.xlabel('Regularization Coefficient')
            plt.ylabel('Error')
            plt.title(figname)
            plt.legend(leg)
            plt.savefig(FIGFILE + figname + '.pdf')

            # Sparsity as a function of lm
            fig = plt.figure()
            leg = []
            for i in range(len(IC)):
                plt.plot(lm, numcoefl1[i])
                leg.append(makesavename(IC[i], 1))
            figname = setting + ' LinearIC Sparsity in L%d reg, %s' % (1, opt)
            plt.xlabel('Regularization Coefficient')
            plt.ylabel('Sparsity: Number of Coeffients')
            plt.title(figname)
            plt.legend(leg)
            plt.savefig(FIGFILE + figname + '.pdf')

            # All Coefficients values as a function of lm
            for j in range(len(IC)):
                fig = plt.figure()
                leg = []
                for i in range(len(featurenames) - 1):
                    plt.plot(lm, np.reshape(coef[j, 0, :, i], (len(lm),)))
                    leg.append(featurenames[i + 1])
                figname = setting + ' Linear features L%d reg, %s' % (1, opt)
                plt.xlabel('Regularization Coefficient')
                plt.ylabel('Coefficient Values')
                plt.title(figname)
                plt.legend(leg)
                plt.savefig(FIGFILE + figname)
            plt.show()

    if setting == 'lumpIC':
        for opt in options:
            # Get number of maximum number of coefficients: maxncoef
            difflearn = PDElearn(fuk[0], grid, kmean[0], fu=fu[0],
                                 trainratio=0.8, debug=False)
            featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
            maxncoef = len(featurenames) - 1

            print('#################### %s ########################' % (opt))
            regopts = 2
            er = np.zeros((regopts, len(lm)))
            coef = np.zeros((regopts, len(lm), maxncoef))
            numcoefl1 = np.zeros((len(lm),))

            # Lump all ICs into one train/test set.
            for i in range(len(IC)):
                difflearn = PDElearn(fuk[i], grid, kmean[i], fu=fu[i],
                                     trainratio=0.8, debug=False)
                featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
                Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(
                    featurelist, labels, shuffle=False)
                if i == 0:
                    X_train = Xtrain
                    y_train = ytrain
                    X_test = Xtest
                    y_test = ytest
                else:
                    # BUG FIX: np.append returns a NEW array; the original
                    # discarded the result, so the lumped set never grew
                    # beyond IC[0]'s data.
                    X_train = np.append(X_train, Xtrain, axis=0)
                    y_train = np.append(y_train, ytrain, axis=0)
                    X_test = np.append(X_test, Xtest, axis=0)
                    y_test = np.append(y_test, ytest, axis=0)

            for j in range(len(lm)):
                lin1 = difflearn.train(X_train, y_train, RegType='L1',
                                       RegCoef=lm[j], maxiter=5000,
                                       tolerance=0.00001)
                lin2 = difflearn.train(X_train, y_train, RegType='L2',
                                       RegCoef=lm[j], maxiter=5000)
                DL = [lin1, lin2]
                for k in range(len(DL)):
                    er[k, j] = mean_squared_error(y_test, DL[k].predict(X_test))
                    for l in range(maxncoef):
                        coef[k, j, l] = DL[k].coef_[l]
                numcoefl1[j] = DL[0].sparse_coef_.getnnz()

            ## Plotting
            # Error as a function of lm
            fig = plt.figure()
            plt.plot(lm, er[0, :])
            figname = setting + ' reg coefficients L%d reg, %s' % (1, opt)
            plt.xlabel('Regularization Coefficient')
            plt.ylabel('Error')
            plt.title(figname)
            plt.savefig(FIGFILE + figname + '.pdf')

            # Sparsity as a function of lm
            fig = plt.figure()
            plt.plot(lm, numcoefl1)
            figname = setting + ' LinearIC Sparsity in L%d reg, %s' % (1, opt)
            plt.xlabel('Regularization Coefficient')
            plt.ylabel('Sparsity: Number of Coeffients')
            plt.title(figname)
            plt.savefig(FIGFILE + figname + '.pdf')

            # All Coefficients values as a function of lm
            fig = plt.figure()
            leg = []
            for i in range(len(featurenames) - 1):
                plt.plot(lm, np.reshape(coef[0, :, i], (len(lm),)))
                leg.append(featurenames[i + 1])
            figname = setting + ' LinearIC Linear features L%d reg, %s' % (1, opt)
            plt.xlabel('Regularization Coefficient')
            plt.ylabel('Coefficient Values')
            plt.title(figname)
            plt.legend(leg)
            plt.savefig(FIGFILE + figname + '.pdf')
            plt.show()
print('\tMCcount = ', MCcount) print('---------------------') # BUILD PDF MCprocess = MCprocessing(savenameMC, case=case) savenamepdf = MCprocess.buildKDE(nu, distribution=distribution, MCcount=MCcount, save=save, u_margin=u_margin, bandwidth=bandwidth) # LEARN dataman = DataIO(case, directory=PDFDIR) fu, gridvars, ICparams = dataman.loadSolution(savenamepdf, array_opt='marginal') adjustgrid = {'mu':mu, 'mx':mx, 'mt':mt, 'pu':pu, 'px':px, 'pt':pt} grid = PdfGrid(gridvars) fu = grid.adjust(fu, adjustgrid) difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=case, trainratio=trainratio, verbose=printlearning) filename = difflearn.fit_sparse(feature_opt=feature_opt, variableCoef=variableCoef, variableCoefBasis=variableCoefBasis, \ variableCoefOrder=coeforder, use_rfe=use_rfe, rfe_alpha=rfe_alpha, nzthresh=nzthresh, maxiter=maxiter, \ LassoType=LassoType, RegCoef=RegCoef, cv=cv, criterion=criterion, print_rfeiter=print_rfeiter, shuffle=shuffle, \ basefile=savenamepdf, adjustgrid=adjustgrid, save=save, normalize=normalize, comments=comments) # Save Learning D = DataIO(case, directory=LEARNDIR) output, metadata = D.readLearningResults(filename) output_vec.append(output) metadata_vec.append(metadata) filename_vec.append(filename) print('files = [') for f in filename_vec:
lmnum = 40 lmmin = 0.0000001 lmmax = 0.00003 lm = np.linspace(lmmin, lmmax, lmnum) options = ['linear', '2ndorder'] error = [] cf = [] fn = [] for opt in options: # Get number of maximum number of coefficients: maxncoef difflearn = PDElearn(fuk[0], grid, kmean[0], fu=fu[0], trainratio=0.8, debug=False) featurelist, labels, featurenames = difflearn.makeFeatures(option=opt) #pdb.set_trace() maxncoef = len(featurenames) - 1 print('#################### %s ########################' % (opt)) DL = [] regopts = 2 er = np.zeros((regopts, len(lm))) coef = np.zeros((regopts, len(lm), maxncoef)) numcoefl1 = np.zeros((len(lm), )) for i in range(len(IC)): difflearn = PDElearn(fuk[i],
def runML():
    """Train L0/L1/L2 regressions separately on each of four saved IC
    solutions and cross-score every trained model on every dataset."""
    # Saved solution names (version-1 runs).
    loadname1 = 'u0exp_fu0gauss_fkgauss_1'
    loadname2 = 'u0lin_fu0gauss_fkgauss_1'
    loadname3 = 'u0lin_fu0gauss_fkuni_1'
    loadname4 = 'u0exp_fu0gauss_fkuni_1'
    loadname = [loadname1, loadname2, loadname3, loadname4]

    S1 = PdfSolver()
    # BUG FIX: removed the gridvars/ICparams accumulator lists — they were
    # created but never appended to or read.
    fuk = []
    fu = []
    kmean = []
    for i in range(4):
        fuki, fui, kmeani, gridvarsi, ICparamsi = S1.loadSolution(loadname[i])
        fuk.append(fuki)
        fu.append(fui)
        kmean.append(kmeani)

    # NOTE(review): assumes all four runs share the same grid — built from
    # the last loaded solution (as in the original).
    uu, kk, xx, tt = gridvarsi
    muk, sigk, mink, maxk, sigu, a, b = ICparamsi
    grid = PdfGrid()
    grid.setGrid(xx, tt, uu, kk)
    grid.printDetails()

    # Train on dataset 1
    p = (0, 1, 2, 3)
    options = ['all', 'linear', '2ndorder']
    for opt in options:
        print('#################### %s ########################' % (opt))
        DL = []
        X = []
        y = []
        for i in p:
            difflearn = PDElearn(fuk[i], grid, kmean[i], fu=fu[i],
                                 trainratio=1, debug=False)
            featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
            Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(
                featurelist, labels, shuffle=False)
            X.append(Xtrain)
            y.append(ytrain)
            DL.append(difflearn)

        for ti in range(4):
            print('\n ###### Training on i = %d ###### \n ' % (ti))
            lin1 = DL[ti].train(X[ti], y[ti], RegType='L1',
                                RegCoef=0.000001, maxiter=5000,
                                tolerance=0.00001)
            lin2 = DL[ti].train(X[ti], y[ti], RegType='L2',
                                RegCoef=0.01, maxiter=5000)
            lin0 = DL[ti].train(X[ti], y[ti], RegType='L0')

            # Cross-evaluate each trained model on every dataset.
            for i in range(4):
                print('---- %d ----' % (i))
                print(loadname[i])
                print("L1 Reg Test Score = %5.3f | RMS = %7.5f" % (lin1.score(
                    X[i], y[i]), mean_squared_error(y[i], lin1.predict(X[i]))))
                print("L2 Reg Test Score = %5.3f | RMS = %7.5f" % (lin2.score(
                    X[i], y[i]), mean_squared_error(y[i], lin2.predict(X[i]))))
                print("L0 Reg Test Score = %5.3f | RMS = %7.5f" % (lin0.score(
                    X[i], y[i]), mean_squared_error(y[i], lin0.predict(X[i]))))
def runML(setting):
    """Study time-generalization: sweep the training-time fraction T and
    measure test error for L0/L1/L2 regressions.

    Args:
        setting: 'sepIC' trains per initial condition; 'lumpIC'
            concatenates all ICs into one training set (linear features).
    """
    # Training-fraction sweep.
    Tinc = 10
    Tmin = 0.3
    Tmax = 0.9
    T = np.linspace(Tmin, Tmax, Tinc)

    version = 1
    loadname = [makesavename(i, version) for i in IC]

    S1 = PdfSolver()
    fuk = []
    fu = []
    kmean = []
    for i in range(len(IC)):
        fuki, fui, kmeani, gridvarsi, ICparamsi = S1.loadSolution(loadname[i])
        fuk.append(fuki)
        fu.append(fui)
        kmean.append(kmeani)

    # NOTE(review): assumes every IC shares the same grid — built from the
    # last loaded solution (as in the original).
    uu, kk, xx, tt = gridvarsi
    muk, sigk, mink, maxk, sigu, a, b = ICparamsi
    grid = PdfGrid()
    grid.setGrid(xx, tt, uu, kk)
    grid.printDetails()

    if setting == 'sepIC':
        options = ['linear', '2ndorder']
        for opt in options:
            print('#################### %s ########################' % (opt))
            er = np.zeros((len(IC), 3, len(T)))  # (IC, regtype, T)
            for i in range(len(fuk)):
                print('---- Initial Condition ----')
                print('u0: ' + IC[i]['u0'])
                print('fu0: ' + IC[i]['fu0'])
                print('fk: ' + IC[i]['fk'])
                print('---- ----- ----')
                for j in range(len(T)):
                    print('\n ###### Training %3.2f percent ###### \n ' % (T[j]))
                    difflearn = PDElearn(fuk[i], grid, kmean[i], fu=fu[i],
                                         trainratio=T[j], debug=False)
                    featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
                    Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(
                        featurelist, labels, shuffle=False)

                    lin0 = difflearn.train(Xtrain, ytrain, RegType='L0')
                    lin1 = difflearn.train(Xtrain, ytrain, RegType='L1',
                                           RegCoef=0.000001, maxiter=5000,
                                           tolerance=0.00001)
                    lin2 = difflearn.train(Xtrain, ytrain, RegType='L2',
                                           RegCoef=0.01, maxiter=5000)
                    DL = [lin0, lin1, lin2]
                    for k in range(len(DL)):
                        # Do it for each initial condition
                        er[i, k, j] = mean_squared_error(ytest, DL[k].predict(Xtest))

            ## Plotting
            for l in range(len(DL)):
                fig = plt.figure()
                leg = []
                for i in range(len(IC)):
                    plt.plot(T, np.reshape(er[i, l, :], (len(T),)))
                    leg.append(makesavename(IC[i], 1))
                plt.xlabel('Training Time Span (\%)')
                plt.ylabel('Error')
                plt.title('Time Generalization for L%d reg, %s' % (l, opt))
                plt.legend(leg)
                plt.show()

    if setting == 'lumpIC':
        #### Lump initial conditions ####
        opt = 'linear'
        er = np.zeros((3, len(T)))
        for j in range(len(T)):
            print('\n ###### Training %3.2f percent ###### \n ' % (T[j]))
            for i in range(len(IC)):
                difflearn = PDElearn(fuk[i], grid, kmean[i], fu=fu[i],
                                     trainratio=T[j], debug=False)
                featurelist, labels, featurenames = difflearn.makeFeatures(option=opt)
                Xtrain, ytrain, Xtest, ytest = difflearn.makeTTsets(
                    featurelist, labels, shuffle=False)
                if i == 0:
                    X_train = Xtrain
                    y_train = ytrain
                    X_test = Xtest
                    y_test = ytest
                else:
                    # BUG FIX: np.append returns a NEW array; the original
                    # discarded the result, so the lumped set never grew
                    # beyond IC[0]'s data.
                    X_train = np.append(X_train, Xtrain, axis=0)
                    y_train = np.append(y_train, ytrain, axis=0)
                    X_test = np.append(X_test, Xtest, axis=0)
                    y_test = np.append(y_test, ytest, axis=0)

            lin0 = difflearn.train(X_train, y_train, RegType='L0')
            lin1 = difflearn.train(X_train, y_train, RegType='L1',
                                   RegCoef=0.00001, maxiter=5000,
                                   tolerance=0.00001)
            lin2 = difflearn.train(X_train, y_train, RegType='L2',
                                   RegCoef=0.01, maxiter=5000)
            DL = [lin0, lin1, lin2]
            for k in range(len(DL)):
                # Do it for each initial condition
                er[k, j] = mean_squared_error(y_test, DL[k].predict(X_test))

        ## Plotting
        for l in range(len(DL)):
            fig = plt.figure()
            figname = 'Time Generalization L%d reg - linear lumped IC' % (l)
            plt.plot(T, er[l, :])
            plt.xlabel('Training Time Span (\%)')
            plt.ylabel('Error')
            plt.title(figname)
            fig.savefig(figname + '.pdf')
            plt.show()
RegCoef = 0.000004 maxiter = 10000 if "savenamepdf" not in locals(): # Check if there is already a loadfile (if not load it) savenamepdf = 'advection_reaction_analytical_717_944.npy' dataman = DataIO(case) fu, gridvars, ICparams = dataman.loadSolution(savenamepdf, array_opt='marginal') grid = PdfGrid(gridvars) fu = grid.adjust(fu, aparams) if plot: s = 10 V = Visualize(grid) V.plot_fu3D(fu) V.plot_fu(fu, dim='t', steps=s) V.plot_fu(fu, dim='x', steps=s) V.show() difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=case, trainratio=0.8, debug=False, verbose=True) difflearn.fit_sparse(feature_opt=feature_opt, variableCoef=True, variableCoefBasis='simple_polynomial', \ variableCoefOrder=coeforder, use_sindy=True, sindy_alpha=sindy_alpha, RegCoef=RegCoef, nzthresh=nzthresh, maxiter=maxiter)
testScorevec = [] MCprocess = MCprocessing(savefilename) for idx, MCcount in enumerate(MCvec): fu, gridvars, ICparams = MCprocess.buildKDE(nu, partial_data=True, MCcount=MCcount, save=False, plot=False, distribution=distribution) grid = PdfGrid(gridvars) difflearn = PDElearn(grid=grid, fu=fu, ICparams=ICparams, scase=case, trainratio=0.8, debug=False, verbose=True) coef, featurenames, trainRMSE, testRMSE, trainScore, testScore = difflearn.fit_sparse(feature_opt='1storder', \ variableCoef=True, variableCoefBasis='simple_polynomial', variableCoefOrder=1, \ use_sindy=True, sindy_alpha=0.001, shuffle=False) coefvec.append(coef) featurenamesvec.append(featurenames) trainRMSEvec.append(trainRMSE) testRMSEvec.append(testRMSE) trainScorevec.append(trainScore) testScorevec.append(testScore) savedata = {