def test_Diff_order1(self):
    """First-order x-derivative: check the numeric data and the symbolic expansion."""
    numeric = D(derivative_order=1, axis_name="x") * self.v
    assert all(numeric.data[:, 0] == 0)
    assert all(numeric.data[:, 1] == 0.5)
    print(numeric.name)

    symbolic = D(derivative_order=1, axis_name="x") * self.sym_v
    # Central difference stencil, spaces stripped to match str() output.
    expected = "-1.0*v(x - 1, y) + 1.0*v(x + 1, y)".replace(' ', '')
    assert str(symbolic) == expected
def test_Diff_order2(self):
    """Second-order x-derivative: interior values and symbolic stencil."""
    numeric = D(derivative_order=2, axis_name="x") * self.v
    assert all(numeric.data[:, 0] == 0)
    assert all(numeric.data[:, 1] == 0)
    # Borders are excluded: np.gradient handles them differently.
    assert all(numeric.data[2:-2, 2] == 0.5)

    symbolic = D(derivative_order=2, axis_name="x") * self.sym_v
    expected = "-2.0*v(x, y) + 1.0*v(x - 2, y) + 1.0*v(x + 2, y)".replace(' ', '')
    assert str(symbolic) == expected
def test_evaluator(self):
    """Fit a PDE on a train split and check prediction error on the test split.

    Fix: the original assertion used the *signed* mean of the residuals, so
    positive and negative errors could cancel and let arbitrarily bad
    predictions pass. Use the mean absolute error instead.
    """
    trainSplit = DataSplit({"x": 0.7})
    testSplit = DataSplit({"x": 0.3}, {"x": 0.7})

    data_manager = DataManager()
    data_manager.add_variables(self.v)
    data_manager.add_variables(self.x)
    data_manager.set_X_operator(lambda field: PolyD({"x": 1}) * field)
    data_manager.set_y_operator(lambda field: D(3, "x") * field)

    pde_finder = PDEFinder(with_mean=True, with_std=True)
    pde_finder.set_fitting_parameters(cv=20, n_alphas=100, alphas=None)
    pde_finder.fit(data_manager.get_X_dframe(trainSplit),
                   data_manager.get_y_dframe(trainSplit))
    print(pde_finder.coefs_)  # strange th value obtained

    real, pred = evaluate_predictions(pde_finder,
                                      data_split_operator=testSplit,
                                      dm=data_manager,
                                      starting_point={"x": -1},
                                      domain_variable2predict="x",
                                      horizon=10,
                                      num_evaluations=1)
    # NOTE(review): pred has one extra leading row, hence the [1:, :] alignment
    # — confirm against evaluate_predictions' contract.
    residuals = (real.drop(["random_split", "method"], axis=1).values
                 - pred.drop(["method"], axis=1).values[1:, :])
    assert np.mean(np.abs(residuals)) < 0.001
def test_integrate(self):
    """Fit a PDE and integrate it forward from two index-shifted test splits.

    Fix: the original test only printed the result and asserted nothing, so
    it could never fail on a wrong (but non-raising) result. Add a minimal
    check that an integration result was actually produced.
    """
    trainSplit = DataSplit({"x": 0.7})
    testSplit = DataSplit({"x": 0.3}, {"x": 0.7})

    data_manager = DataManager()
    data_manager.add_variables(self.v)
    data_manager.set_X_operator(lambda field: PolyD({"x": 1}) * field)
    data_manager.set_y_operator(lambda field: D(2, "x") * field)
    data_manager.set_domain()

    pde_finder = PDEFinder(with_mean=True, with_std=True)
    pde_finder.set_fitting_parameters(cv=20, n_alphas=100, alphas=None)
    pde_finder.fit(data_manager.get_X_dframe(trainSplit),
                   data_manager.get_y_dframe(trainSplit))
    print(pde_finder.coefs_)  # strange th value obtained

    # warning!!!
    predictions_df = pde_finder.integrate(
        [DataSplitOnIndex({"x": 5}) * testSplit,
         DataSplitOnIndex({"x": 20}) * testSplit],
        data_manager,
        starting_point={"x": -1},
        domain_variable2predict="x",
        horizon=10)
    print(predictions_df)
    # Minimal sanity check; presumably a DataFrame — a shape/value check
    # would be stronger once the expected output is pinned down.
    assert predictions_df is not None
def test_fit_2(self):
    """Fit with polynomial features of (v, v^2) and verify near-zero residuals."""
    data_manager = DataManager()
    data_manager.add_variables(self.v)
    data_manager.add_variables(self.v**2)
    data_manager.set_X_operator(lambda field: Poly(3) * (PolyD({"x": 1}) * field))
    data_manager.set_y_operator(lambda field: D(2, "x") * field)

    pde_finder = PDEFinder(with_mean=True, with_std=True)
    pde_finder.set_fitting_parameters(cv=10, n_alphas=100, alphas=None)
    pde_finder.fit(data_manager.get_X_dframe(), data_manager.get_y_dframe())
    print(pde_finder.coefs_)  # strange th value obtained

    # Per-column mean absolute residual of the fitted transform.
    mean_abs_error = (pde_finder.transform(data_manager.get_X_dframe())
                      - data_manager.get_y_dframe()).abs().mean().values
    print(mean_abs_error)
    assert np.max(mean_abs_error) < 1e-5

    # Equation extraction works through both entry points.
    res = pde_finder.get_equation(*data_manager.get_Xy_eq())
    print(res)
    res = pde_finder.get_equation(data_manager.get_X_sym(), data_manager.get_y_sym())
    print(res)
def test_get_sym(self):
    """Symbolic X/y expressions track the configured operators."""
    data_manager = DataManager()
    data_manager.add_variables([self.v])
    data_manager.add_regressors(self.x)
    data_manager.set_domain()

    # Second-order target.
    data_manager.set_X_operator(lambda field: (PolyD({"x": 1}) * field))
    data_manager.set_y_operator(lambda field: D(2, "x") * field)
    expected_X = "[v(x,y), -0.5*v(x-1,y)+0.5*v(x+1,y), x(x)]"
    expected_y = "[-0.5*v(x,y)+0.25*v(x-2,y)+0.25*v(x+2,y)]"
    assert str(data_manager.get_X_sym()) == expected_X
    assert str(data_manager.get_y_sym()) == expected_y

    # First-order target: v's derivative moves from X to y.
    data_manager.set_X_operator(lambda field: (PolyD({"x": 1}) * field))
    data_manager.set_y_operator(lambda field: D(1, "x") * field)
    assert str(data_manager.get_X_sym()) == "[v(x,y), x(x)]"
    assert str(data_manager.get_y_sym()) == "[-0.5*v(x-1,y)+0.5*v(x+1,y)]"
def test_get_var(self):
    """Dataframe column names reflect the symbolic variable/derivative labels."""
    data_manager = DataManager()
    data_manager.add_variables([self.v])
    data_manager.add_regressors(self.x)
    data_manager.set_domain()
    data_manager.set_X_operator(lambda field: PolyD({"x": 1}) * field)
    data_manager.set_y_operator(lambda field: D(1, "x") * field)

    x_columns = data_manager.get_X_dframe().columns
    y_columns = data_manager.get_y_dframe().columns
    assert all(x_columns == ['v(x,y)', 'x(x)'])
    assert all(y_columns == ['1.0*Derivative(v(x,y),x)'])
def test_getXy_eq(self):
    """get_Xy_eq returns the (X, y) equation terms as derivative expressions."""
    data_manager = DataManager()
    data_manager.add_variables([self.v])
    data_manager.add_regressors(self.x)
    data_manager.set_domain()
    data_manager.set_X_operator(lambda field: (PolyD({"x": 1}) * field))
    data_manager.set_y_operator(lambda field: D(2, "x") * field)

    X_eq, y_eq = data_manager.get_Xy_eq()
    assert str(X_eq) == "[v(x,y), 1.0*Derivative(v(x,y),x), x(x)]"
    assert str(y_eq) == "[1.0*Derivative(v(x,y),(x,2))]"
def test_Diff_combining_operations(self):
    """Composed first derivatives behave like a second derivative and commute."""
    second = (D(derivative_order=1, axis_name="x")
              * D(derivative_order=1, axis_name="x")
              * self.v)
    assert all(second.data[:, 0] == 0)
    assert all(second.data[:, 1] == 0)
    assert all(second.data[2:-2, 2] == 0.5)

    # Mixed partials: d/dx d/dy == d/dy d/dx.
    dxdy = (D(derivative_order=1, axis_name="x")
            * D(derivative_order=1, axis_name="y")
            * self.v)
    dydx = (D(derivative_order=1, axis_name="y")
            * D(derivative_order=1, axis_name="x")
            * self.v)
    assert dxdy == dydx
def y_operator(field):
    # Time derivative of closure-captured order `derivative_order_y`
    # — defined in the enclosing scope, not visible here.
    return D(derivative_order_y, "t") * field
def y_operator(field):
    # Time derivative of the instance-configured target order
    # (`self` captured from the enclosing method).
    return D(self.target_derivative_order, "t") * field
def y_operator(field):
    # Target is one order above the t-derivative order captured in
    # `derivative_order` from the enclosing scope.
    order = derivative_order['t'] + 1
    return D(order, "t") * field
def y_operator(field):
    # Fixed second time derivative as the regression target.
    operator = D(derivative_order=2, axis_name='t')
    return operator * field
def x_operator(field, regressors):
    # Feature set built from the first time derivative of the field:
    # M', (M')^2 and M*M'.  `regressors` is part of the operator
    # interface but unused here.
    # NOTE(review): D(1, 't') * field is deliberately rebuilt per term —
    # append mutates the feature container, so reusing one object could alias.
    features = D(1, 't') * field                    # M'
    features.append(((D(1, 't') * field)**2))       # (M')^2
    features.append((field * (D(1, 't') * field)))  # MM'
    return features