def x_operator(field, regressors):
    """Build the X (feature) field for regression: derivatives of *field*,
    the extra *regressors*, all polynomially expanded, with numerically
    constant terms dropped (except the explicit '1.000' constant).

    NOTE(review): `derivative_order` and `polynomial_order` are free
    variables taken from the enclosing scope — confirm they are in scope
    where this closure is defined.
    """
    # Deep-copy so the caller's field is never mutated.
    new_field = copy.deepcopy(field)
    # NOTE(review): other x_operator variants in this file pass a dict such
    # as {'t': derivative_order}; here the bare value is passed to PolyD —
    # verify this is the intended call signature.
    new_field = PolyD(derivative_order) * new_field
    new_field.append(regressors)
    # if rational:
    #     new_field.append(new_field.__rtruediv__(1.0))
    new_field = (Poly(polynomial_order) * new_field)
    # Keep a variable unless its data is numerically all ~1 AND its name does
    # not mark it as the explicit constant term '1.000'.
    new_field = Field([
        var for var in new_field.data
        if not np.allclose(var.data, 1) or '1.000' in var.get_full_name()
    ])
    return new_field
def test_evaluator(self):
    """Fit a PDEFinder on the first 70% of the x axis and check that its
    forecasts on the held-out 30% match the real values on average."""
    # Train on the leading 70%; test split starts at 70% and covers 30%.
    trainSplit = DataSplit({"x": 0.7})
    testSplit = DataSplit({"x": 0.3}, {"x": 0.7})
    data_manager = DataManager()
    data_manager.add_variables(self.v)
    data_manager.add_variables(self.x)
    # X: variables plus their first x-derivative; y: third x-derivative.
    data_manager.set_X_operator(
        lambda field: PolyD({"x": 1}) * field)  # (PolyD({"x": 1})
    data_manager.set_y_operator(lambda field: D(3, "x") * field)
    pde_finder = PDEFinder(with_mean=True, with_std=True)
    pde_finder.set_fitting_parameters(cv=20, n_alphas=100, alphas=None)
    pde_finder.fit(data_manager.get_X_dframe(trainSplit),
                   data_manager.get_y_dframe(trainSplit))
    print(pde_finder.coefs_)  # strange th value obtained
    # One-shot evaluation: integrate 10 steps forward from the end of the
    # training region (starting_point index -1).
    real, pred = evaluate_predictions(pde_finder,
                                      data_split_operator=testSplit,
                                      dm=data_manager,
                                      starting_point={"x": -1},
                                      domain_variable2predict="x",
                                      horizon=10,
                                      num_evaluations=1)
    # NOTE(review): this is the mean of *signed* errors, which can be near
    # zero (or negative) even when individual errors are large — consider
    # np.abs(...) before the mean if absolute accuracy is intended.
    assert np.mean(
        real.drop(["random_split", "method"], axis=1).values -
        pred.drop(["method"], axis=1).values[1:, :]) < 0.001
def test_integrate(self):
    """Smoke-test PDEFinder.integrate: fit on a 70% split, then integrate
    forward from two different index offsets of the test split."""
    trainSplit = DataSplit({"x": 0.7})
    testSplit = DataSplit({"x": 0.3}, {"x": 0.7})
    data_manager = DataManager()
    data_manager.add_variables(self.v)
    # X: variable plus first x-derivative; y: second x-derivative.
    data_manager.set_X_operator(
        lambda field: PolyD({"x": 1}) * field)  # (PolyD({"x": 1})
    data_manager.set_y_operator(lambda field: D(2, "x") * field)
    data_manager.set_domain()
    pde_finder = PDEFinder(with_mean=True, with_std=True)
    pde_finder.set_fitting_parameters(cv=20, n_alphas=100, alphas=None)
    pde_finder.fit(data_manager.get_X_dframe(trainSplit),
                   data_manager.get_y_dframe(trainSplit))
    print(pde_finder.coefs_)  # strange th value obtained
    # warning!!!
    # Integrate from two starting windows inside the test region; no
    # assertion here — this only checks the call completes and prints.
    predictions_df = pde_finder.integrate([
        DataSplitOnIndex({"x": 5}) * testSplit,
        DataSplitOnIndex({"x": 20}) * testSplit
    ], data_manager,
        starting_point={"x": -1},
        domain_variable2predict="x",
        horizon=10)
    print(predictions_df)
def test_fit_2(self):
    """Fit with a degree-3 polynomial feature expansion over v and v**2 and
    check the fitted model reproduces y almost exactly."""
    data_manager = DataManager()
    data_manager.add_variables(self.v)
    data_manager.add_variables(self.v**2)
    # X: degree-3 polynomial of {v, v**2} and their first x-derivatives.
    data_manager.set_X_operator(
        lambda field: Poly(3) * (PolyD({"x": 1}) * field))  # (PolyD({"x": 1})
    data_manager.set_y_operator(lambda field: D(2, "x") * field)
    pde_finder = PDEFinder(with_mean=True, with_std=True)
    pde_finder.set_fitting_parameters(cv=10, n_alphas=100, alphas=None)
    pde_finder.fit(data_manager.get_X_dframe(), data_manager.get_y_dframe())
    print(pde_finder.coefs_)  # strange th value obtained
    print((pde_finder.transform(data_manager.get_X_dframe()) -
           data_manager.get_y_dframe()).abs().mean().values)
    # Mean absolute residual per column must be tiny: the expanded feature
    # set contains the exact generating terms.
    assert np.max((pde_finder.transform(data_manager.get_X_dframe()) -
                   data_manager.get_y_dframe()).abs().mean().values) < 1e-5
    # Both ways of requesting the symbolic equation should work.
    res = pde_finder.get_equation(*data_manager.get_Xy_eq())
    print(res)
    res = pde_finder.get_equation(data_manager.get_X_sym(),
                                  data_manager.get_y_sym())
    print(res)
def x_operator(field, regressors):
    """Assemble the regression features for *field*: optional time
    derivatives, the given *regressors*, optional reciprocal terms, and a
    final polynomial expansion.

    `derivative_order`, `rational` and `polynomial_order` are free variables
    resolved from the enclosing scope at call time.
    """
    features = copy.deepcopy(field)  # never mutate the caller's field
    if derivative_order > 0:
        # add t-derivatives up to the requested order
        features = PolyD({'t': derivative_order}) * features
    features.append(regressors)
    if rational:
        # also include 1/term for every term collected so far
        features.append(features.__rtruediv__(1.0))
    return Poly(polynomial_order) * features
def test_get_sym(self):
    """Symbolic X/y representations must match the configured operators,
    and reconfiguring the operators must refresh them."""
    dm = DataManager()
    dm.add_variables([self.v])
    dm.add_regressors(self.x)
    dm.set_domain()
    # Second-order y operator: central-difference stencils appear in y.
    dm.set_X_operator(
        lambda field: (PolyD({"x": 1}) * field))  # (PolyD({"x": 1})
    dm.set_y_operator(lambda field: D(2, "x") * field)
    assert str(dm.get_X_sym()
               ) == "[v(x,y), -0.5*v(x-1,y)+0.5*v(x+1,y), x(x)]"
    assert str(dm.get_y_sym()
               ) == "[-0.5*v(x,y)+0.25*v(x-2,y)+0.25*v(x+2,y)]"
    # Switch to a first-order y operator and check the symbols update.
    dm.set_X_operator(
        lambda field: (PolyD({"x": 1}) * field))  # (PolyD({"x": 1})
    dm.set_y_operator(lambda field: D(1, "x") * field)
    assert str(dm.get_X_sym()) == "[v(x,y), x(x)]"
    assert str(dm.get_y_sym()) == "[-0.5*v(x-1,y)+0.5*v(x+1,y)]"
def integrate2(self, dm, dery, starting_point, horizon, method='Euler'):
    """Integrate the fitted equation forward as an ODE and return the
    predicted variable values for the next *horizon* steps.

    :type dm: DataManager
    :param dery: order of the highest derivative; the ODE state holds
        derivatives 0 .. dery-1 of each variable.
    :param starting_point: index of the initial condition along the single
        domain axis; when more than one variable it is used to define the
        other domain points.
    :type starting_point: dict
    :type horizon: int
    :param method: integration method name — NOTE(review): currently unused;
        scipy.integrate.odeint is always called.
    :return: DataFrame of shape (horizon, n_variables) with float values.
    """
    assert len(dm.domain) == 1, "only works with 1d variables."
    ax_name = dm.domain.axis_names[0]
    var_names = [var.get_full_name() for var in dm.field.data]
    # Turn the fitted linear combination (X @ coefs) == y into a callable
    # suitable for the ODE solver.
    eq_x_sym_expression, eq_y_sym_expression = dm.get_Xy_eq()
    ode_func = get_func_for_ode(eq_x_sym_expression.matmul(self.coefs_.T),
                                eq_y_sym_expression, dm.regressors)
    # Clip a window of 2*dery samples starting at the requested index so the
    # finite-difference derivatives below have enough points.
    split_data_operator = DataSplitIndexClip(axis_start_dict=starting_point,
                                             axis_len_dict={ax_name: 2*dery})
    new_dm = DataManager()
    new_dm.add_variables(split_data_operator * dm.field)
    new_dm.add_regressors(split_data_operator * dm.regressors)
    new_dm.set_X_operator(dm.X_operator)
    new_dm.set_y_operator(dm.y_operator)
    new_dm.set_domain()
    init_point = starting_point.copy()
    # get derivatives up to the unknown: build the initial state vector v0
    # containing [var, var', ..., var^(dery-1)] for every variable.
    v0 = []
    term_names = []
    for sym_var, var in zip(new_dm.sym_field.data, new_dm.field.data):
        terms = [var.name.diff(ax_name, i) for i in range(dery)]
        v0_temp = (PolyD(derivative_order_dict={ax_name: dery - 1}) *
                   var).evaluate_ix(init_point)
        # evaluate_ix keys are whitespace-stripped sympy strings; derivative
        # entries carry a leading '1.0*' factor, the 0th-order one does not.
        v0_temp = [v0_temp[str(f).replace(' ', '')] if i == 0
                   else v0_temp['1.0*' + str(f).replace(' ', '')]
                   for i, f in enumerate(terms)]
        v0 += v0_temp
        term_names += terms
    t0 = new_dm.domain.get_value_from_index(ax_name, init_point[ax_name])
    # Time grid: dery warm-up points plus the requested horizon.
    t = np.arange(t0,
                  t0 + (dery + horizon) * new_dm.domain.step_width[ax_name],
                  new_dm.domain.step_width[ax_name])
    v = scipy.integrate.odeint(func=ode_func, y0=v0, t=t)
    if len(v.shape) == 1:
        v = v.reshape((-1, 1))
    # v = odeint(ode_func, v0, t)
    # Keep only the last `horizon` rows, and only the 0th-derivative column
    # of each variable (columns are evenly spaced across the state vector).
    df_pred = pd.DataFrame(
        v[-horizon:, np.linspace(0, v.shape[1], len(var_names)+1,
                                 dtype=int)[:-1]],
        index=list(range(horizon)), columns=var_names)
    df_pred = df_pred.astype(float)
    return df_pred
def test_get_var(self):
    """Column names of the X/y DataFrames must follow the symbolic naming."""
    dm = DataManager()
    dm.add_variables([self.v])
    dm.add_regressors(self.x)
    dm.set_domain()
    dm.set_X_operator(
        lambda field: PolyD({"x": 1}) * field)  # (PolyD({"x": 1})
    dm.set_y_operator(lambda field: D(1, "x") * field)
    # X keeps the raw variable and regressor names; y is the derivative term.
    assert all(dm.get_X_dframe().columns == ['v(x,y)', 'x(x)'])
    assert all(dm.get_y_dframe().columns == ['1.0*Derivative(v(x,y),x)'])
def test_getXy_eq(self):
    """get_Xy_eq must return the symbolic X terms and the y derivative."""
    dm = DataManager()
    dm.add_variables([self.v])
    dm.add_regressors(self.x)
    dm.set_domain()
    dm.set_X_operator(
        lambda field: (PolyD({"x": 1}) * field))  # (PolyD({"x": 1})
    dm.set_y_operator(lambda field: D(2, "x") * field)
    # print(data_manager.get_Xy_eq()[0].data[1].sym_expression)
    x_eq, y_eq = dm.get_Xy_eq()
    assert str(x_eq) == "[v(x,y), 1.0*Derivative(v(x,y),x), x(x)]"
    assert str(y_eq) == "[1.0*Derivative(v(x,y),(x,2))]"
def x_operator(field, regressors):
    """Expand field = [M, C] into polynomial feature terms.

    (`regressors` is accepted for interface compatibility with the other
    x_operator variants but is not used here.)

    `target_derivative_order`, `rational` and `polynomial_order` are free
    variables resolved from the enclosing scope at call time.
    """
    expanded = copy.deepcopy(field)  # keep the caller's field intact
    # [M, C] -> [M, C, M', C', M'', C'', ...] up to target_derivative_order-1
    expanded = PolyD({'t': target_derivative_order - 1}) * expanded
    if rational:
        # also include the reciprocal of every term collected so far
        expanded.append(expanded.__rtruediv__(1.0))
    # -> [1, M, C, M', C', M'', C'', ..., MC, MM'', MCM', ...]
    return Poly(polynomial_order=polynomial_order) * expanded
def get_v0(data_manager, dery):
    """Build the ODE initial-state vector from the last sample of the data.

    Returns (v0, last_time) where v0 concatenates, for each variable, its
    value and its derivatives of order 1 .. dery-1 evaluated at the final
    index, and last_time is the domain time that state corresponds to.
    """
    # Only the first (assumed single) domain axis is used.
    ax_name = data_manager.domain.axis_names[0]
    starting_point = {ax_name: -1}  # make predictions to the future.
    init_point = starting_point.copy()
    # get derivatives up to the unknown
    v0 = []
    for sym_var, var in zip(data_manager.sym_field.data,
                            data_manager.field.data):
        terms = [var.name.diff(ax_name, i) for i in range(dery)]
        v0_temp = (PolyD(derivative_order_dict={ax_name: dery - 1}) *
                   var).evaluate_ix(init_point)
        # evaluate_ix keys are whitespace-stripped sympy strings; derivative
        # entries carry a leading '1.0*' factor, the 0th-order one does not.
        v0_temp = [v0_temp[str(f).replace(' ', '')] if i == 0
                   else v0_temp['1.0*' + str(f).replace(' ', '')]
                   for i, f in enumerate(terms)]
        v0 += v0_temp
    # The finite-difference derivatives consume (dery - 1) trailing samples,
    # so the state is anchored that many steps before the upper limit.
    last_time = (data_manager.domain.upper_limits[ax_name] -
                 data_manager.domain.step_width[ax_name] * (dery - 1))
    return v0, last_time
def test_over_SymVariables(self):
    """PolyD with order 2 in both x and y yields 3*3 = 9 symbolic terms
    (derivative orders 0..2 per axis)."""
    derived = PolyD(derivative_order_dict={"x": 2, "y": 2}) * self.sym_v
    # [print(e) for e in polyv.data]
    assert len(derived) == 9
def test_over_variables(self):
    """PolyD with order 2 in both x and y yields 3*3 = 9 variables
    (derivative orders 0..2 per axis)."""
    derived = PolyD(derivative_order_dict={"x": 2, "y": 2}) * self.v
    assert len(derived) == 9
def integrate(self, split_data_operator_list, dm, starting_point,
              domain_variable2predict, horizon, method='Euler'):
    """Integrate the fitted equation as an ODE over each data split and
    return one prediction DataFrame per split.

    :param split_data_operator_list: split operators selecting the windows
        to start each integration from.
    :type dm: DataManager
    :param starting_point: when more than one variable it is used to define
        the other domain points.
    :type starting_point: dict
    :type domain_variable2predict: str
    :type horizon: int
    :param method: NOTE(review): unused — scipy.integrate.odeint is always
        called regardless of this value.
    :return: list of DataFrames, each (horizon, n_variables).
    """
    assert len(dm.domain) == 1, "only works with 1d variables."
    ax_name = dm.domain.axis_names[0]
    # Collapse the fitted system to single symbolic expressions:
    # X-side is the linear combination with the fitted coefficients.
    eq_x_sym_expression, eq_y_sym_expression = dm.get_Xy_eq()
    eq_x_sym_expression = eq_x_sym_expression.matmul(
        self.coefs_.T).data[0].sym_expression
    eq_y_sym_expression = eq_y_sym_expression.data[0].sym_expression
    # Derivative atoms present in the equation, sorted by order; their count
    # sets the dimension of the ODE state vector.
    der_atoms = get_sorted_derivative_atoms(
        eq_x_sym_expression - eq_y_sym_expression)
    ode_func = get_func_for_ode(eq_x_sym_expression, eq_y_sym_expression)
    var_names = [var.get_full_name() for var in dm.field.data]
    df_predictions_list = []
    for split_data_operator in tqdm(split_data_operator_list):
        # Re-manage the split data with the same operators as the original.
        new_dm = DataManager()
        new_dm.add_variables(split_data_operator * dm.field)
        new_dm.add_regressors(split_data_operator * dm.regressors)  # add regressors without splitting
        new_dm.set_X_operator(dm.X_operator)
        new_dm.set_y_operator(dm.y_operator)
        new_dm.set_domain()
        sub_original_field = new_dm.field
        eq = self.get_equation(new_dm.get_X_sym(), new_dm.get_y_sym()).data
        # --------------- first pass to get the starting point ---------------------
        for sym_eq, original_var, var_name in zip(eq,
                                                  sub_original_field.data,
                                                  var_names):
            # The stencil's forward lag dictates the last usable index.
            backward_lag, forward_lag = get_lag_from_sym_expression(
                sym_eq.sym_expression)
            init_point = starting_point.copy()
            init_point[domain_variable2predict] = \
                sym_eq.domain.shape[domain_variable2predict] - \
                forward_lag[domain_variable2predict]
            # Initial state: variable value plus derivatives matching
            # der_atoms (all but the highest, which the ODE supplies).
            v0 = (PolyD(derivative_order_dict={ax_name: len(der_atoms)}) *
                  new_dm.field).evaluate_ix(init_point)
            # evaluate_ix keys are whitespace-stripped sympy strings;
            # derivative entries carry a leading '1.0*' factor.
            v0 = [v0[str(f).replace(' ', '')] if i == 0
                  else v0['1.0*' + str(f).replace(' ', '')]
                  for i, f in enumerate([dm.field.data[0].name] +
                                        der_atoms[:-1])]
            t0 = new_dm.domain.get_value_from_index(ax_name,
                                                    init_point[ax_name])
            # Time grid: warm-up points covering the forward lag, then the
            # requested horizon, on the domain's native step width.
            t = np.arange(
                t0,
                t0 + (forward_lag[domain_variable2predict] + horizon) *
                new_dm.domain.step_width[ax_name],
                new_dm.domain.step_width[ax_name])
            # ode_func = get_func_for_ode(eq_x_sym_expression, eq_y_sym_expression)
            # solver = getattr(odespy, method)(ode_func)
            # # solver = method(ode_func)
            # solver.set_initial_condition(v0)
            # v, t = solver.solve(t)
            v = scipy.integrate.odeint(func=ode_func, y0=v0, t=t)
            # v = self.integrator_core(method, t, v0, get_func_for_ode, eq_x_sym_expression, eq_y_sym_expression)
            # v = odeint(ode_func, v0, t, hmax=new_dm.domain.step_width["t"], hmin=new_dm.domain.step_width["t"],
            #            h0=new_dm.domain.step_width["t"])
            # Keep only the last `horizon` values of the 0th-order state.
            df_pred = pd.DataFrame(v[-horizon:, 0],
                                   index=list(range(horizon)),
                                   columns=var_names)
            df_pred = df_pred.astype(float)
            df_predictions_list.append(df_pred)
    return df_predictions_list