Example #1
import numpy as np
import pysindy as ps
from pysindy import CustomLibrary


def sindy(n, p, a, thres):
    if n == 2:
        # Pairwise inverse-square interaction terms for two bodies in the plane
        functions = [
            lambda x0, y0, x1, y1: (x1 - x0) / (
                (x1 - x0)**2 + (y1 - y0)**2)**(3 / 2),
            lambda x0, y0, x1, y1: (y1 - y0) / (
                (x1 - x0)**2 + (y1 - y0)**2)**(3 / 2)
        ]

        lib_custom = CustomLibrary(library_functions=functions)
        optimizer = ps.STLSQ(threshold=thres)

        t = np.arange(0, p.shape[0], 1)

        model = ps.SINDy(feature_library=lib_custom,
                         optimizer=optimizer,
                         feature_names=['x0', 'y0', 'x1', 'y1'])

        model.fit(p, t=t, x_dot=a)
        model.print(lhs=["x0''", "y0''", "x1''", "y1''"])
        coef = model.coefficients()
        print(coef)
        return coef
    elif n == 3:
        functions = [
            lambda x0, y0, x1, y1, x2, y2: (x1 - x0) / ((x1 - x0)**2 +
                                                        (y1 - y0)**2)**(3 / 2),
            lambda x0, y0, x1, y1, x2, y2: (y1 - y0) / ((x1 - x0)**2 +
                                                        (y1 - y0)**2)**(3 / 2),
            lambda x0, y0, x1, y1, x2, y2: (x2 - x0) / ((x2 - x0)**2 +
                                                        (y2 - y0)**2)**(3 / 2),
            lambda x0, y0, x1, y1, x2, y2: (y2 - y0) / ((x2 - x0)**2 +
                                                        (y2 - y0)**2)**(3 / 2),
            lambda x0, y0, x1, y1, x2, y2: (x2 - x1) / ((x2 - x1)**2 +
                                                        (y2 - y1)**2)**(3 / 2),
            lambda x0, y0, x1, y1, x2, y2: (y2 - y1) / ((x2 - x1)**2 +
                                                        (y2 - y1)**2)**(3 / 2)
        ]

        lib_custom = CustomLibrary(library_functions=functions)
        optimizer = ps.STLSQ(threshold=thres)

        t = np.arange(0, p.shape[0], 1)

        model = ps.SINDy(feature_library=lib_custom,
                         optimizer=optimizer,
                         feature_names=['x0', 'y0', 'x1', 'y1', 'x2', 'y2'])

        model.fit(p, t=t, x_dot=a)
        model.print(lhs=["x0''", "y0''", "x1''", "y1''", "x2''", "y2''"])
        coef = model.coefficients()
        print(coef)
        return coef
    else:
        print('Number of bodies not supported')
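A hypothetical usage sketch for the two-body branch (not from the original project): two equal masses on a circular orbit about their common centre of mass, with G*m = 1 and unit radius, so the recovered coefficients on the pairwise terms should come out close to +1 and -1.

t = np.linspace(0, 40, 2000)
omega = 0.5                               # circular-orbit frequency for G*m = 1, radius 1
body0 = np.column_stack([np.cos(omega * t), np.sin(omega * t)])
body1 = -body0
p = np.hstack([body0, body1])             # positions [x0, y0, x1, y1]
a = -omega**2 * p                         # circular-orbit accelerations
coef = sindy(n=2, p=p, a=a, thres=0.1)    # expect coefficients of about +1 and -1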
Example #2
    def test_default_5d_model(self):
        contents = read_file("random_5d.csv", "")
        time_series, dt, contents, variable_names = clean_contents(contents)
        model = ps.SINDy(feature_names=variable_names)
        model.fit(contents, t=dt)

        _ = model.coefficients()
        actual_score = model.score(contents, t=time_series)

        # The fitted coefficients are not expected to be meaningful for random data,
        # so only the score is checked.
        expected_score_max = 0.1  # the model is expected to fit poorly

        assert actual_score < expected_score_max  # score should stay below 0.1
Example #3
    def train(self, trajs, xdot=None, silent=False):
        X = [traj.obs for traj in trajs]
        U = [traj.ctrls for traj in trajs]

        #basis_funcs = [get_constant_basis_func(), get_identity_basis_func()]
        basis_funcs = [get_identity_basis_func()]
        if self.trig_basis:
            for freq in range(1, self.trig_freq + 1):
                basis_funcs += get_trig_basis_funcs(freq)
                if self.trig_interaction:
                    basis_funcs += get_trig_interaction_terms(freq)

        if self.poly_basis:
            for deg in range(2, self.poly_degree + 1):
                basis_funcs.append(get_poly_basis_func(deg))
            if self.poly_cross_terms:
                for deg in range(2, self.poly_degree + 1):
                    basis_funcs += get_cross_term_basis_funcs(deg)

        library_functions = [basis.func for basis in basis_funcs]
        function_names = [basis.name_func for basis in basis_funcs]
        library = ps.CustomLibrary(library_functions=library_functions,
                function_names=function_names)
        self.basis_funcs = basis_funcs

        if self.time_mode == "continuous":
            sindy_model = ps.SINDy(feature_library=library, 
                    discrete_time=False,
                    optimizer=ps.STLSQ(threshold=self.threshold))
            sindy_model.fit(X, u=U, multiple_trajectories=True, 
                    t=self.system.dt, x_dot=xdot)
        elif self.time_mode == "discrete":
            sindy_model = ps.SINDy(feature_library=library, 
                    discrete_time=True,
                    optimizer=ps.STLSQ(threshold=self.threshold))
            sindy_model.fit(X, u=U, multiple_trajectories=True)
        self.model = sindy_model
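The continuous/discrete split above mirrors pysindy's two fitting modes: the continuous-time model fits dx/dt = f(x, u), while the discrete-time model fits the map x_{k+1} = f(x_k, u_k). A minimal self-contained sketch of the discrete-time mode on stand-in data (a logistic map, not from this project):

import numpy as np
import pysindy as ps

# Logistic map trajectory as stand-in discrete-time data
x = np.empty(200)
x[0] = 0.5
for k in range(199):
    x[k + 1] = 3.6 * x[k] * (1 - x[k])

discrete_model = ps.SINDy(discrete_time=True,
                          feature_library=ps.PolynomialLibrary(degree=2),
                          optimizer=ps.STLSQ(threshold=0.05))
discrete_model.fit(x.reshape(-1, 1))
discrete_model.print()  # should recover x[k+1] ~= 3.6 x[k] - 3.6 x[k]^2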
Example #4
    def test_default_lorenz_model(self):
        contents = read_file("data_Lorenz3d.csv", "")
        time_series, dt, contents, variable_names = clean_contents(contents)
        model = ps.SINDy(feature_names=variable_names)
        model.fit(contents, t=dt)

        actual_co = model.coefficients()
        actual_score = model.score(contents, t=time_series)

        expected_co = [[0.0, -10.0, 10.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                       [0.0, 28.0, -1.0, 0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0],
                       [0.0, 0.0, 0.0, -2.666, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]]
        expected_score = 1.0

        assert (pytest.approx(actual_co, 0.1) == expected_co)
        assert (pytest.approx(actual_score, 0.01) == expected_score)
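The expected coefficient matrix above encodes the standard Lorenz system x' = 10(y - x), y' = x(28 - z) - y, z' = xy - (8/3)z. A sketch of how such data could be generated and fit from scratch, assuming data_Lorenz3d.csv came from these parameters:

import numpy as np
import pysindy as ps
from scipy.integrate import solve_ivp

def lorenz(t, s, sigma=10.0, rho=28.0, beta=8.0 / 3.0):
    x, y, z = s
    return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]

t = np.arange(0, 10, 0.002)
sol = solve_ivp(lorenz, (t[0], t[-1]), [-8.0, 8.0, 27.0],
                t_eval=t, rtol=1e-10, atol=1e-10)
model = ps.SINDy(feature_names=["x", "y", "z"])
model.fit(sol.y.T, t=t)
model.print()  # should closely match expected_co above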
Example #5
def make_model(data, calculate_derivatives=True):
    """
    Constructs and trains a SINDy model.

    Parameters
    ----------
    data: Data object
        training data object constructed using the Data class
    calculate_derivatives: bool
        if True, derivatives are calculated during training (the
        differentiation method can be specified); if False, the
        precalculated derivatives in data.x_dot are used

    Returns
    -------
    model: SINDy model object
        trained SINDy model
    """

    optimizer = ps.SR3(threshold=0.01,
                       thresholder='l1',
                       normalize=True,
                       max_iter=1000)  # ?
    feature_lib = ps.PolynomialLibrary(degree=2)  # ?

    model = ps.SINDy(optimizer,
                     feature_library=feature_lib,
                     feature_names=FEATURES + COMMANDS)

    if calculate_derivatives:
        model.fit(x=data.x,
                  u=data.u,
                  t=data.t,
                  multiple_trajectories=data.multiple_trajectories)
    else:
        model.fit(x=data.x,
                  x_dot=data.x_dot,
                  u=data.u,
                  t=data.t,
                  multiple_trajectories=data.multiple_trajectories)

    return model
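A hypothetical usage sketch; the Data class, FEATURES and COMMANDS are not shown in this example, so the stand-ins below only mirror the attributes make_model actually reads.

# Hypothetical stand-ins for the module-level names make_model relies on
FEATURES = ["x0", "x1"]
COMMANDS = ["u0"]

class Data:
    """Minimal stand-in for the Data class assumed by make_model."""
    def __init__(self, x, u, t, x_dot=None, multiple_trajectories=False):
        self.x, self.u, self.t = x, u, t
        self.x_dot = x_dot
        self.multiple_trajectories = multiple_trajectories

# data = Data(x=state_history, u=input_history, t=timestamps)
# model = make_model(data)
# model.print()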
Example #6
    def test_default_lorenz_plot(self):
        contents = read_file("data_Lorenz3d.csv", "")
        time_series, dt, contents, variable_names = clean_contents(contents)
        model = ps.SINDy(feature_names=variable_names)
        model.fit(contents, t=dt)

        coefs = model.coefficients()
        # Get the feature names from the fitted model
        feats = model.get_feature_names()

        # Convert the system's initial conditions into a float array,
        # which is what model.simulate() expects
        conds = np.array([float(val) for val in contents[0]])
        # Forward-simulate the identified model from the original
        # initial conditions using the identified equations
        sim_data = model.simulate(conds, time_series)

        fig, _ = create_plot(time_series, contents, variable_names, coefs,
                             feats, sim_data)

        return fig
Example #7
# DX_K = np.copy(x_dot)
# DX_K[:, 0] = x_dot[:, 0] - a*X[:, 0]    
# DX_K[:, 1] = x_dot[:, 1] - c*X[:, 1]  
# print(DX_K.shape)
# plt.plot(tt, DX_K)

# %%
differentiation_method = ps.FiniteDifference(order=2)
optimizer = ps.STLSQ(threshold=th)
feature_library = ps.PolynomialLibrary(degree=2)


# %%
model = ps.SINDy(
    differentiation_method=differentiation_method,
    feature_library=feature_library,
    optimizer=optimizer,
    feature_names=feature_names
)
dt = t_end / (X.shape[0] - 1)
print("PySINDy nominal")
model.fit(X, x_dot=DX_, t=dt, multiple_trajectories=False)
%time model.print()
%time p_ident_nominal = model.coefficients().T
calc_param_ident_error(p_nom, p_ident_nominal)

print("PySINDy central")
%time model.fit(X, x_dot=x_dot, t=dt, multiple_trajectories=False)
%time model.print()
%time p_ident_zentral = model.coefficients().T
calc_param_ident_error(p_nom, p_ident_zentral)
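calc_param_ident_error is not defined in this snippet; a hypothetical sketch of what such a helper might compute (a relative error between the nominal and identified coefficient matrices):

import numpy as np

def calc_param_ident_error(p_nom, p_ident):
    """Hypothetical helper: relative error between nominal and identified parameters."""
    err = np.linalg.norm(p_ident - p_nom) / np.linalg.norm(p_nom)
    print(f"relative parameter error: {err:.3e}")
    return err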
Example #8
def fit_model_cb(sender, data):
    df = get_data(cfg.CSV)

    # Get x data
    x_names = get_data(cfg.X_NAMES)
    X = df[x_names].to_numpy()
    add_data(cfg.X, X)

    # Get derivative data

    # Get input data
    u_names = get_data(cfg.U_NAMES)
    if len(u_names) > 0:
        U = df[u_names].to_numpy()
    else:
        U = None

    add_data(cfg.U, U)

    # Get time data
    if get_value("Method##time") == "Constant Step":
        dt = get_value("Step size##time")
        time = np.arange(0, X.shape[0]) * dt
    else:
        time_channel = get_value("Channel##combo##time")
        if time_channel is None or time_channel == "":
            log_error("Time channel must be selected", logger="logger##main")
            return
        time = df[time_channel].to_numpy()
    add_data(cfg.TIME, time)

    # Get Optimizer
    if get_value("Methods##Optimizers") == "STLSQ":
        threshold = get_value("threshold##optimizers")
        alpha = get_value("alpha##optimizers")
        max_iter = get_value("max_iter##optimizers")
        optimizer = ps.STLSQ(threshold=threshold,
                             alpha=alpha,
                             max_iter=max_iter)
    elif get_value("Methods##Optimizers") == "Lasso":
        alpha = get_value("alpha##optimizers")
        max_iter = get_value("max_iter##optimizers")
        optimizer = Lasso(alpha=alpha, max_iter=max_iter, fit_intercept=False)
    else:
        optimizer = None

    ##############################
    # Feature libraries
    ##############################
    libs = []
    if get_value("Enable##polynomial##libraries"):
        degree = get_value("Degree##polynomial##libraries")
        include_interaction = not (get_value("Type##polynomial##libraries")
                                   == "Only states")
        interaction_only = get_value(
            "Type##polynomial##libraries") == "Only interaction"
        include_bias = get_value("Include bias terms##polynomial##libraries")
        log_debug(
            f'degree: {degree}, include_interaction: {include_interaction}, interaction_only: {interaction_only}, include_bias: {include_bias}'
        )
        libs.append(
            ps.PolynomialLibrary(degree=degree,
                                 include_interaction=include_interaction,
                                 interaction_only=interaction_only,
                                 include_bias=include_bias))

    if get_value("Enable##fourier##libraries"):
        n_frequencies = get_value("n_frequencies##fourier##libraries")
        include_sin = get_value("Include sin##fourier##libraries")
        include_cos = get_value("Include cos##fourier##libraries")
        try:
            fourierlib = ps.FourierLibrary(n_frequencies=n_frequencies,
                                           include_sin=include_sin,
                                           include_cos=include_cos)
            libs.append(fourierlib)
        except ValueError as err:
            log_error(err, "logger##main")
            return

    if get_value("Enable##identity##libraries"):
        libs.append(ps.IdentityLibrary())

    # Fall back to a default polynomial library if no library was selected
    if not libs:
        libs.append(ps.PolynomialLibrary())

    log_debug(libs, logger="logger##main")

    # Get "feature_library" by reducing the "libs" list
    feature_library = functools.reduce(lambda a, b: a + b, libs)
    try:
        model = ps.SINDy(optimizer=optimizer, feature_library=feature_library)
        model.fit(X, t=time, u=U)
        log_info("Model fitted.", logger="logger##main")
    except ValueError as err:
        log_error(err, logger="logger##main")
        return

    model_eqs = []
    for i, eq in enumerate(model.equations()):
        model_eqs.append(f"der(x{i}) = {eq}")
    model_text = "\n".join(model_eqs)

    set_value("Score##fitting", model.score(X, time, u=U))
    set_value("Equations##fitting", model_text)
    add_data(cfg.MODEL, model)

    # clear X_fit listbox
    configure_item("X_fit##fitting", items=[])
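The functools.reduce call above relies on pysindy's library concatenation: adding two feature libraries yields the union of their candidate terms. A standalone sketch of that behaviour on made-up data, independent of the GUI code above:

import numpy as np
import pysindy as ps

# "+" on feature libraries concatenates their candidate terms, which is what
# functools.reduce(lambda a, b: a + b, libs) builds from the selected libraries.
combined = ps.PolynomialLibrary(degree=2) + ps.FourierLibrary(n_frequencies=1)

x = np.random.default_rng(0).standard_normal((100, 2))
combined.fit(x)
print(combined.get_feature_names(["x0", "x1"]))
# polynomial terms (1, x0, x1, ...) followed by sin/cos terms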
Example #9
# Training data
t_fine = np.linspace(0, Tf, N_fine + 1)
X_fine = odeint_jax(LV, x0, t_fine, alpha, beta, gamma, delta)
X_fine_noise = X_fine + noise * X_fine.std(0) * random.normal(
    key, X_fine.shape)
t = t_fine[onp.array(list(range(0, N_fine + 1, N_fine // N)))]
X_train = X_fine_noise[list(range(0, N_fine + 1, N_fine // N)), :]

gap = 4
ind_t = np.array([0])
ind_t = np.concatenate([ind_t[:, None], np.arange(gap + 1, N + 1)[:, None]])
ind_t = ind_t[:, 0]

t_grid = onp.array(t[ind_t])
print('case_1_as_GP_NODE', t_grid, t_grid.shape)
model_GP_ODE = ps.SINDy(feature_library=custom_library)
model_GP_ODE.fit(onp.array(X_train[ind_t, :]), t=t_grid)
print('case_1_as_GP_NODE:')
model_GP_ODE.print()
x_test_sim = model_GP_ODE.simulate(x0_onp, t_grid_test)

plt.figure(figsize=(12, 6.5))
plt.xticks(fontsize=22)
plt.yticks(fontsize=22)
plt.plot(t_grid_test,
         data_test[:, 0],
         'r-',
         label="True trajectory of $x_1(t)$")
plt.plot(t_grid, X_train[ind_t, 0], 'ro', label="Training data of $x_1(t)$")
plt.plot(t_grid_test,
         x_test_sim[:, 0])
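The LV right-hand side and odeint_jax are defined elsewhere in this project; a hypothetical sketch of a standard predator-prey Lotka-Volterra RHS with the (x, t, *params) call signature used above:

import jax.numpy as jnp

# Hypothetical: standard predator-prey form, matching the odeint-style signature
def LV(x, t, alpha, beta, gamma, delta):
    prey, predator = x
    return jnp.array([alpha * prey - beta * prey * predator,
                      -gamma * predator + delta * prey * predator])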
Example #10
    # fit Koopman operator
    # model = ps.SINDy()
    # model.fit(Vtilde[:1000, :-1], t=.01)
    # model.print()

    # linear regression:
    # reg = linear_model.LinearRegression()
    # reg.fit(Vtilde[100:600, :-1], dVtilde_dt[100:600, :-1])
    # A = reg.coef_
    # fig, ax = plt.subplots()
    # ax = sns.heatmap(A, center=0)
    #
    # dVtilde_dt_pred = reg.predict(Vtilde[600:, :-1])

    model = ps.SINDy()
    model.fit(Vtilde, t=0.01)
    model.print()
    #
    # fig, ax = plt.subplots()
    # ax.plot(dVtilde_dt_pred[:, 0], alpha=0.6, c='k', linestyle='--', label="pred")
    # ax.plot(dVtilde_dt[500:, 0], alpha=0.6, lw=1, label="true")
    # ax.legend()
    #
    # # runge-kutta integration
    # y = first_order_kutta_runge(dx_dt=dVtilde_dt_pred, x=Vtilde, starting_point=600)
    #
    # # plot the predicted 1st component
    # fig, ax = plt.subplots()
    # ax.plot(y.T[0], alpha=0.6, lw=1, label="pred")
    # ax.legend()
Example #11
functions = [lambda x, y: x / (x**2 + y**2)**(3 / 2),
             lambda x, y: y / (x**2 + y**2)**(3 / 2)]  # the specific interaction terms we are looking for
lib_custom = CustomLibrary(library_functions=functions) # defines the custom library we want to use


# ## Model identification
# 
# The following cell includes the model optimization and model identification. 

# In[6]:


optimizer = ps.STLSQ(threshold=1)

model = ps.SINDy(
        feature_library=lib_custom,
        optimizer=optimizer,
        feature_names=['x', 'y'])

xd = np.gradient(x[:, 0], t)
yd = np.gradient(x[:, 1], t)
dot = np.array([xd, yd]).T  # numerical velocities (transposed); not used in the fit below


model.fit(x, t=t, x_dot=a)
model.print()
coef = model.coefficients()
print(coef)


# ## Data analysis
Example #12
    # LPF?

    # DO[1, :] = 10 * DO[1, :]
    # DO = stats.zscore(DO,axis=1)

    coeff_gram = []
    model_scores = []
    skips = 10
    length_window = int(1 / dt * 30)

    sample_vect = np.arange(DO.shape[1])
    for tt in sample_vect[::skips]:
        if tt + length_window > DO.shape[1]:
            continue
        model = ps.SINDy(optimizer=optimizer, feature_library=lib_generalized)

        DO_snip = DO[:, tt:tt + length_window]
        model.fit(DO_snip.T, t=dt)
        coeff_gram.append(model.coefficients())
        model_scores.append(model.score(DO_snip.T))
    coeff_grams[pt] = np.array(coeff_gram)
#%%

dyn_feat_names = lib_generalized.get_feature_names()
split_dyn_feat_names = ["L: " + a for a in dyn_feat_names
                        ] + ["R: " + a for a in dyn_feat_names]

x_0_coeffs = [a for a in split_dyn_feat_names if a.find("x0") != -1]
x_1_coeffs = [a for a in split_dyn_feat_names if a.find("x1") != -1]
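A possible way to inspect the windowed coefficients collected above, assuming coeff_grams[pt] stacks one coefficient matrix per window with shape (n_windows, n_states, n_features) and that dt is in seconds:

import matplotlib.pyplot as plt

# Track how the coefficients of the first state equation drift across windows
coef_track = coeff_grams[pt][:, 0, :]                          # (n_windows, n_features)
window_starts = sample_vect[::skips][:coef_track.shape[0]] * dt

plt.figure()
plt.plot(window_starts, coef_track)
plt.xlabel("window start time (s)")
plt.ylabel("coefficient value")
plt.legend(dyn_feat_names, fontsize=8)
plt.title("Sliding-window SINDy coefficients, first state")
plt.show()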