Example #1
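# Grid and dataset assumed from the other examples; this snippet begins mid-script
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')
dataset = Dataset(BurgersDelta, v=0.1, A=1.0)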
config = {
    'n_in': 2,
    'hidden_dims': [30, 30, 30, 30, 30],
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {
        'poly_order': 2,
        'diff_order': 3
    }
}
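# With poly_order=2 and diff_order=3, the library presumably holds all products of
# {1, u, u^2} with {1, u_x, u_xx, u_xxx}, i.e. 12 candidate terms (an assumption
# based on the 1-D library convention these examples appear to use).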
n_runs = 5

for run_idx in np.arange(n_runs):
    X_train, y_train, rand_idx = dataset.create_dataset(x_grid.reshape(-1, 1),
                                                        t_grid.reshape(-1, 1),
                                                        n_samples=1000,
                                                        noise=0.1,
                                                        random=True,
                                                        return_idx=True)

    theta = dataset.library(x_grid.reshape(-1, 1),
                            t_grid.reshape(-1, 1),
                            poly_order=2,
                            deriv_order=3)[rand_idx, :]
    dt = dataset.time_deriv(x_grid.reshape(-1, 1),
                            t_grid.reshape(-1, 1))[rand_idx, :]

    model = DeepMod(**config)
    optimizer = torch.optim.Adam(model.parameters(),
                                 betas=(0.99, 0.999),
                                 amsgrad=True)
    train(model,
          X_train,
          y_train,
          optimizer,
          10000,  # max iterations; arguments assumed by mirroring train_dynamic in Example #3, the call is truncated in the source
          log_dir=f'runs/run_{run_idx}/')
Example #2
# Settings for reproducibility
np.random.seed(42)
torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Making data
v = 0.2
A = 1.0
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)

x_grid, t_grid = np.meshgrid(x, t, indexing='ij')
dataset = Dataset(BurgersDelta, v=v, A=A)
X_train, y_train = dataset.create_dataset(x_grid.reshape(-1, 1),
                                          t_grid.reshape(-1, 1),
                                          n_samples=1000,
                                          noise=0.2)

# Configuring model
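# Pipeline: the network approximates u(x, t), the library builds candidate terms
# from its derivatives, the estimator selects the active terms, and the
# constraint fits their coefficients.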
network = NN(2, [30, 30, 30, 30, 30], 1)  # Function approximator
library = Library1D(poly_order=2, diff_order=2)  # Library function
estimator = PINN([2, 4])  # fix the active terms to library indices 2 and 4
constraint = LeastSquares()  # How to constrain
model = DeepMoD(network, library, estimator,
                constraint)  # Putting it all in the model

# Running model
sparsity_scheduler = Periodic(initial_epoch=0,
                              periodicity=1)  # Defining when to apply sparsity
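# (initial_epoch=0 with periodicity=1 updates the sparsity mask every iteration from the start)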
optimizer = torch.optim.Adam(model.parameters(),
                             betas=(0.99, 0.999),
                             amsgrad=True)  # amsgrad assumed from the other examples; the snippet is truncated here
Example #3
from sklearn.linear_model import LassoLarsIC

if torch.cuda.is_available():
    torch.set_default_tensor_type('torch.cuda.FloatTensor')
np.random.seed(42)
torch.manual_seed(42)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

v = 0.1
A = 1.0

# Making grid
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')
dataset = Dataset(BurgersDelta, v=v, A=A)

noise_range = np.arange(0.0, 1.01, 0.10)
n_runs = 5

for noise_level in noise_range:
    for run in np.arange(n_runs):
        X_train, y_train = dataset.create_dataset(x_grid.reshape(-1, 1),
                                                   t_grid.reshape(-1, 1),
                                                   n_samples=1000,
                                                   noise=noise_level,
                                                   random=True,
                                                   return_idx=False,
                                                   random_state=run)
        estimator = Clustering(estimator=LassoLarsIC(fit_intercept=False))
        config = {
            'n_in': 2,
            'hidden_dims': [30, 30, 30, 30, 30],
            'n_out': 1,
            'library_function': library_1D_in,
            'library_args': {
                'poly_order': 2,
                'diff_order': 3
            },
            'sparsity_estimator': estimator
        }
        model = DeepModDynamic(**config)
        optimizer = torch.optim.Adam(model.parameters(),
                                     betas=(0.99, 0.999),
                                     amsgrad=True)
        train_dynamic(model,
                      X_train,
                      y_train,
                      optimizer,
                      10000,
                      log_dir=f'runs/cluster_{noise_level:.2f}_run_{run}/')

Example #4
# Settings for reproducibility
np.random.seed(42)
torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Making training set
dataset = Dataset(DoubleSoliton, c=(5, 2), x0=(-3, -1))

x_sample = np.linspace(-7, 5, 50)
t_sample = np.linspace(0.0, 1.0, 40)
x_grid_sample, t_grid_sample = np.meshgrid(x_sample, t_sample, indexing='ij')
X_train, y_train = dataset.create_dataset(x_grid_sample.reshape(-1, 1),
                                          t_grid_sample.reshape(-1, 1),
                                          n_samples=0,
                                          noise=0.1,
                                          normalize=True,
                                          random=True)
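# n_samples=0 above presumably keeps every grid point rather than subsampling
# (an assumption inferred from how the other examples use n_samples)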

# Configuring model
network = Siren(2, [30, 30, 30, 30, 30], 1)  # Function approximator
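# Siren uses sinusoidal activations instead of tanh/ReLU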
library = Library1D(poly_order=2, diff_order=3)  # Library function
estimator = Threshold(0.1)  # Sparse estimator; Clustering() is an alternative
constraint = LeastSquares()  # How to constrain
model = DeepMoD(network, library, estimator,
                constraint)  # Putting it all in the model

# Running model
sparsity_scheduler = Periodic(
    initial_epoch=5000, periodicity=100)  # Defining when to apply sparsity
optimizer = torch.optim.Adam(model.parameters(),
                             betas=(0.99, 0.999),
                             amsgrad=True)  # remaining arguments assumed; the snippet is truncated here
Example #5
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

v = 0.1
A = 1.0

# Making grid
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')

# Making data
dataset = Dataset(BurgersDelta, v=v, A=A)
X_train, y_train = dataset.create_dataset(x_grid.reshape(-1, 1),
                                          t_grid.reshape(-1, 1),
                                          n_samples=2000,
                                          noise=0.1,
                                          random=True)

# Running deepmod
estimator = LassoLarsIC(fit_intercept=False)
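# LassoLarsIC picks the Lasso regularization strength via an information criterion (AIC/BIC)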
config = {
    'n_in': 2,
    'hidden_dims': [30, 30, 30, 30, 30],
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {
        'poly_order': 2,
        'diff_order': 2
    },
    'sparsity_estimator': estimator
}  # config closed here; the rest of the snippet is truncated in the source
Example #6
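# Setup assumed from the other examples; this snippet begins mid-script
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
n_runs = 5
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')
dataset = Dataset(BurgersDelta, v=0.1, A=1.0)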
for run_idx in np.arange(n_runs):
    network = NN(2, [30, 30, 30, 30, 30], 1)
    library = Library1D(poly_order=2, diff_order=3)  # Library function
    estimator = Threshold(0.1)  # Sparse estimator
    constraint = LeastSquares()  # How to constrain
    model = DeepMoD(network, library, estimator,
                    constraint).to(device)  # Putting it all in the model

    sparsity_scheduler = Periodic(periodicity=25, initial_epoch=10000)
    optimizer = torch.optim.Adam(model.parameters(),
                                 betas=(0.9, 0.999),
                                 amsgrad=True)  # Defining optimizer

    X, y = dataset.create_dataset(x_grid.reshape(-1, 1),
                                  t_grid.reshape(-1, 1),
                                  n_samples=1000,
                                  noise=0.4,
                                  random=True,
                                  normalize=False)
    X, y = X.to(device), y.to(device)

    train(model,
          X,
          y,
          optimizer,
          sparsity_scheduler,
          log_dir=f'data_high_noise/baseline_run_{run_idx}/',
          write_iterations=25,  # log every 25 iterations
          max_iterations=5000,
          delta=0.00,  # convergence tolerance for early stopping
          patience=100)  # Running
Example #7
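v = 0.1  # v and A are not defined in this snippet; values assumed from Example #3
A = 1.0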
# Making grid
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')
dataset = Dataset(BurgersDelta, v=v, A=A)

noise_range = np.arange(0.0, 1.61, 0.20)  # alternative sweep: np.arange(0.0, 0.51, 0.05)
n_runs = 5

for noise_level in noise_range:
    for run in np.arange(n_runs):
        X_train, y_train = dataset.create_dataset(
            x_grid.reshape(-1, 1),
            t_grid.reshape(-1, 1),
            n_samples=1000,
            noise=noise_level,
            random=True,
            return_idx=False,
            random_state=run
        )  # random_state=run makes each run's dataset reproducible, so reruns differ only in the network
        estimator = Clustering(estimator=LassoLarsIC(fit_intercept=False))
        config = {
            'n_in': 2,
            'hidden_dims': [30, 30, 30, 30, 30],
            'n_out': 1,
            'library_function': library_1D_in,
            'library_args': {
                'poly_order': 2,
                'diff_order': 3
            },
            'sparsity_estimator': estimator
        }  # config closed here; the rest of the loop body is truncated in the source

Example #8
x = np.linspace(-3, 4, 100)  # x and t are assumed from the other examples; this snippet begins mid-script
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t)

dataset = Dataset(BurgersDelta, v=0.1, A=1.0)
dataset = Dataset(BurgersCos, v=0.1, a=0.1, b=0.1, k=2)
dataset = Dataset(BurgersSawtooth, v=0.1)  # only this last assignment takes effect
#dataset = Dataset(KdVSoliton, c=5.0, a=-1.0, b=1)

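# Notebook-style inspection: the bare expressions below display their values in a REPL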
dataset.generate_solution(x_grid, t_grid).shape
dataset.parameters

dataset.time_deriv(x_grid, t_grid).shape

theta = dataset.library(x_grid.reshape(-1, 1),
                        t_grid.reshape(-1, 1),
                        poly_order=2,
                        deriv_order=2)
dt = dataset.time_deriv(x_grid.reshape(-1, 1), t_grid.reshape(-1, 1))

theta.shape
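# Least-squares fit of the time derivative onto the library; on this noiseless,
# analytic data it should recover the Burgers coefficients (the u_xx and u*u_x terms)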
np.linalg.lstsq(theta, dt, rcond=None)[0]

X_train, y_train = dataset.create_dataset(x_grid,
                                          t_grid,
                                          n_samples=0,
                                          noise=0.05)

y_train.shape

from phimal_utilities.analysis import load_tensorboard
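
A minimal usage sketch, assuming load_tensorboard takes a run directory and returns the logged scalars as a DataFrame (the signature is not shown in these snippets, and the path is hypothetical):

df = load_tensorboard('runs/cluster_0.10_run_0/')  # hypothetical path from the Example #3 sweep
print(df.columns)  # check which tags were logged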