# Settings and parameters
import numpy as np  # was missing: np is used below but never imported
import torch        # was missing: torch is used below but never imported

# Use CUDA float tensors by default when a GPU is present.
# NOTE(review): torch.set_default_tensor_type is deprecated in torch >= 2.1;
# torch.set_default_dtype / torch.set_default_device are the modern replacements.
if torch.cuda.is_available():
    torch.set_default_tensor_type('torch.cuda.FloatTensor')

# Fix all RNG seeds and force deterministic cuDNN kernels so every run
# of this script is reproducible.
np.random.seed(42)
torch.manual_seed(42)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

v = 0.1  # presumably the Burgers viscosity passed to BurgersDelta — confirm
A = 1.0  # presumably the amplitude of the delta-peak solution — confirm

# Making grid: 100 spatial points on [-3, 4] crossed with 50 time points
# on [0.5, 5.0]; 'ij' indexing keeps the spatial axis first, so both
# grids have shape (100, 50).
x = np.linspace(-3, 4, num=100)
t = np.linspace(0.5, 5.0, num=50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')

# Making data: analytic Burgers solution (delta-peak initial condition)
# used as the ground-truth dataset.
dataset = Dataset(BurgersDelta, v=v, A=A)

# Network / sparse-library configuration shared by every run below.
config = {
    'n_in': 2,
    'hidden_dims': [30, 30, 30, 30, 30],
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {'poly_order': 2, 'diff_order': 3},
}
n_runs = 5  # number of independent training runs

for run_idx in range(n_runs):
    # Flatten the grids once per run; every dataset call below uses them.
    x_flat = x_grid.reshape(-1, 1)
    t_flat = t_grid.reshape(-1, 1)

    # Fresh noisy sample of the solution, plus the indices that were drawn.
    X_train, y_train, rand_idx = dataset.create_dataset(
        x_flat, t_flat, n_samples=1000, noise=0.1, random=True, return_idx=True)

    # Exact library terms and time derivative restricted to the sampled rows.
    theta = dataset.library(x_flat, t_flat, poly_order=2, deriv_order=3)[rand_idx, :]
    dt = dataset.time_deriv(x_flat, t_flat)[rand_idx, :]

    # Train a fresh model for this run and checkpoint its weights.
    model = DeepMod(**config)
    optimizer = torch.optim.Adam(model.parameters(), betas=(0.99, 0.999), amsgrad=True)
    train(model, X_train, y_train, optimizer, 20000,
          loss_func_args={'library': torch.tensor(theta),
                          'time_deriv': torch.tensor(dt)},
          log_dir=f'runs_new/deepmod_logprob_scaled_run_{run_idx}')
    torch.save(model.state_dict(), f'data/deepmod_logprob_scaled_run_{run_idx}.pt')
# Ejemplo n.º 2
# 0
# NOTE(review): the two lines above are a separator and vote count from the
# code-example site this file was scraped from; as bare text they are a
# SyntaxError, so they are commented out here.
for run in range(n_runs):
    # Shared flattened coordinates for every dataset call in this run.
    xs, ts = x_grid.reshape(-1, 1), t_grid.reshape(-1, 1)

    # Draw a fresh noisy training set together with the sampled indices.
    X_train, y_train, rand_idx = dataset.create_dataset(
        xs, ts, n_samples=1000, noise=0.1, random=True, return_idx=True)

    # Ground-truth library and time derivative at the same sampled points.
    theta = dataset.library(xs, ts, poly_order=2, deriv_order=3)[rand_idx, :]
    dt = dataset.time_deriv(xs, ts)[rand_idx, :]

    # Fresh model + optimizer per run; train, then checkpoint the weights.
    model = DeepMod(**config)
    optimizer = torch.optim.Adam(model.parameters(), betas=(0.99, 0.999), amsgrad=True)
    train(model, X_train, y_train, optimizer, 20000,
          loss_func_args={'library': torch.tensor(theta),
                          'time_deriv': torch.tensor(dt)},
          log_dir=f'runs_new/deepmod_run_{run}')
    torch.save(model.state_dict(), f'data/deepmod_run_{run}.pt')
# Ejemplo n.º 3
# 0
# NOTE(review): the two lines above are a scrape-site separator and vote
# count; the indented kwargs below are the tail of a
# dataset.create_dataset(...) call whose opening line(s) were lost when the
# examples were concatenated. All commented out so the file parses; the
# missing call head cannot be reconstructed from this listing.
#                                           n_samples=2000,
#                                           noise=0.1,
#                                           random=True)

# Running deepmod
# Same architecture as the earlier examples, but with a second-order
# derivative library (diff_order 2 instead of 3).
config = dict(
    n_in=2,
    hidden_dims=[30, 30, 30, 30, 30],
    n_out=1,
    library_function=library_1D_in,
    library_args={'poly_order': 2, 'diff_order': 2},
)
model = DeepMod(**config)

# Trainable scalar passed to the optimizer below alongside the network
# weights — presumably the observation-noise scale of the log-prob loss;
# confirm against train_logprob.
sigma = torch.tensor(0.1, dtype=torch.float32, requires_grad=True)

# Optimize the network weights and sigma jointly. Both parameter groups
# use the same betas; amsgrad, passed as a top-level option, applies to
# every group.
param_groups = [
    {'params': model.parameters(), 'betas': (0.99, 0.999)},
    {'params': sigma, 'betas': (0.99, 0.999)},
]
optimizer = torch.optim.Adam(param_groups, amsgrad=True)
train_logprob(model,
              X_train,
              y_train,
              optimizer,
              20000,