Example #1
0
from sklearn.linear_model import LassoLarsIC

# Put tensors on the GPU by default when one is available.
if torch.cuda.is_available():
    torch.set_default_tensor_type('torch.cuda.FloatTensor')

# Fix every RNG so repeated invocations of this script are reproducible.
np.random.seed(42)
torch.manual_seed(42)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Parameters of the Burgers delta-peak solution.
v = 0.1
A = 1.0

# Space/time sample grid the dataset is evaluated on (100 x 50 points).
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')
dataset = Dataset(BurgersDelta, v=v, A=A)

# Sweep noise levels 0.0 ... 1.0 in 0.1 steps, several runs per level.
noise_range = np.arange(0.0, 1.01, 0.10)
n_runs = 5

for noise_level in noise_range:
    for run in np.arange(n_runs):
        # Fresh noisy subsample for this (noise level, run) pair;
        # random_state=run makes each run's sample reproducible.
        X_train, y_train = dataset.create_dataset(
            x_grid.reshape(-1, 1),
            t_grid.reshape(-1, 1),
            n_samples=1000,
            noise=noise_level,
            random=True,
            return_idx=False,
            random_state=run)

        # Clustering-based sparsity selector wrapped around LassoLarsIC.
        estimator = Clustering(estimator=LassoLarsIC(fit_intercept=False))
        config = {
            'n_in': 2,
            'hidden_dims': [30, 30, 30, 30, 30],
            'n_out': 1,
            'library_function': library_1D_in,
            'library_args': {'poly_order': 2, 'diff_order': 3},
            'sparsity_estimator': estimator,
        }
        model = DeepModDynamic(**config)
        optimizer = torch.optim.Adam(model.parameters(), betas=(0.99, 0.999), amsgrad=True)
        train_dynamic(model, X_train, y_train, optimizer, 10000, log_dir=f'runs/cluster_{noise_level:.2f}_run_{run}/')

Example #2
0
                       requires_grad=True)
# NOTE(review): the line above is the tail of a call that is truncated in
# this excerpt — presumably a torch.tensor(...) building X_train with
# requires_grad=True; confirm against the full source.
# Noisy target values as a float32 tensor; labels carry no gradient.
y_train = torch.tensor(y_noisy[idx, :][:number_of_samples],
                       dtype=torch.float32)

# Cross-validated LARS as the sparsity selector for the dynamic model.
estimator = LarsCV(fit_intercept=False)

# Seven hidden layers of 20 units; the library uses first-order
# polynomials and derivatives up to fourth order.
config = {
    'n_in': 2,
    'hidden_dims': [20] * 7,
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {'poly_order': 1, 'diff_order': 4},
    'sparsity_estimator': estimator,
}
model = DeepModDynamic(**config)

# Only the network parameters are handed to the optimizer here.
# NOTE(review): betas=(0.99, 0.99) — other experiments in this file use
# (0.99, 0.999); confirm the second-moment decay is intentional.
optimizer = torch.optim.Adam(model.network_parameters(),
                             betas=(0.99, 0.99),
                             amsgrad=True)

# Train for 100k iterations; the sparsity mask starts updating after
# iteration 3000 and is refreshed every 250 iterations thereafter.
train(model, X_train, y_train, optimizer, 100000,
      loss_func_args={'start_sparsity_update': 3000,
                      'sparsity_update_period': 250})
Example #3
0
    torch.set_default_tensor_type('torch.cuda.FloatTensor')
# NOTE(review): the guard above is truncated in this excerpt — this line is
# the body of an `if torch.cuda.is_available():` test in the full source.
np.random.seed(42)
torch.manual_seed(42)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Parameters of the Burgers delta-peak solution.
v = 0.1
A = 1.0

# Space/time grid the exact solution is evaluated on.
x = np.linspace(-3, 4, 100)
t = np.linspace(0.5, 5.0, 50)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')

# Data source and a model configuration shared by all runs.
estimator = LassoLarsIC(fit_intercept=False)
dataset = Dataset(BurgersDelta, v=v, A=A)
config = {
    'n_in': 2,
    'hidden_dims': [30, 30, 30, 30, 30],
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {'poly_order': 2, 'diff_order': 3},
    'sparsity_estimator': estimator,
}
n_runs = 5

for run_idx in np.arange(n_runs):
    # Noisy subsample; keep the sample indices so the exact library terms
    # below can be evaluated at the very same locations.
    X_train, y_train, rand_idx = dataset.create_dataset(
        x_grid.reshape(-1, 1), t_grid.reshape(-1, 1),
        n_samples=1000, noise=0.1, random=True, return_idx=True)

    # Ground-truth library matrix and time derivative, restricted to the
    # sampled points.
    theta = dataset.library(x_grid.reshape(-1, 1), t_grid.reshape(-1, 1),
                            poly_order=2, deriv_order=3)[rand_idx, :]
    dt = dataset.time_deriv(x_grid.reshape(-1, 1),
                            t_grid.reshape(-1, 1))[rand_idx, :]

    model = DeepModDynamic(**config)
    optimizer = torch.optim.Adam(model.parameters(), betas=(0.99, 0.999), amsgrad=True)
    # NOTE(review): torch.tensor(theta) inherits numpy's float64 dtype —
    # confirm the training loop casts it to the network's dtype.
    train(model, X_train, y_train, optimizer, 20000,
          loss_func_args={'library': torch.tensor(theta), 'time_deriv': torch.tensor(dt)},
          log_dir=f'runs/deepmod_logprob_lstsq_run_{run_idx}')
    torch.save(model.state_dict(), f'data/deepmod_logprob_lstsq_run_{run_idx}.pt')
Example #4
0
                                           n_samples=1000,
                                           noise=noise_level,
                                           random=True,
                                           return_idx=False,
                                           random_state=run)
 # NOTE(review): truncated excerpt — the lines above close a
 # dataset.create_dataset(...) call inside a (noise_level, run) sweep whose
 # loop headers are outside this view.
 # Hard threshold (|coef| cutoff 0.1) applied on top of 5-fold LassoCV as
 # the sparsity selector.
 estimator = Threshold(threshold=0.1,
                       estimator=LassoCV(cv=5, fit_intercept=False))
 config = {
     'n_in': 2,
     'hidden_dims': [30, 30, 30, 30, 30],
     'n_out': 1,
     'library_function': library_1D_in,
     'library_args': {
         'poly_order': 2,
         'diff_order': 3
     },
     'sparsity_estimator': estimator
 }
 model = DeepModDynamic(**config)
 optimizer = torch.optim.Adam(model.parameters(),
                              betas=(0.99, 0.999),
                              amsgrad=True)
 # Debug print: cheap checksums of the training data for this run.
 print(torch.sum(X_train), torch.sum(y_train))
 train_dynamic(model,
               X_train,
               y_train,
               optimizer,
               10000,
               log_dir=f'runs/threshold_{noise_level:.2f}_run_{run}/')
 # NOTE(review): the save path has no '.pt' extension, unlike the other
 # experiments in this file — confirm intentional.
 torch.save(model.state_dict(),
            f'data/threshold_model_{noise_level:.2f}_{run}')
Example #5
0
                                          random=True)
# NOTE(review): the line above is the tail of a dataset.create_dataset(...)
# call that is truncated in this excerpt.

# Running deepmod
estimator = LassoLarsIC(fit_intercept=False)
config = {
    'n_in': 2,
    'hidden_dims': [30, 30, 30, 30, 30],
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {
        'poly_order': 2,
        'diff_order': 2
    },
    'sparsity_estimator': estimator
}
model = DeepModDynamic(**config)
# Trainable noise scale for the log-probability loss; starts at 0.1.
sigma = torch.tensor(0.1, dtype=torch.float32, requires_grad=True)

# Two parameter groups so the network weights and sigma can share one
# optimizer while keeping their own settings.
optimizer = torch.optim.Adam([{
    'params': model.network_parameters(),
    'betas': (0.99, 0.999)
}, {
    'params': sigma,
    'betas': (0.99, 0.999)
}],
                             amsgrad=True)

train_dynamic_logprob(model,
                      X_train,
                      y_train,
                      optimizer,
                      # NOTE(review): this call is truncated in this
                      # excerpt — the remaining arguments are missing.
# Settings and parameters
if torch.cuda.is_available():
    torch.set_default_tensor_type('torch.cuda.FloatTensor')

# Seed every RNG for a reproducible run.
np.random.seed(42)
torch.manual_seed(42)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Parameters of the Burgers delta-peak solution.
v = 0.1
A = 1.0

# Coarser grid than the other experiments: 80 x 25 points.
x = np.linspace(-3, 4, 80)
t = np.linspace(0.5, 5.0, 25)
x_grid, t_grid = np.meshgrid(x, t, indexing='ij')

# Making data
# NOTE(review): n_samples=0 with random=False presumably means "use the
# whole grid" — confirm against Dataset.create_dataset.
dataset = Dataset(BurgersDelta, v=v, A=A)
X_train, y_train = dataset.create_dataset(
    x_grid.reshape(-1, 1), t_grid.reshape(-1, 1),
    n_samples=0, noise=0.1, random=False)

# Exact library terms and time derivative on the same grid.
theta = dataset.library(x_grid.reshape(-1, 1), t_grid.reshape(-1, 1),
                        poly_order=2, deriv_order=3)
dt = dataset.time_deriv(x_grid.reshape(-1, 1), t_grid.reshape(-1, 1))

# Running deepmod
config = {
    'n_in': 2,
    'hidden_dims': [30, 30, 30, 30, 30],
    'n_out': 1,
    'library_function': library_1D_in,
    'library_args': {'poly_order': 2, 'diff_order': 3},
    'sparsity_estimator': LassoLarsIC(fit_intercept=False),
}
model = DeepModDynamic(**config)

# Only the network weights are passed to the optimizer.
optimizer = torch.optim.Adam(model.network_parameters(), betas=(0.99, 0.999), amsgrad=True)
train(model, X_train, y_train, optimizer, 100000,
      loss_func_args={'library': torch.tensor(theta), 'time_deriv': torch.tensor(dt)})

torch.save(model.state_dict(), 'data/deepmod_lstsq.pt')