Example #1
import os
import datetime
import pandas as pd
import numpy as np
from example_pendulum import get_pendulum_data
from sindy_utils import library_size
from training import train_network
import tensorflow as tf

# SET UP PARAMETERS
params = {}

# generate training and validation data
training_data = get_pendulum_data(100)
validation_data = get_pendulum_data(10)
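# get_pendulum_data returns a dict of arrays; these snippets index its 'x' (and, later, 'dx'/'ddx') entries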

params['N'] = training_data['x'].shape[-1]
params['d'] = 1
params['model_order'] = 2
params['poly_order'] = 3
params['include_sine'] = True
params['l'] = library_size(2 * params['d'], params['poly_order'],
                           params['include_sine'], True)

# set up sequential thresholding
params['sequential_thresholding'] = True
params['coefficient_threshold'] = 0.1
params['threshold_frequency'] = 500
params['coefficient_mask'] = np.ones((params['l'], params['d']))
params['coefficient_initialization'] = 'constant'
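The snippet ends before the network is actually trained. As a minimal sketch of the natural continuation, assuming train_network accepts (training_data, validation_data, params) and returns a dict of results, and that the remaining architecture and optimization parameters have already been added to params, the otherwise unused pandas and datetime imports suggest the results are logged to a pickled DataFrame:

# assumed continuation: the train_network signature and the logging format are not shown
# in the snippet and are illustrative only
results_dict = train_network(training_data, validation_data, params)
df = pd.DataFrame([{**results_dict, **params}])
df.to_pickle('pendulum_' + datetime.datetime.now().strftime("%Y%m%d%H%M") + '.pkl')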
Example #2
import os
import datetime
import pandas as pd
import numpy as np
from example_pendulum import get_pendulum_data
from sindy_utils import library_size
from training import train_network
import tensorflow as tf


# SET UP PARAMETERS
params = {}

# generate training, validation, testing data
training_data, val_data, test_data = get_pendulum_data(100, 10, 10)
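# the three arguments appear to be the number of pendulum initial conditions for the train/validation/test splits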

params['N'] = training_data['x'].shape[-1]
params['d'] = 1
params['model_order'] = 2
params['poly_order'] = 3
params['include_sine'] = True
params['l'] = library_size(2*params['d'], params['poly_order'], params['include_sine'], True)

# set up sequential thresholding
params['sequential_thresholding'] = True
params['coefficient_threshold'] = 0.1
params['threshold_frequency'] = 500
params['coefficient_mask'] = np.ones((params['l'], params['d']))
params['coefficient_initialization'] = 'constant'

# define loss weights
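The snippet is cut off after this comment. For illustration only (the parameter names and values below are assumptions, not the values from the original script), loss weights in this style of script are additional entries of params weighting the reconstruction, SINDy-prediction, and sparsity terms:

# illustrative placeholders; the original example's names and values are not shown
params['loss_weight_decoder'] = 1.0
params['loss_weight_sindy_z'] = 1e-4
params['loss_weight_sindy_x'] = 1e-4
params['loss_weight_sindy_regularization'] = 1e-5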
Example #3

import os
import sys
import pickle
from example_lorenz import get_lorenz_data
from example_pendulum import get_pendulum_data
from example_reactiondiffusion import get_rd_data
from training import create_feed_dictionary
from autoencoder import full_network, define_loss
import tensorflow as tf

data_path = os.getcwd() + '/'
example_problem = sys.argv[1]
save_name = data_path + sys.argv[2]

if example_problem == 'lorenz':
    test_data = get_lorenz_data(1, 1, 100)[2]
elif example_problem == 'pendulum':
    test_data = get_pendulum_data(1, 1, 50)[2]
else:
    test_data = get_rd_data()[2]

params = pickle.load(open(save_name + '_params.pkl', 'rb'))
test_dict = create_feed_dictionary(test_data, params)

autoencoder_network = full_network(params)
learning_rate = tf.placeholder(tf.float32, name='learning_rate')
saver = tf.train.Saver(
    var_list=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))

run_tuple = ()
for key in autoencoder_network.keys():
    run_tuple += (autoencoder_network[key], )
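The snippet stops before the collected tensors are evaluated. A minimal sketch of the usual TF1 continuation, assuming the trained weights were checkpointed under save_name and that test_dict is a valid feed for the restored graph:

# restore the trained weights and evaluate every tensor in the network on the test data
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, save_name)  # assumed checkpoint path
    tf_results = sess.run(run_tuple, feed_dict=test_dict)

# map the fetched values back onto the keys they were collected from
results = dict(zip(autoencoder_network.keys(), tf_results))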
Example #4

import os
import sys
import pickle
from example_lorenz import get_lorenz_data
from example_pendulum import get_pendulum_data
from example_reactiondiffusion import get_rd_data
from training import create_feed_dictionary
from autoencoder import full_network, define_loss
import tensorflow as tf

data_path = os.getcwd() + '/'
example_problem = sys.argv[1]
save_name = data_path + sys.argv[2]

if example_problem == 'lorenz':
    test_data = get_lorenz_data(100)
elif example_problem == 'pendulum':
    test_data = get_pendulum_data(50)
else:
    test_data = get_rd_data()[2]

params = pickle.load(open(save_name + '_params.pkl', 'rb'))
test_dict = create_feed_dictionary(test_data, params)

autoencoder_network = full_network(params)
learning_rate = tf.placeholder(tf.float32, name='learning_rate')
saver = tf.train.Saver(
    var_list=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES))

run_tuple = ()
for key in autoencoder_network.keys():
    run_tuple += (autoencoder_network[key], )
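As in Example #3, the evaluation itself is missing. One quantity commonly inspected after restoring such a model is the thresholded SINDy coefficient matrix; the dictionary keys 'sindy_coefficients' and 'coefficient_mask' used below are assumptions:

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    saver.restore(sess, save_name)  # assumed checkpoint path
    xi = sess.run(autoencoder_network['sindy_coefficients'], feed_dict=test_dict)

# zero out the terms eliminated by sequential thresholding before reading off the model
print(params['coefficient_mask'] * xi)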
Example #5
    def __init__(self, n_ics):
        # generate pendulum movie data for n_ics initial conditions
        data = get_pendulum_data(n_ics)
        B, H, W = data['x'].shape  # (batch, height, width) of the image frames
        # add a channel dimension: (B, H, W) -> (B, 1, H, W)
        self.x = torch.Tensor(data['x']).unsqueeze(1)
        self.dx = torch.Tensor(data['dx']).unsqueeze(1)
        self.ddx = torch.Tensor(data['ddx']).unsqueeze(1)
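Example #5 shows only the constructor of a PyTorch dataset. Below is a self-contained sketch of how such a wrapper is typically completed and consumed; the class name PendulumDataset, the __len__ and __getitem__ methods, and the DataLoader settings are assumptions, not part of the original example:

import torch
from torch.utils.data import Dataset, DataLoader
from example_pendulum import get_pendulum_data

class PendulumDataset(Dataset):  # hypothetical wrapper around the constructor above
    def __init__(self, n_ics):
        data = get_pendulum_data(n_ics)
        # add a channel dimension so each frame is (1, H, W)
        self.x = torch.Tensor(data['x']).unsqueeze(1)
        self.dx = torch.Tensor(data['dx']).unsqueeze(1)
        self.ddx = torch.Tensor(data['ddx']).unsqueeze(1)

    def __len__(self):
        return self.x.shape[0]

    def __getitem__(self, idx):
        # return a frame together with its first and second time derivatives
        return self.x[idx], self.dx[idx], self.ddx[idx]

loader = DataLoader(PendulumDataset(n_ics=10), batch_size=32, shuffle=True)
x_batch, dx_batch, ddx_batch = next(iter(loader))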