Example #1
def test_saving_deep_parameterized_object(session_tf, filename,
                                          deep_structure):
    sess_a = session_tf
    gp.Saver().save(filename, deep_structure)
    with session_context() as sess_b:
        copy = gp.Saver().load(filename)
        equal_params(deep_structure.a,
                     copy.a,
                     session_a=sess_a,
                     session_b=sess_b)
        equal_params(deep_structure.b,
                     copy.b,
                     session_a=sess_a,
                     session_b=sess_b)
        equal_params(deep_structure.c.a,
                     copy.c.a,
                     session_a=sess_a,
                     session_b=sess_b)
        equal_params(deep_structure.c.b,
                     copy.c.b,
                     session_a=sess_a,
                     session_b=sess_b)
        equal_params(deep_structure.c.c.a,
                     copy.c.c.a,
                     session_a=sess_a,
                     session_b=sess_b)
        equal_params(deep_structure.c.c.b,
                     copy.c.c.b,
                     session_a=sess_a,
                     session_b=sess_b)
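The deep_structure fixture isn't shown in this excerpt; a plausible sketch of the kind of nested object the test walks, assuming GPflow 1.x (the class and function names are illustrative, only the attribute paths a, b, c mirror the test):

import numpy as np
import gpflow as gp

class Structure(gp.Parameterized):
    # container with two Param leaves, matching the .a/.b accesses above
    def __init__(self):
        super().__init__()
        self.a = gp.Param(np.random.rand(3, 2))
        self.b = gp.Param(np.random.rand(3, 2))

def make_deep_structure():
    # nest containers so that .c.c.a and .c.c.b exist three levels down
    root = Structure()
    root.c = Structure()
    root.c.c = Structure()
    return root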
Example #2
def test_loading_without_autocompile(session_tf, filename, model):
    gp.Saver().save(filename, model)
    with session_context() as session:
        context = gp.SaverContext(autocompile=False)
        loaded = gp.Saver().load(filename, context=context)
        assert loaded.is_built(session_tf.graph) == gp.Build.NO
        assert loaded.is_built(session.graph) == gp.Build.NO
        assert not any(loaded.trainable_tensors)
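With autocompile switched off, the loaded object stays unbuilt in every graph, which is what the asserts verify. A minimal follow-up sketch, assuming GPflow 1.x, of how a caller would then build it by hand:

# compile explicitly once a target session is chosen
loaded.compile(session)
assert loaded.is_built(session.graph) == gp.Build.YES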
Example #3
def test_saving_gpflow_model(session_tf, filename, model):
    x_new = Data.x_new()
    predict_origin = model.predict_f(x_new)
    gp.Saver().save(filename, model)
    with session_context() as session:
        loaded = gp.Saver().load(filename)
        predict_loaded = loaded.predict_f(x_new)
        assert_allclose(predict_origin, predict_loaded)
Example #4
def test_loading_into_specific_session(session_tf, filename, model):
    x_new = Data.x_new()
    predict_origin = model.predict_f(x_new)
    gp.Saver().save(filename, model)
    with session_context() as session:
        context = gp.SaverContext(session=session)
        loaded = gp.Saver().load(filename, context=context)
        predict_loaded = loaded.predict_f(x_new, session=session)
    assert_allclose(predict_origin, predict_loaded)
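Passing the target session through gp.SaverContext(session=session) restores the variables into that session, which is why the prediction call also pins session=session explicitly.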
Example #5
    def load_model(self, fpath):
        self.m = gpf.Saver().load(fpath)

        fbase, fname = os.path.splitext(fpath)

        with open(fbase + '.pkl', 'rb') as f:
            params = pickle.load(f)
            for k, v in params.items():
                setattr(self, k, v)
Example #6
    def save_model(self, fpath):
        gpf.Saver().save(fpath, self.m)

        fbase, fname = os.path.splitext(fpath)

        params = {
            'scaling': self.scaling,
            'multi_output': self.multi_output,
            'xscaler': self.xscaler,
            'yscaler': self.yscaler,
            'xmin': self.xmin,
            'xmax': self.xmax,
            'X_in_min': self.X_in_min,
            'X_in_max': self.X_in_max,
            'Y_in_min': self.Y_in_min,
            'Y_in_max': self.Y_in_max,
            'n_in_dims': self.n_in_dims,
            'n_out_dims': self.n_out_dims
        }
        with open(fbase + '.pkl', 'wb') as f:
            pickle.dump(params, f)
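Examples 5 and 6 form a save/load pair: the GPflow graph goes into the Saver file and the scaling metadata into a sibling .pkl. A hedged round-trip sketch, assuming GPflow 1.x and a hypothetical ModelWrapper class that carries the two methods above (only the scaling attribute is kept, for brevity):

import os
import pickle
import numpy as np
import gpflow as gpf

class ModelWrapper:
    # hypothetical owner of save_model/load_model from Examples 5 and 6
    def __init__(self):
        self.m = None
        self.scaling = None

    def save_model(self, fpath):
        gpf.Saver().save(fpath, self.m)
        fbase, _ = os.path.splitext(fpath)
        with open(fbase + '.pkl', 'wb') as f:
            pickle.dump({'scaling': self.scaling}, f)

    def load_model(self, fpath):
        self.m = gpf.Saver().load(fpath)
        fbase, _ = os.path.splitext(fpath)
        with open(fbase + '.pkl', 'rb') as f:
            for k, v in pickle.load(f).items():
                setattr(self, k, v)

# round trip: fit, save, restore into a fresh wrapper
X = np.random.rand(20, 1)
Y = np.sin(10 * X) + 0.1 * np.random.randn(20, 1)
wrapper = ModelWrapper()
wrapper.m = gpf.models.GPR(X, Y, kern=gpf.kernels.RBF(1))
wrapper.scaling = 'minmax'
wrapper.save_model('gp_model.gpflow')

restored = ModelWrapper()
restored.load_model('gp_model.gpflow')
assert restored.scaling == 'minmax'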
Example #7
# construct kernel
k1 = gpflow.kernels.Matern32(1, active_dims=[0])
coreg = gpflow.kernels.Coregion(1, output_dim=M, rank=M, active_dims=[1])
kern = k1 * coreg
# construct likelihood
like_array = [gpflow.likelihoods.StudentT() for _ in range(M)]  # one StudentT per output
lik = gpflow.likelihoods.SwitchedLikelihood(like_array)
# construct x: stack each output's inputs with an index column identifying the output
X_augmented = np.vstack([
    np.hstack((start[:, i:i + 1], i * np.ones_like(start[:, i:i + 1])))
    for i in range(M)
])
# construct y: the targets get the same index-column augmentation
Y_augmented = np.vstack([
    np.hstack((predict[:, i:i + 1], i * np.ones_like(predict[:, i:i + 1])))
    for i in range(M)
])
# construct model
m = gpflow.models.VGP(X_augmented, Y_augmented, kern=kern, likelihood=lik, num_latent=1)
gpflow.train.ScipyOptimizer().minimize(m, disp=True)

saver = gpflow.Saver()
saver.save('./model/gp.mdl', m)
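Restoring the saved model later goes through the same Saver API. A minimal sketch (x_new is an assumed test input; the second column must carry the integer output index that Coregion and SwitchedLikelihood key on):

import numpy as np
import gpflow

m_loaded = gpflow.Saver().load('./model/gp.mdl')

# query output 0: column 0 holds the inputs, column 1 the output index
x_new = np.linspace(0.0, 1.0, 50)[:, None]
X_test = np.hstack((x_new, np.zeros_like(x_new)))
mean, var = m_loaded.predict_f(X_test)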
Example #8
    def __init__(self, sequence, trigger: Trigger, model: gpflow.Parameterized,
                 path):
        super().__init__(sequence, trigger)
        self.path = path
        self.model = model
        self.saver = gpflow.Saver()
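The excerpt stops at the constructor; presumably a trigger-driven companion method performs the actual save. A sketch under the assumption that the class follows a monitor-task pattern (the run name and its signature are guesses, not shown in the source):

    def run(self, *args, **kwargs):
        # when the trigger fires, persist the monitored model to the configured path
        self.saver.save(self.path, self.model)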