Example #1
0
    def test_callback(self):
        """Checkpointing writes exactly one MAE file, and scalers invert correctly."""
        cbs = [
            GeneratorLog(self.train_gen, steps_per_train=1, val_gen=self.train_gen,
                         steps_per_val=1, n_every=1, val_names=['conductivity'],
                         val_units=['S/cm']),
            ModelCheckpointMAE(filepath='./val_mae_{epoch:05d}_{val_mae:.6f}.hdf5',
                               val_gen=self.train_gen, steps_per_val=1),
        ]
        files_before = glob.glob("./val_mae*.hdf5")
        self.model.fit_generator(self.train_gen, steps_per_epoch=1, epochs=1,
                                 callbacks=cbs, verbose=0)
        files_after = glob.glob("./val_mae*.hdf5")

        self.assertEqual(len(files_before), 0)
        self.assertEqual(len(files_after), 1)
        os.remove(files_after[0])

        # Intensive scaler (mean=1, std=1): inverse is the affine map t*std + mean.
        cb = ModelCheckpointMAE(filepath='./val_mae_{epoch:05d}_{val_mae:.6f}.hdf5',
                                val_gen=self.train_gen, steps_per_val=1,
                                target_scaler=StandardScaler(1, 1, is_intensive=True))
        targets = np.array([[1, 1], [2, 2]])
        nb_atoms = np.array([[2], [3]])
        restored = cb.target_scaler.inverse_transform(targets, nb_atoms)
        self.assertTrue(np.allclose(restored, np.array([[2, 2], [3, 3]])))

        # Extensive scaler: the affine result is additionally scaled by atom count.
        cb = ModelCheckpointMAE(filepath='./val_mae_{epoch:05d}_{val_mae:.6f}.hdf5',
                                val_gen=self.train_gen, steps_per_val=1,
                                target_scaler=StandardScaler(1, 1, is_intensive=False))
        targets = np.array([[1, 1], [2, 2]])
        nb_atoms = np.array([[2], [3]])
        restored = cb.target_scaler.inverse_transform(targets, nb_atoms)
        self.assertTrue(np.allclose(restored, np.array([[4, 4], [9, 9]])))
Example #2
0
 def test_transform_inverse_transform(self):
     """transform followed by inverse_transform is the identity in both modes."""
     # Same order as before: extensive first, then intensive.
     for intensive in (False, True):
         scaler = StandardScaler.from_training_data(self.structures,
                                                    self.targets,
                                                    is_intensive=intensive)
         round_trip = scaler.inverse_transform(scaler.transform(100, 1), 1)
         self.assertAlmostEqual(100, round_trip)
Example #3
0
 def __init__(self,
              model,
              graph_convertor,
              target_scaler=None,
              **kwargs):
     """Bundle a model with its graph convertor and target scaler.

     Args:
         model: the underlying predictive model.
         graph_convertor: object that converts structures into graph inputs.
         target_scaler: scaler applied to targets. Defaults to an identity
             StandardScaler(mean=0, std=1, is_intensive=True). The previous
             signature built this scaler in the default argument, which is
             evaluated once at definition time and shared by every instance;
             using a None sentinel gives each instance a fresh scaler.
         **kwargs: accepted for signature compatibility; not used in the
             visible body.
     """
     if target_scaler is None:
         target_scaler = StandardScaler(mean=0, std=1, is_intensive=True)
     self.model = model
     self.graph_convertor = graph_convertor
     self.target_scaler = target_scaler
Example #4
0
# === megnet start === #

from megnet.models import MEGNetModel
from megnet.data.graph import GaussianDistance
from megnet.data.crystal import CrystalGraph
from megnet.utils.preprocessing import StandardScaler

from megnet.callbacks import ReduceLRUponNan, ManualStop, XiaotongCB

import numpy as np

# Crystal graph with Gaussian-expanded bond distances (100 centers on [0, 5],
# width 0.5) and a 4 Angstrom neighbor cutoff.
gc = CrystalGraph(bond_converter=GaussianDistance(
        np.linspace(0, 5, 100), 0.5), cutoff=4)
# examine_loss is presumably a custom loss defined elsewhere in this file — not visible here.
model = MEGNetModel(100, 2, graph_converter=gc, lr=1e-4, loss=examine_loss) # , metrics=[examine_loss])
INTENSIVE = False # U0 is an extensive quantity
# Fit the scaler on the training data and attach it so predictions are rescaled.
scaler = StandardScaler.from_training_data(structures, targets, is_intensive=INTENSIVE)
model.target_scaler = scaler

# callbacks = [ReduceLRUponNan(patience=500), ManualStop(), XiaotongCB()]

# change structures to megnet predictable structures
mp_strs = []

# Pre-convert targets into scaled space, using per-structure atom counts.
train_graphs, train_targets = model.get_all_graphs_targets(structures, targets)
train_nb_atoms = [len(i['atom']) for i in train_graphs]
train_targets = [model.target_scaler.transform(i, j) for i, j in zip(train_targets, train_nb_atoms)]


# Pre-convert each structure into the model's graph input format.
for s in structures:
    mp_strs.append(model.graph_converter.graph_to_input(model.graph_converter.convert(s)))
Example #5
0
 def test_from_training(self):
     """An extensive scaler fitted on the toy data has mean == 1 and std == 1."""
     fitted = StandardScaler.from_training_data(self.structures,
                                                self.targets,
                                                is_intensive=False)
     self.assertEqual(fitted.mean, 1)
     self.assertEqual(fitted.std, 1)
Example #6
0
def predict(model):
    """Print the mean absolute error of *model* over the Q3 hold-out set.

    Relies on the module-level Q3_s (structures) and Q3_t (targets).
    """
    errors = [abs(model.predict_structure(s).ravel() - t)
              for s, t in zip(Q3_s, Q3_t)]
    MAE = sum(errors) / len(Q3_s)
    print('MAE is:', MAE)

# Train on the first two quarters; Q1_*/Q2_* are defined elsewhere in this file.
train_s = Q1_s + Q2_s
train_t = Q1_t + Q2_t

# Crystal graph with Gaussian-expanded bond distances (100 centers on [0, 5],
# width 0.5) and a 4 Angstrom neighbor cutoff.
gc = CrystalGraph(bond_converter=GaussianDistance(
        np.linspace(0, 5, 100), 0.5), cutoff=4)
model = MEGNetModel(100, 2, graph_converter=gc, lr=1e-3)
INTENSIVE = False # U0 is an extensive quantity
scaler = StandardScaler.from_training_data(train_s, train_t, is_intensive=INTENSIVE)
model.target_scaler = scaler

# Stop after 50 epochs without val_loss improvement and restore the best weights.
callback = tf.keras.callbacks.EarlyStopping(monitor="val_loss", patience=50, restore_best_weights=True)

# 80/20 train/validation split (in the given order, no shuffling here).
idx = int(0.8 * len(train_s))


model.train(train_s[:idx], train_t[:idx],
        validation_structures=train_s[idx:],
        validation_targets=train_t[idx:],
        callbacks=[callback],
        epochs=1000,
        save_checkpoint=False,
        automatic_correction=False)