def test_saveload_history(self):
    """Round-trip a training-history DataFrame through safe_model/load_model
    and check that columns, values and the index name survive intact."""
    # safe_model requires a model object, so build a minimal throwaway one.
    model = Sequential([
        Dense(32, input_shape=(5, )),
        Activation('relu'),
        Dense(10),
        Activation('softmax'),
    ])

    metrics = {
        'M1final_mae': [0.3, 0.2],
        'val_M1final_mae': [0.31, 0.21],
        'M1final_loss': [1.5, 1.3],
        'val_M1final_loss': [1.6, 1.4],
        'training_run': [1, 1],
    }
    history = pd.DataFrame(data=metrics)
    history.index.name = 'epoch'

    # Always remove the temporary file, even if save/load raises.
    try:
        fileio.safe_model(model, {}, [], [], [], {}, 'test.h5', history=history)
        _, _, _, _, _, _, history_new = fileio.load_model('test.h5')
    finally:
        os.remove('test.h5')

    np.testing.assert_array_equal(history.columns, history_new.columns)
    np.testing.assert_array_equal(history.values, history_new.values)
    assert history_new.index.name == 'epoch'
def test_saveload_model(self):
    """Round-trip a functional-API model through safe_model/load_model and
    check the architecture is unchanged."""
    # Build a small multi-output model; it is never trained — only its
    # architecture (to_json) is compared after the save/load round trip.
    inputs = Input(shape=(2, ))
    dense1 = Dense(10, activation='relu', name='FC_1')(inputs)
    dense2 = Dense(5, activation='relu', name='FC_2')(dense1)
    output1 = Dense(1, name='output1')(dense2)
    output2 = Dense(1, name='output2')(dense2)
    output3 = Dense(2, activation='softmax', name='output3')(dense2)
    model = Model(inputs, [output1, output2, output3])

    model.compile(optimizer='adam', loss='mean_squared_error', metrics=['mae'])

    # Always remove the temporary file, even if save/load raises.
    try:
        fileio.safe_model(model, {}, [], [], [], {}, 'test.h5')
        model_new, _, _, _, _, _, _ = fileio.load_model('test.h5')
    finally:
        os.remove('test.h5')

    # Architecture must be identical after the round trip.
    assert model.to_json() == model_new.to_json()
def load_model(self, filename):
    """
    Load a model saved as a pickled file format

    Restores the model, pre-processors, feature/target definitions and
    setup on this instance, then re-prepares the train/test data split.
    """
    model, processors, features, regressors, classifiers, setup, history = fileio.load_model(
        filename)
    self.model = model
    self.processors = processors
    # Fix: keep the loaded training history instead of silently discarding
    # it — consistent with the hdf5 loader, which stores self.history.
    self.history = history
    self.setup = setup
    self.features = features
    self.regressors = regressors
    self.classifiers = classifiers

    # load the data and split it in a training - test set.
    self._prepare_data()
def load_model(self, filename):
    """
    Load a model saved to hdf5 format

    Everything returned by fileio.load_model is stored directly on this
    instance, after which the train/test data split is rebuilt.
    """
    (self.model,
     self.processors,
     self.features,
     self.regressors,
     self.classifiers,
     setup,
     self.history) = fileio.load_model(filename)

    # TODO: not sure if the add_defaults_to_setup should be run automatically
    self.setup = setup

    # load the data and split it in a training - test set.
    self._prepare_data()