def compile(name,
            model: Sequential,
            train_samples: pd.DataFrame,
            validation_samples: pd.DataFrame,
            gen,
            type='img',
            batch_size=5,
            epochs=5):
    """Train *model* on generator-fed batches and save its weights to *name*.

    NOTE(review): the function name shadows the builtin ``compile`` and the
    ``type`` parameter shadows the builtin ``type``; both are kept unchanged
    so existing callers (including keyword calls) keep working.

    Args:
        name: filesystem path the trained weights are written to.
        model: Keras ``Sequential`` model; compiled here with Adam + CCE.
        train_samples: training sample table; only ``len()`` is used to
            derive the number of steps per epoch.
        validation_samples: validation sample table; same usage.
        gen: factory such that ``gen(samples, type)`` returns a callable
            ``(batch_size, infinite=True) -> generator`` of batches.
        type: sample kind forwarded to ``gen`` (default ``'img'``).
        batch_size: samples per batch (default 5 — the previously
            hard-coded value, so existing behavior is unchanged).
        epochs: training epochs (default 5 — the previous hard-coded value).
    """
    # Guarantee at least one step even when a split is smaller than a batch;
    # fit_generator rejects a step count of 0.
    steps_per_epoch = max(1, len(train_samples) // batch_size)
    validation_steps = max(1, len(validation_samples) // batch_size)
    train_generator = gen(train_samples, type)(batch_size, infinite=True)
    validation_generator = gen(validation_samples, type)(batch_size, infinite=True)

    # `lr` is the legacy Keras argument name; kept because this file targets
    # the old API (fit_generator). Newer Keras spells it `learning_rate`.
    adam = optimizers.Adam(lr=0.0001)
    model.compile(loss='categorical_crossentropy', optimizer=adam)

    history_object = model.fit_generator(train_generator,
                                         validation_data=validation_generator,
                                         epochs=epochs,
                                         callbacks=None,
                                         validation_steps=validation_steps,
                                         steps_per_epoch=steps_per_epoch)

    model.save_weights(name)
    # model.save('fcn_model.h5')

    # Dump the recorded loss curves for a quick training sanity check.
    print(history_object.history.keys())
    print('Loss')
    print(history_object.history['loss'])

    print('Validation Loss')
    print(history_object.history['val_loss'])
"""
If you need to load weights into a *different* architecture (with some layers in common), for instance for fine-tuning or transfer-learning, you can load weights by *layer name*:
"""
model_yaml.load_weights('to_delete_weights.h5', by_name=True)

# make sure they share the same weights: total 202 parameters
(model_json.get_weights()[0] == model_yaml.get_weights()[0]).sum()
(model_json.get_weights()[1] == model_yaml.get_weights()[1]).sum()
"""
For example
Assume original model looks like this:
"""
model1 = Sequential()
model1.add(Dense(2, input_dim=3, name='dense_1'))
model1.add(Dense(3, name='dense_2'))
model1.save_weights("weights1.h5")

# check out the weights
model1.get_weights()

# new model
model2 = Sequential()
model2.add(Dense(2, input_dim=3, name='dense_1'))  # will be loaded
model2.add(Dense(10, name='new_dense'))  # will not be loaded

# load weights from first model; will only affect the first layer, dense_1.
model2.load_weights("weights1.h5", by_name=True)

# check out the weights
model2.get_weights()
# validation_split=0.2 would instead reserve 20% of the data as a validation
# set (alternative to passing an explicit validation generator/set).

# Class-probability predictions, one row of probabilities per training sample.
predictions = model.predict(X_train)

# Collapse each probability row to a single predicted class index.
# NOTE(review): np_utils.categorical_probas_to_classes was removed from
# modern Keras; equivalent is predictions.argmax(axis=-1).
predictions = np_utils.categorical_probas_to_classes(predictions)

# evaluate() returns [loss, *metrics]; score[1] is the first metric —
# presumably accuracy, assuming the model was compiled with
# metrics=["accuracy"] (confirm against the compile call used for `model`).
score = model.evaluate(X_test, y_test, verbose=0)
print('Accuracy: ', score[1] * 100)  # fixed message: was misspelled 'Accurate'

##############
# Build a small MNIST-style classifier (784 -> 800 relu -> 10 softmax), then
# round-trip its architecture (JSON) and weights (HDF5) through disk.
# NOTE(review): init= is the legacy Keras spelling of kernel_initializer=,
# consistent with the old API used throughout this file.
model = Sequential()
model.add(Dense(800, input_dim=784, init="normal", activation='relu'))
model.add(Dense(10, init="normal", activation='softmax'))

# Persist the architecture as JSON; `with` guarantees the handle is closed
# even if write() raises (the original leaked the handle on error).
model_json = model.to_json()
with open('model.json', 'w') as json_file:
    json_file.write(model_json)

# NOTE(review): 'weighst.h5' looks like a typo for 'weights.h5'; kept as-is
# because the same name is used for loading below (and possibly elsewhere).
model.save_weights("weighst.h5")

# Restore the architecture from JSON, then load the saved weights into it.
with open('model.json', 'r') as json_file:
    loaded_model_json = json_file.read()

loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights('weighst.h5')

# A deserialized model must be re-compiled before evaluate()/training.
loaded_model.compile(loss="categorical_crossentropy",
                     optimizer="SGD",
                     metrics=["accuracy"])