Example #1
    def test_gradient_clipping(self):
        # Build a small functional model. The SGD optimizer uses a large
        # learning rate but clips every gradient element to 1e-8, so the
        # weights should barely move during training.
        data = tf.keras.layers.Input(shape=[10])

        x = tf.keras.layers.Flatten()(data)
        x = tf.keras.layers.Dense(10, activation='relu')(x)
        predictions = tf.keras.layers.Dense(2, activation='softmax')(x)

        model = tf.keras.models.Model(inputs=data, outputs=predictions)
        model.compile(optimizer=tf.keras.optimizers.SGD(lr=1, clipvalue=1e-8),
                      loss='sparse_categorical_crossentropy',
                      metrics=['accuracy'])
        # Wrap the compiled tf.keras model in the KerasModel wrapper under test.
        model = KerasModel(model)

        pre_weights = model.get_weights()

        dataset = self.create_training_dataset()

        # 5 iterations
        model.fit(dataset)

        current_weight = model.get_weights()

        # With every gradient element clipped to 1e-8 and only 5 update
        # steps, the first weight tensor should have moved by less than 1e-7.
        self.assertTrue(
            np.all(np.abs(current_weight[0] - pre_weights[0]) < 1e-7))
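For reference, here is a minimal standalone sketch of the same clipping check using plain tf.keras and tf.data, without the KerasModel wrapper; the layer sizes, the 5 update steps, and the 1e-7 threshold mirror the test above, while the sample data is arbitrary.

import numpy as np
import tensorflow as tf

# 20 samples with 10 features each, binary integer labels.
features = np.random.rand(20, 10).astype(np.float32)
labels = np.random.randint(0, 2, size=(20,))

inputs = tf.keras.layers.Input(shape=[10])
hidden = tf.keras.layers.Dense(10, activation='relu')(inputs)
outputs = tf.keras.layers.Dense(2, activation='softmax')(hidden)
model = tf.keras.models.Model(inputs=inputs, outputs=outputs)

# Large learning rate, but every gradient element is clipped to +/- 1e-8.
model.compile(optimizer=tf.keras.optimizers.SGD(learning_rate=1, clipvalue=1e-8),
              loss='sparse_categorical_crossentropy')

pre_weights = model.get_weights()

# Batch size 4 over 20 samples -> 5 update steps in one epoch.
dataset = tf.data.Dataset.from_tensor_slices((features, labels)).batch(4)
model.fit(dataset, epochs=1, verbose=0)

# 5 steps * (max clipped gradient 1e-8) * (learning rate 1) < 1e-7
assert np.all(np.abs(model.get_weights()[0] - pre_weights[0]) < 1e-7)

The next snippet is a separate, standalone script: it builds a small regression network, wraps it in KerasModel, and saves both the weights and the full model.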
# NOTE: `inputDim` and `outputDim` are assumed to be defined earlier in the
# original script (they are not shown here). Judging from the save path
# "tfParkModel.h5" below, KerasModel is presumably the Analytics Zoo tfpark
# wrapper (e.g. `from zoo.tfpark import KerasModel`).
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(inputDim, activation='relu', input_shape=(2,)),
    tf.keras.layers.Dense(inputDim, activation='relu'),
    tf.keras.layers.Dense(outputDim),
])

optimizer = tf.keras.optimizers.Adam()
model.compile(
    optimizer=optimizer,
    loss='mean_squared_error',
)
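
# Hypothetical placeholder training data for the fit() call that is commented
# out below: the original script appears to load `x` and `y` as pandas
# DataFrames (note the `.values` in the commented call). The shapes here just
# follow the model definition above (2 input features, `outputDim` targets).
import numpy as np
import pandas as pd

x = pd.DataFrame(np.random.rand(100, 2))
y = pd.DataFrame(np.random.rand(100, outputDim))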

keras_model = KerasModel(model)
print("Created Keras Model! \n")

# print("batchSize TFDataset: {}".format(training_dataset.batch_size))
# keras_model.fit(x=x.values, y=y.values, epochs=5)
print("Training Complete!\n")
# keras_model.save_model("../resources/savedModels/tfParkModel.h5")

weights = keras_model.get_weights()
# weights = np.array(weights, dtype=object)
# print(weights, type(weights))
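# `get_weights()` presumably mirrors tf.keras and returns a list of numpy
# arrays, one per layer variable (kernel and bias for each Dense layer).
# A quick way to inspect them:
for w in weights:
    print(w.shape)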

# kModel = Model()  # unused, and `Model` is not defined in this snippet

# Persist the weights alone and the full wrapped model for later reuse.
keras_model.save_weights("../resources/savedModels/keras/weights/wt.h5")

keras_model.save_model("../resources/savedModels/keras/model.h5")
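As a sanity check, the saved model can be loaded back. This assumes the wrapper writes a standard Keras HDF5 file (which the .h5 extension suggests); if it uses its own format, the wrapper's corresponding load API should be used instead.

import tensorflow as tf

restored = tf.keras.models.load_model("../resources/savedModels/keras/model.h5")
restored.summary()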