Example #1
0
    def test_layer_tracking(self):
        """Checkpoints are interchangeable between models built from the same layers."""
        with self.cached_session():
            model = _get_model(input_shape=(4, ))

            if testing_utils.get_model_type() == 'subclass':
                # A subclassed model has no static input signature, so it must
                # be built explicitly before weights can be saved.
                model._set_inputs(tensor_spec.TensorSpec((None, 4)))

            # Second model from equivalent layers, deliberately left unbuilt at
            # construction time; calling it below triggers the deferred build.
            model2 = models.Sequential(
                _get_layers(input_shape=None, add_input_layer=False))
            model2.predict_on_batch(np.random.random((10, 4)))

            first_ckpt = os.path.join(self.get_temp_dir(), 'model_ckpt')
            second_ckpt = os.path.join(self.get_temp_dir(), 'model2_ckpt')
            model.save_weights(first_ckpt)
            model2.save_weights(second_ckpt)

            # Cross-load: the second model's checkpoint restores into the first...
            model.load_weights(second_ckpt)
            self.assertAllClose(self.evaluate(model.weights),
                                self.evaluate(model2.weights))

            # ...and the first model's checkpoint restores into both models.
            model.load_weights(first_ckpt)
            model2.load_weights(first_ckpt)
            self.assertAllClose(self.evaluate(model.weights),
                                self.evaluate(model2.weights))
Example #2
0
def deep(features_shape,
         number_of_classes,
         activation_function='relu',
         output_activation=None):
    """Build a fully-connected classifier: Flatten -> 3x Dense(512) -> Dense(classes).

    Args:
        features_shape: tuple, shape of a single input sample (excluding batch).
        number_of_classes: int, number of units in the prediction layer.
        activation_function: activation used by the hidden Dense layers.
        output_activation: activation for the prediction layer. Defaults to
            ``activation_function`` for backward compatibility, but can be set
            to e.g. 'softmax' — the original code hard-wired the hidden-layer
            activation onto the output, which is rarely what a classifier wants.

    Returns:
        An uncompiled keras Sequential model (summary printed as a side effect).
    """
    if output_activation is None:
        # Preserve the historical behavior when the caller does not override.
        output_activation = activation_function

    model = models.Sequential()

    # Input
    model.add(
        layers.InputLayer(input_shape=features_shape,
                          name='Inputs',
                          dtype='float32'))

    # Flatten
    model.add(layers.Flatten(name='Flatten'))

    # Dense block: three identical hidden layers.
    for index in (1, 2, 3):
        model.add(
            layers.Dense(units=512,
                         activation=activation_function,
                         name='Dense%d' % index))

    # Predictions
    model.add(
        layers.Dense(units=number_of_classes,
                     activation=output_activation,
                     name='Prediction'))

    # Print network summary
    model.summary()

    return model
Example #3
0
def keras_estimator(model_dir, config, params):
    """Builds a small Keras regression network and wraps it as an Estimator.

    Mean Squared Error (MSE) is a common loss function used for regression.
    A common regression metric is Mean Absolute Error (MAE).

    Args:
        model_dir: (str) file path where training files will be written.
        config: (tf.estimator.RunConfig) Configuration options to save model.
        params: (dict) must provide 'num_features' and 'learning_rate'.

    Returns:
        A tf.estimator.Estimator wrapping the compiled Keras model.
    """
    # Two ReLU hidden layers followed by a single linear output unit.
    network = [
        Dense(64,
              activation=tf.nn.relu,
              input_shape=(params['num_features'], )),
        Dense(64, activation=tf.nn.relu),
        Dense(1),
    ]
    model = models.Sequential(network)

    # Compile model with learning parameters.
    rmsprop = tf.train.RMSPropOptimizer(learning_rate=params['learning_rate'])
    model.compile(optimizer=rmsprop, loss='mse', metrics=['mae'])

    return tf.keras.estimator.model_to_estimator(keras_model=model,
                                                 model_dir=model_dir,
                                                 config=config)
Example #4
0
def train(training_data,
          targets,
          epochs,
          batch_size,
          validation_split,
          hidden_neurons=5):
    """Train a stacked-LSTM binary classifier on the given data.

    Args:
        training_data: 2-D array of samples; a singleton time axis is inserted.
        targets: label array; transposed before fitting.
        epochs: number of training epochs.
        batch_size: mini-batch size.
        validation_split: fraction of data held out for validation.
        hidden_neurons: units per LSTM layer.

    Returns:
        A (model, history) tuple.
    """
    # Insert a time axis so the data is 3-D as LSTM layers require.
    training_data = training_data[:, numpy.newaxis, :]
    targets = targets.transpose()

    net = models.Sequential()
    # Four sequence-returning LSTM layers feed a final summarizing LSTM.
    for _ in range(4):
        net.add(layers.LSTM(hidden_neurons, return_sequences=True))
    net.add(layers.LSTM(hidden_neurons))
    net.add(layers.Dense(1, activation='sigmoid'))

    net.compile(loss='binary_crossentropy',
                optimizer='Adam',
                metrics=['accuracy'])
    history = net.fit(training_data,
                      targets,
                      epochs=epochs,
                      batch_size=batch_size,
                      validation_split=validation_split)

    return net, history